Java class com.hp.hpl.jena.query.Dataset example source code

Project: semtool    File: JenaEngine.java
private void copyToTdb() throws RepositoryException {
    if ( !needsSave || null == tdbdir ) {
        return;
    }

    final Dataset dataset = TDBFactory.createDataset( tdbdir.getAbsolutePath() );

    try {
        rc.export( new TdbExporter( dataset ) );
    }
    catch ( RepositoryException | RDFHandlerException e ) {
        log.error( "Problem exporting data to TDB", e );
        dataset.abort();
    }
    finally {
        dataset.close();
    }
}
Project: StreamEventCoreference    File: TrigUtil.java
static ArrayList<String> getAllEntityEvents (Dataset dataset, String entity) {
    ArrayList<String> events = new ArrayList<String>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();
        if (!name.equals(instanceGraph) && (!name.equals(provenanceGraph))) {
            Model namedModel = dataset.getNamedModel(name);
            StmtIterator siter = namedModel.listStatements();
            while (siter.hasNext()) {
                Statement s = siter.nextStatement();
                String object = getObjectValue(s).toLowerCase();
                if (object.indexOf(entity.toLowerCase()) > -1) {
                    String subject = s.getSubject().getURI();
                    if (!events.contains(subject)) {
                        events.add(subject);
                    }
                }
            }
        }
    }
    return events;
}
Project: JenaKBClient    File: KBIndividualImplTest.java
@Test
public void testSomeMethod2() throws Exception {
  Dataset ds = TDBFactory.createDataset("/scratch/WORK2/jena/dataset2/");

  OntModel model1 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym1"));
  OntModel model2 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym2"));
  OntClass thing = model1.createClass("http://www.w3.org/2002/07/owl#Thing");
  model1.createIndividual("http://example.com/onto1#VijayRaj", thing);
  model2.createIndividual("http://example.com/onto2#VijayRaj", thing);
  Model m = model1.union(model2);

  FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/mergetestds.xml");
  RDFDataMgr.write(fw, ds, RDFFormat.NQUADS_UTF8);
  fw.close();
}
Project: p3-osm-transformer    File: JenaTextConfig.java
/**
 * Creates an in-memory Jena data set and an in-memory Lucene index from code.
 * @return the text-indexed dataset
 */
public Dataset createMemDatasetFromCode(){
    log.info("Construct an in-memory dataset with in-memory lucene index using code") ;
    TextQuery.init();
    // Build a text dataset by code.
    // Here, in-memory base data and in-memory Lucene index
    // Base data
    Dataset jenads = DatasetFactory.createMem() ;
    Property streetAddress = jenads.getDefaultModel().createProperty("http://schema.org/streetAddress");
    // Define the index mapping
    //EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode()) ;
    EntityDefinition entDef = new EntityDefinition("uri", "text", streetAddress.asNode()) ;
    // Lucene, in memory.
    Directory dir = new RAMDirectory();
    // Join together into a dataset
    Dataset ds = TextDatasetFactory.createLucene(jenads, dir, entDef) ;
    return ds ;
}
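Once built, the Lucene index is searched from SPARQL through the jena-text text:query property function. A minimal usage sketch building on the method above; the search term 'Main' and the assumption that street-address data has already been loaded are illustrative:

Dataset ds = createMemDatasetFromCode();
ds.begin(ReadWrite.READ);
try {
    // text:query pairs the indexed property with a Lucene query string
    String q = "PREFIX text: <http://jena.apache.org/text#> "
             + "SELECT ?s WHERE { ?s text:query (<http://schema.org/streetAddress> 'Main') }";
    QueryExecution qe = QueryExecutionFactory.create(q, ds);
    try {
        ResultSetFormatter.out(qe.execSelect());
    } finally {
        qe.close();
    }
} finally {
    ds.end();
}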
Project: p3-osm-transformer    File: JenaTextConfig.java
/**
 * Creates a persistent Jena TDB data set and Lucene index.
 * @return the text-indexed dataset
 * @throws IOException if the Lucene index directory cannot be opened
 */
public Dataset createPersistentDatasetFromCode() throws IOException{
    log.info("Construct a persistent Jena data set with lucene index using code") ;
    // Build a text dataset by code.
    TextQuery.init();
    // Remove old files and folders
    deleteFiles(JENA_TDB_TEMP_FOLDER);
    deleteFiles(LUCENE_INDEX_TEMP_FOLDER);
    // Creates new folders
    JENA_TDB_TEMP_FOLDER.mkdirs();
    LUCENE_INDEX_TEMP_FOLDER.mkdirs();
    // Creates persisted Jena data set and Lucene index
    Dataset jenaDataset = TDBFactory.createDataset(JENA_TDB_TEMP_FOLDER.getAbsolutePath()) ;
    // Lucene, persisted.
    Directory luceneIndex = FSDirectory.open(LUCENE_INDEX_TEMP_FOLDER);
    // Define the index mapping
    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode()) ;


    // Join together into a dataset
    return TextDatasetFactory.createLucene(jenaDataset, luceneIndex, entDef) ;
}
Project: p3-osm-transformer    File: JenaTextConfig.java
/**
 * Imports the data into the data set. When a new data set is imported, the old data is deleted.
 * @param dataset the dataset to load into
 * @param file the path or URL of the RDF file to read
 */
public void loadData(Dataset dataset, String file){
    log.info("Start loading") ;
    long startTime = System.nanoTime() ;
    dataset.begin(ReadWrite.WRITE) ;
    try {
        Model m = dataset.getDefaultModel() ;
        log.info("Number of triples before loading: " + m.size());
        RDFDataMgr.read(m, file) ;
        log.info("Number of triples after loading: " + m.size());
        dataset.commit() ;
    } 
    finally { 
        dataset.end() ;
    }
    long finishTime = System.nanoTime() ;
    double time = (finishTime-startTime)/1.0e6 ;
    log.info(String.format("Finish loading - %.2fms", time)) ;
}
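A short sketch of how the methods above could be wired together; the no-arg constructor and the Turtle file path are assumptions for illustration:

JenaTextConfig config = new JenaTextConfig();          // hypothetical instantiation
Dataset ds = config.createPersistentDatasetFromCode(); // TDB store plus Lucene index on disk
config.loadData(ds, "/tmp/addresses.ttl");             // file path is illustrative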
Project: lodreclib    File: RDFTripleExtractor.java
/**
 * Load jena TDB
 */
private void TDBloading(){

    logger.info("TDB loading");

    // create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);

    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ);
    model = dataset.getDefaultModel();
    dataset.end() ;

    // if model is null load local dataset into jena TDB
    if(model == null)
        TDBloading(datasetFile);

}
Project: Jena-Based-Semantic-Web-Tutorial    File: TDBManipulation.java
static void demoOfUsingADirectory() {
    // Make a TDB-backed dataset
    String directory = TDB_DIR;

    // read something
    Dataset dataset = TDBFactory.createDataset(directory);
    logger.debug("read tx start!!!");
    demoOfReadTransaction(dataset);
    logger.debug("read tx end!!!");
    dataset.close();

    // write something
    dataset = TDBFactory.createDataset(directory);
    logger.debug("write tx start!!!");
    demoOfWriteTransaction(dataset);
    logger.debug("write tx end!!!");
    dataset.close();

    // read again
    dataset = TDBFactory.createDataset(directory);
    logger.debug("read tx start!!!");
    demoOfReadTransaction(dataset);
    logger.debug("read tx end!!!");
    dataset.close();
}
Project: Jena-Based-Semantic-Web-Tutorial    File: TDBManipulation.java
private static void demoOfReadTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.READ);

    // Get model inside the transaction
    Model model = dataset.getDefaultModel();

    // query the inserted facts
    StringBuilder query = SPARQLUtils.getRegualrSPARQLPREFIX();
    query.append("PREFIX foaf: <http://xmlns.com/foaf/0.1/>").append(Constants.NEWLINE);
    query.append("SELECT DISTINCT ?person WHERE {?person rdf:type foaf:Person}");
    SPARQLUtils.query(model, query.toString(), "?person");

    model.close();// closing the model to flush

    dataset.end();
}
Project: Jena-Based-Semantic-Web-Tutorial    File: TDBManipulation.java
private static void demoOfWriteTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.WRITE);

    Model model = dataset.getDefaultModel();

    ModelUtils.fillModel(model, FOAF_BASE_URI, FOAF_SCHEMA_FilePath);

    // insert foaf:me rdf:type foaf:Person
    Resource me = model.createResource(FOAF_BASE_URI + "me");
    Property rdfType = model.getProperty(Constants.RDF_TYPE_URL);
    Resource FOAFPersonClass = model.getResource(FOAF_BASE_URI + "Person");
    model.add(me, rdfType, FOAFPersonClass);
    // model.write(System.out);// for debug

    model.close();// closing the model to flush

    dataset.commit();

    dataset.end();
}
Project: fcrepo4-scape    File: PlanLifecycleStates.java
/**
 * Retrieve the life cycle state for a plan stored in Fedora
 * 
 * @param planId
 *            the id of the plan
 * @param uriInfo
 *            the {@link javax.ws.rs.core.UriInfo} injected by JAX-RS for having the context
 *            path available
 * @return the plan's current life cycle state
 * @throws javax.jcr.RepositoryException
 *             if an error occurred while fetching the life cycle state of
 *             the plan
 */
@GET
@Path("{id}")
public Response retrievePlanLifecycleState(@PathParam("id")
final String planId, @Context
UriInfo uriInfo) throws RepositoryException {
    /* fetch the plan RDF from fedora */
    final String planUri = "/" + Plans.PLAN_FOLDER + planId;
    final FedoraObject plan = this.objectService.findOrCreateObject(this.session, planUri);

    /* get the relevant information from the RDF dataset */
    final IdentifierTranslator subjects = new DefaultIdentifierTranslator();
    final Dataset data = plan.getPropertiesDataset(subjects);
    final Model rdfModel = SerializationUtils.unifyDatasetModel(data);

    final String lifecycle = rdfModel
            .listStatements(subjects.getSubject(plan.getNode().getPath()), rdfModel.getProperty("http://scapeproject.eu/model#hasLifecycleState"),
                    (RDFNode) null).next().getObject().asLiteral().getString();
    return Response.ok(lifecycle, MediaType.TEXT_PLAIN).build();
}
Project: cobalt    File: DatasetPopulatorTest.java
@Test
public void addModel() throws Exception {
  final Dataset ds = TDBFactory.createDataset();
  final DatasetPopulator dsp = new DatasetPopulator(ds);

  final Model model = ModelFactory.createDefaultModel();
  final Resource s = model.createResource();
  final Property p = model.createProperty("urn:example:prop", "foo");
  final Resource o = model.createResource();
  model.add(s, p, o);

  dsp.addModel(model);

  ds.begin(ReadWrite.READ);

  try {
    assertTrue(ds.getDefaultModel().containsAll(model));
  } finally {
    ds.end();
  }
}
Project: cobalt    File: DatasetPopulatorTest.java
@Test
public void inferMissingPropertyNames() throws Exception {
  final Dataset ds = TDBFactory.createDataset();
  final DatasetPopulator dsp = new DatasetPopulator(ds);
  dsp.addModel(loadModel("infer-property-names/data.ttl"));

  final Model x = loadModel("infer-property-names/expected.ttl");

  ds.begin(ReadWrite.READ);

  try {
    final Model m = ds.getDefaultModel();
    assertTrue(m.containsAll(x));
  } finally {
    ds.end();
  }
}
Project: cobalt    File: CompatibleResourceFinderTest.java
@Test
public void findCompatiblePublicProperties() {
  final Dataset ds = loadDataset("compatibility/properties.ttl");
  final CompatibleResourceFinder finder = new CompatibleResourceFinder(ds);

  final Type y0 = type(0);
  final Type y1 = type(1);
  final Type y2 = type(2);
  final Type y3 = type(3);

  final Property p0 = property(0, y0);
  final Property p1 = property(1, y1);
  final Property p2 = property(2, y2);
  final Property p3 = property(3, y3);

  final Action a1 = action(widget(1, p1), p1);
  final Action a2 = action(widget(2, p2), p2, p3);

  final PublishedProperty pp1 = new PublishedProperty(p1, a1);
  final PublishedProperty pp2 = new PublishedProperty(p2, a2);

  final Set<PublishedProperty> xpps = setOf(pp1, pp2);
  final Set<PublishedProperty> pps = finder.findCompatibleOffers(p0);

  assertEquals(pps, xpps);
}
Project: cobalt    File: CompatibleResourceFinderTest.java
@Test
public void findCompatibleFunctionalities() {
  final Dataset ds = loadDataset("compatibility/functionalities.ttl");
  final CompatibleResourceFinder finder = new CompatibleResourceFinder(ds);

  final Functionality f0 = functionality(0);
  final Functionality f1 = functionality(1);
  final Functionality f2 = functionality(2);

  final Action a1 = action(widget(1), f1);
  final Action a2 = action(widget(2), f2);

  final RealizedFunctionality rt1 = new RealizedFunctionality(f1, a1);
  final RealizedFunctionality rt2 = new RealizedFunctionality(f2, a2);

  final Set<RealizedFunctionality> xrts = setOf(rt1, rt2);
  final Set<RealizedFunctionality> rts = finder.findCompatibleOffers(f0);

  assertEquals(rts, xrts);
}
Project: xbrl2rdf    File: RdfFactoryTest.java
@Test
public void test_rdfcreation_msft() throws SAXException, IOException, ParserConfigurationException, Exception {

    Document dataDoc = parser.parse(RdfFactoryTest.class.getResourceAsStream(
            "/data/msft-20130630.xml"), -1);

    RdfFactory factory = new RdfFactory(new RunConfig(domain));
    factory.createRdfs(dataDoc, testTdbDir);

    Dataset dataset = TDBFactory.createDataset(testTdbDir);
    dataset.begin(ReadWrite.READ);
    Model model = dataset.getDefaultModel();
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    dataset.end();
}
Project: SolRDF    File: FromAndFromNamedClauses_ITCase.java
public static void main(String[] args) throws Exception {
    Dataset memoryDataset = DatasetFactory.createMem();
    Model memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_1.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://grapha.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_2.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphb.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_3.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphc.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_4.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphd.com", memoryModel);

    final Query query = QueryFactory.create(q2());//"SELECT ?s FROM <http://grapha.com> WHERE { ?s <http://example.org/title> ?o }");

    System.out.println(ResultSetFormatter.asText(QueryExecutionFactory.create(query, memoryDataset).execSelect()));
}
Project: Luzzu    File: Facets.java
private static void loadFile(File fileOrFolder){
    if (fileOrFolder.isHidden()) return ;
    if (fileOrFolder.getPath().endsWith(".trig")){
        Dataset _ds = RDFDataMgr.loadDataset(fileOrFolder.getPath());

        Iterator<String> iter = _ds.listNames();
        while (iter.hasNext()){
            String name = iter.next();
            d.addNamedModel(name, _ds.getNamedModel(name));
        }

        d.getDefaultModel().add(_ds.getDefaultModel());
    }
    if (fileOrFolder.isDirectory()){
        File[] listOfFiles = fileOrFolder.listFiles();
        for(File file : listOfFiles){
            loadFile(file);
        }
    }
}
Project: RdfLiteralStats    File: NquadsTest.java
public void doit() {
    Dataset dataset = DatasetFactory.createMem();

    Model model = dataset.getDefaultModel();
    model.read("category_labels_en.nq");
    // sanity check: the file should have loaded some triples
    // (the original tested model.READ, a constant that is always true)
    if (!model.isEmpty()) {
        System.out.println("right!!");
    }
    Query q = QueryFactory.create(query);
    QueryExecution qe = QueryExecutionFactory.create(q, model);
    ResultSet rs = qe.execSelect();
    ResultSetFormatter.out(rs);
}
Project: p3-geo-enriching-transformer    File: SpatialDataEnhancer.java
private Dataset joinDataset(Dataset baseDataset, File indexDir) throws IOException {
    EntityDefinition entDef = new EntityDefinition("entityField", "geoField");

    // you need JTS lib in the classpath to run the examples
    //entDef.setSpatialContextFactory(SpatialQuery.JTS_SPATIAL_CONTEXT_FACTORY_CLASS);
    // set custom geo predicates

    entDef.addSpatialPredicatePair(ResourceFactory.createResource("http://schema.org/latitude"), ResourceFactory.createResource("http://schema.org/longitude"));
    /*
    entDef.addSpatialPredicatePair(ResourceFactory.createResource("http://localhost/jena_example/#latitude_2"), ResourceFactory.createResource("http://localhost/jena_example/#longitude_2"));
    entDef.addWKTPredicate(ResourceFactory.createResource("http://localhost/jena_example/#wkt_1"));
    entDef.addWKTPredicate(ResourceFactory.createResource("http://localhost/jena_example/#wkt_2"));
    */
    // Lucene, index in File system.
    Directory dir = FSDirectory.open(indexDir);

    // Join together into a dataset
    Dataset ds = SpatialDatasetFactory.createLucene(baseDataset, dir, entDef);

    return ds;
}
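The spatially indexed dataset returned above can then be queried with the jena-spatial property functions such as spatial:nearby. A usage sketch, assuming the base data carries schema.org latitude/longitude pairs; the coordinates, radius, and unit are illustrative:

Dataset ds = joinDataset(baseDataset, indexDir);
ds.begin(ReadWrite.READ);
try {
    // find subjects within 10 miles of the given point
    String q = "PREFIX spatial: <http://jena.apache.org/spatial#> "
             + "SELECT ?s WHERE { ?s spatial:nearby (51.3000 -2.7100 10.0 'miles') }";
    QueryExecution qe = QueryExecutionFactory.create(q, ds);
    try {
        ResultSetFormatter.out(qe.execSelect());
    } finally {
        qe.close();
    }
} finally {
    ds.end();
}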
Project: p3-geo-enriching-transformer    File: SpatialDataEnhancer.java
public boolean isCachedGraph(Dataset dataset, String graphName){
    boolean isCached = false;
    dataset.begin(ReadWrite.READ);
    try {
        Iterator<String> inames = dataset.listNames(); // list names of the dataset the read transaction was started on
        while(inames.hasNext()){
            if( graphName.equals( inames.next() )) {
                 isCached = true;  
            }
        }
    }
    finally {
        dataset.end();
    }
    return isCached;
}
Project: coreference-evaluation    File: SemCoref.java
static public HashMap<String, String>  readSemTrig (ArrayList<String> eventIdentifierArray,String trigFolder) {
    HashMap<String, String> tokenIdMap = new HashMap<String, String>();
    ArrayList<File> trigFiles = Util.makeRecursiveFileList(new File(trigFolder), ".trig");
   // System.out.println("trigFiles.size() = " + trigFiles.size());

    for (int i = 0; i < trigFiles.size(); i++) {
        File file = trigFiles.get(i);
        Dataset dataset = RDFDataMgr.loadDataset(file.getAbsolutePath()); // no need to create a throwaway TDB dataset first
        Iterator<String> it = dataset.listNames();
        while (it.hasNext()) {
            String name = it.next();
            if (name.equals(instanceGraph)) {
                Model namedModel = dataset.getNamedModel(name);
                StmtIterator siter = namedModel.listStatements();
                while (siter.hasNext()) {
                    Statement s = siter.nextStatement();
                    updateTokenMap(eventIdentifierArray, tokenIdMap, s);
                }
            }
        }
        dataset.close();
    }
    return tokenIdMap;
}
Project: rdfindex    File: SPARQLModelIndex.java
/** Create an index using your own model saving you the time needed to import the model when using an endpoint.
 * If you only have an endpoint or want to index a subset of the triples,
 * use the static methods {@link #createIndex(String, String, List)}, {@link #createClassIndex(String, String)} or {@link #createPropertyIndex(String, String)}.
 * All triples (uri,rdfs:label,label) will be put into the index.
 * @param model the Jena model containing the rdfs:label statements that you want to index. Changes to the model after the constructor call are probably not indexed.
 * @param minSimilarity Between 0 (maximum fuzzyness) and 1f (no fuzzy matching).
 */
public SPARQLModelIndex(Model model,float minSimilarity)
{
    this.minSimilarity=minSimilarity;
    Dataset ds1 = DatasetFactory.createMem() ;

    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label) ;
    // Lucene, in memory.
    Directory dir =  new RAMDirectory();
    // Join together into a dataset
    dataset = TextDatasetFactory.createLucene(ds1, dir, entDef);
    //      ds.setDefaultModel(model);


    synchronized(model)
    {
        dataset.begin(ReadWrite.WRITE);
        try {
            dataset.getDefaultModel().add(model);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
    //      this.model = model;
}
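A minimal usage sketch for the constructor above; the label data and the 0.8f threshold are illustrative:

Model labels = ModelFactory.createDefaultModel();
labels.add(labels.createResource("http://example.org/Berlin"), RDFS.label, "Berlin");
// 0.8f allows slightly fuzzy matches; 1f would require exact matches
SPARQLModelIndex index = new SPARQLModelIndex(labels, 0.8f);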
Project: DDx2NP    File: NanopubBasis.java
/**
 * Method to save the nanopub.
 * @param f Receives the file.
 * @throws Exception It can throw an exception.
 */
public void save(String f) throws Exception {
    this.quads = this.getAllQuads();
    if (quads == null) {
        throw new Exception(
                "Quad list is null. Did you call createNanoPub() first?");
    }
    if (quads.size() == 0) {
        throw new Exception("Quad list is empty.");
    }
    Dataset ds = TDBFactory.createDataset();
    DatasetGraph dsg = ds.asDatasetGraph();
    for (int i = 0; i < quads.size(); i++) {
        dsg.add(quads.get(i));
    }
    RDFDataMgr.write(new FileOutputStream(new File(f)), dsg,
            RDFFormat.NQUADS);
}
Project: Quick-SPARQL-Endpoint    File: SparqlEndpoint.java
public void start(final int port) {
    final Dataset dataset = DatasetFactory.create(model);
    ServerConfig config = FusekiConfig.defaultConfiguration("dataset", dataset.asDatasetGraph(), false, true);
    config.port = config.pagesPort = port;
    config.pages = null;

    final SPARQLServer server = new SPARQLServer(config);
    server.start();
}
Project: PigSPARQL    File: QueryEvaluator.java
public static void main(String []args)
{
    // Parse
    //Query query = QueryFactory.read("file:C:\\SVN\\PigSPARQL_main\\queries\\q8.sparql") ;
    Query query = QueryFactory.read("file:queries/SP2Bench/q8mod.sparql") ;
    //System.out.println(query) ;

    // Generate algebra
    Op op = Algebra.compile(query) ;
    op = Algebra.optimize(op) ;
    //System.out.println(op) ;

    // Print Algebra Using SSE
    //PrintUtils.printOp(query, true);
    //System.out.println();

    String dftGraphURI = "file:datasets/SP2BEnch/dblp25M.n3" ;
    //String dftGraphURI = "file:D:\\ZerProf\\Uni\\Master\\Masterarbeit\\sp2b\\bin\\dblp50K.n3" ;
    Dataset dataset = DatasetFactory.create(dftGraphURI);

    // Execute it.
    QueryIterator qIter = Algebra.exec(op, dataset) ;

    // Results
    int results = 0;
    for ( ; qIter.hasNext() ; )
    {
        Binding b = qIter.nextBinding() ;
        results++;
        System.out.println(b) ;
    }
    qIter.close() ;
    System.out.println("# solution mappings: "+results);
}
Project: KBox    File: Server.java
public Server(int port, String pagePath, String subDomain, Model model, Listener listener) {
    this.port = port;
    this.subDomain = subDomain;
    this.pagePath = pagePath;
    Dataset dataset = DatasetFactory.create(model);
    this.dsg = dataset.asDatasetGraph();
    this.listener = listener;
}
Project: KBox    File: TDB.java
public static Model createModel(String... dbDirs) {
    Model mainModel = null; 
    Dataset dataset = null;
    for(String dbDir : dbDirs) {
        dataset = TDBFactory.createDataset(dbDir);
        if(mainModel == null) {
            mainModel = dataset.getDefaultModel();              
        } else {
            Model secondaryModel = dataset.getDefaultModel();
            mainModel = ModelFactory.createUnion(mainModel, secondaryModel);
        }
    }
    mainModel = ModelFactory.createRDFSModel(mainModel);
    return mainModel;
}
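A usage sketch for the varargs helper above; the directory paths are assumptions. The result is a single RDFS-inferencing model over the union of all the given TDB stores:

Model merged = TDB.createModel("/data/tdb/store1", "/data/tdb/store2");
System.out.println("Triples in RDFS union model: " + merged.size());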
Project: semtool    File: JenaEngine.java
public JenaEngine( Dataset dataset ) throws RepositoryException {
    openDB( new Properties() );
    tdbdir = null;
    try {
        copyFromTdb( dataset );
    }
    catch ( Exception e ) {
        log.fatal( e, e );
    }
}
Project: semtool    File: JenaEngine.java
private void copyFromTdb( String file ) throws RepositoryException {
    tdbdir = new File( file );

    Dataset dataset = TDBFactory.createDataset( file );
    try {
        copyFromTdb( dataset );
    }
    finally {
        dataset.close();
    }
}
Project: bygle-ldp    File: JenaEndPointManager.java
@Override
public void publishRecord(byte[] rdf, String rdfAbout, String host) throws Exception {
    super.publishRecord(rdf, rdfAbout, host);
    SDBConnection conn = new SDBConnection(jenaDataSource);
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, BygleSystemUtils.getDBType(databaseType));
    Store store = SDBFactory.connectStore(conn, storeDesc);
    if (!StoreUtils.isFormatted(store))
        store.getTableFormatter().create();
    Dataset dataset = SDBFactory.connectDataset(store);
    Model modelTpl = ModelFactory.createDefaultModel();
    modelTpl.read(new ByteArrayInputStream(rdf), "");
    dataset.getDefaultModel().add(modelTpl);
    store.getConnection().close();
    store.close();
}
Project: bygle-ldp    File: JenaEndPointManager.java
@Override
public void dePublishRecord(byte[] rdf, String rdfAbout, String host) throws Exception {
    super.dePublishRecord(rdf, rdfAbout, host);
    SDBConnection conn = new SDBConnection(jenaDataSource);
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, BygleSystemUtils.getDBType(databaseType));
    Store store = SDBFactory.connectStore(conn, storeDesc);
    Dataset dataset = SDBFactory.connectDataset(store);
    Model modelTpl = ModelFactory.createDefaultModel();
    modelTpl.read(new ByteArrayInputStream(rdf), "");

    StringBuilder query = new StringBuilder();
    query.append("DELETE  {?bn ?a ?b}   WHERE {");
    query.append("{<" + host + "/" + rdfAbout + "> ?p ?o");
    query.append(". FILTER(isBlank(?o))");
    query.append(". ?o ?c ?s");
    query.append(". FILTER(isBlank(?s))");
    query.append(". ?s ?d ?bn");
    query.append(". FILTER(isBlank(?bn))}");
    query.append("UNION{");
    query.append("<" + host + "/" + rdfAbout + "> ?p ?o");
    query.append(". FILTER(isBlank(?o))");
    query.append(". ?o ?c ?bn");
    query.append(". FILTER(isBlank(?bn))}");
    query.append("UNION{");
    query.append(" <" + host + "/" + rdfAbout + "> ?p ?bn");
    query.append(". FILTER(isBlank(?bn))");
    query.append("} ?bn ?a ?b}");

    UpdateAction.parseExecute(query.toString(), modelTpl);
    modelTpl.removeAll(modelTpl.createResource(host + "/" + rdfAbout), null, null);
    dataset.getDefaultModel().remove(modelTpl);
    store.getConnection().close();
    store.close();

}
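For readability, with the host and rdfAbout placeholders substituted, the StringBuilder above assembles roughly this SPARQL Update, which deletes every statement whose subject is a blank node reachable within one, two, or three hops of the record URI:

DELETE { ?bn ?a ?b }
WHERE {
  { <host/rdfAbout> ?p ?o . FILTER(isBlank(?o)) .
    ?o ?c ?s . FILTER(isBlank(?s)) .
    ?s ?d ?bn . FILTER(isBlank(?bn)) }
  UNION
  { <host/rdfAbout> ?p ?o . FILTER(isBlank(?o)) .
    ?o ?c ?bn . FILTER(isBlank(?bn)) }
  UNION
  { <host/rdfAbout> ?p ?bn . FILTER(isBlank(?bn)) }
  ?bn ?a ?b
}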
Project: bygle-ldp    File: JenaEndPointManager.java
@Override
public void executePublishing() throws Exception {
    super.executeImport();
    SDBConnection conn = new SDBConnection(jenaDataSource);
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, BygleSystemUtils.getDBType(databaseType));
    Store store = SDBFactory.connectStore(conn, storeDesc);
    if (!StoreUtils.isFormatted(store))
        store.getTableFormatter().create();
    File importDir = new File(importDirectory);
    FileFilter fileFilter = new WildcardFileFilter("*.nt");
    File[] importFiles = importDir.listFiles(fileFilter);
    if (importFiles.length > 0) {
        OntModel ontModel = ModelFactory.createOntologyModel();
        FileFilter ontologyFileFilter = new WildcardFileFilter("*.owl");
        File[] ontologyfiles = importDir.listFiles(ontologyFileFilter);
        for (int x = 0; x < ontologyfiles.length; x++) {
            FileManager.get().readModel(ontModel, ontologyfiles[x].getAbsolutePath());
        }
        System.out.println("##############################STARTING PUBLISHING#############################");
        for (int i = 0; i < importFiles.length; i++) {
            Model modelTpl = ModelFactory.createDefaultModel();
            FileManager.get().readModel(modelTpl, importFiles[i].getAbsolutePath());
            System.out.println("PUBLISHING  FILE " + importFiles[i].getName());
            System.out.println("##############################START SAVING DATA###############################");
            ontModel.add(modelTpl);
        }
        Dataset dataset = SDBFactory.connectDataset(store);
        dataset.getDefaultModel().add(ontModel);
        store.getConnection().close();
        store.close();
        System.out.println("##############################END PUBLISHING##################################");
        FileUtils.cleanDirectory(importDir);
        System.out.println("##############################PUBLISHING SUCCESS##############################");
    } else {
        System.out.println("##############################NO FILES TO PUBLISH##############################");
    }
}
Project: WikiOnto    File: DissolveGUI.java
public DissolveGUI(Dataset data, String typename) {
    this.data = data;
    this.typename = typename;
    getInstances();
    getSubtypes();
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    getContentPane();
    displayMembers();
}
Project: WikiOnto    File: SemDistGUI.java
public SemDistGUI(Dataset data) {
    this.data = data;
    getDistantEntities();
    getDistantTypes();
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    displayMembers();
}
Project: WikiOnto    File: TransformationProcessor.java
public static void main(String[] args0){
    //load dataset
    Dataset dataset;
    JFileChooser fc = new JFileChooser();
    fc.setCurrentDirectory(new File(System.getProperty("user.dir")));
    fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
    int returnVal = fc.showOpenDialog(null);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        dataset = TDBFactory.createDataset(fc.getSelectedFile().toString());
        TransformationProcessor tp = new TransformationProcessor(dataset);
        Map<String,String> pmap = new HashMap<>();
        tp.transform("deletex.sparql",pmap);
    }
}
Project: StreamEventCoreference    File: GetPerspectiveRelations.java
public static void perspectiveRelationsToTrig (String pathToTrigFile, ArrayList<PerspectiveObject> perspectiveObjects) {
    try {
        OutputStream fos = new FileOutputStream(pathToTrigFile);
        Dataset ds = TDBFactory.createDataset();
        Model defaultModel = ds.getDefaultModel();
        ResourcesUri.prefixModel(defaultModel);
      //  Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
        ResourcesUri.prefixModelGaf(defaultModel);
        JenaSerialization.addJenaPerspectiveObjects(ds, perspectiveObjects);
        RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
        fos.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Project: StreamEventCoreference    File: GetPerspectiveRelations.java
public static void perspectiveRelationsToTrigStream (OutputStream fos, ArrayList<PerspectiveObject> perspectiveObjects) {
    Dataset ds = TDBFactory.createDataset();
    Model defaultModel = ds.getDefaultModel();
    ResourcesUri.prefixModel(defaultModel);
    //  Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
    ResourcesUri.prefixModelGaf(defaultModel);
    JenaSerialization.addJenaPerspectiveObjects(ds, perspectiveObjects);
    RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
}
Project: StreamEventCoreference    File: JenaSerialization.java
static public void addJenaPerspectiveObjects(Dataset ds,
                                        ArrayList<PerspectiveObject> perspectiveObjects) {
    for (int i = 0; i < perspectiveObjects.size(); i++) {
        PerspectiveObject perspectiveObject = perspectiveObjects.get(i);
       // System.out.println("perspectiveObject.toString() = " + perspectiveObject.toString());
        perspectiveObject.addToJenaDataSet(ds);
    }
}