Java class com.hp.hpl.jena.query.DatasetFactory code examples

Project: p3-osm-transformer    File: JenaTextConfig.java
/**
 * Creates an in-memory Jena dataset with an in-memory Lucene text index, built entirely in code.
 * @return the text-indexed dataset
 */
public Dataset createMemDatasetFromCode(){
    log.info("Construct an in-memory dataset with in-memory lucene index using code") ;
    TextQuery.init();
    // Build a text dataset by code.
    // Here, in-memory base data and in-memory Lucene index
    // Base data
    Dataset jenads = DatasetFactory.createMem() ;
    Property streetAddress = jenads.getDefaultModel().createProperty("http://schema.org/streetAddress");
    // Define the index mapping
    //EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode()) ;
    EntityDefinition entDef = new EntityDefinition("uri", "text", streetAddress.asNode()) ;
    // Lucene, in memory.
    Directory dir = new RAMDirectory();
    // Join together into a dataset
    Dataset ds = TextDatasetFactory.createLucene(jenads, dir, entDef) ;
    return ds ;
}
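Usage sketch (editorial addition, not taken from the project): data added to the returned dataset's default model is indexed through the schema:streetAddress mapping and can then be searched with the jena-text text:query property function. The resource URI and literal below are illustrative, and it is assumed that JenaTextConfig can be instantiated directly.

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;

public class StreetAddressSearchSketch {
    public static void main(String[] args) {
        // Build the text-indexed dataset using the method shown above (assumes a no-arg constructor).
        Dataset ds = new JenaTextConfig().createMemDatasetFromCode();

        // Add one statement; the Lucene index picks it up via the streetAddress mapping.
        Model m = ds.getDefaultModel();
        Property streetAddress = m.createProperty("http://schema.org/streetAddress");
        m.add(m.createResource("http://example.org/place/1"), streetAddress, "1 Main Street");

        // Free-text search through the jena-text property function.
        String q = "PREFIX text: <http://jena.apache.org/text#> "
                 + "SELECT ?s WHERE { ?s text:query 'Main' }";
        QueryExecution qexec = QueryExecutionFactory.create(q, ds);
        try {
            ResultSetFormatter.out(qexec.execSelect());
        } finally {
            qexec.close();
        }
    }
}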
Project: Jena-Based-Semantic-Web-Tutorial    File: SelectQueryUsingDataset.java
@BeforeClass
public static void setUpBeforeClass() {
    String dftGraphURI = "http://www.w3.org/People/Berners-Lee/card#";
    List<String> namedGraphURIs = new ArrayList<String>();
    namedGraphURIs.add("http://www.koalie.net/foaf.rdf");
    // error
    // namedGraphURIs.add("http://heddley.com/edd/foaf.rdf");
    // 404
    // namedGraphURIs.add("http://www.cs.umd.edu/~hendler/2003/foaf.rdf");
    namedGraphURIs.add("http://www.dajobe.org/foaf.rdf");
    namedGraphURIs.add("http://www.isi.edu/~gil/foaf.rdf");
    namedGraphURIs.add("http://www.ivan-herman.net/foaf.rdf");
    namedGraphURIs.add("http://www.kjetil.kjernsmo.net/foaf");
    namedGraphURIs.add("http://www.lassila.org/ora.rdf");
    // no response
    // namedGraphURIs.add("http://www.mindswap.org/2004/owl/mindswappers");

    dataset = DatasetFactory.create(dftGraphURI, namedGraphURIs);

    assertNotNull(dataset);
}
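A follow-up test method that could sit next to the setup above is sketched here (editorial addition, assuming the usual ARQ and JUnit imports of the original class and that the remote FOAF graphs were actually retrieved); it simply counts triples per named graph in the assembled dataset.

@Test
public void countTriplesPerNamedGraph() {
    String queryString =
        "SELECT ?g (COUNT(*) AS ?triples) " +
        "WHERE { GRAPH ?g { ?s ?p ?o } } " +
        "GROUP BY ?g";
    Query query = QueryFactory.create(queryString);
    QueryExecution qexec = QueryExecutionFactory.create(query, dataset);
    try {
        ResultSet results = qexec.execSelect();
        assertTrue(results.hasNext());
        ResultSetFormatter.out(results);
    } finally {
        qexec.close();
    }
}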
Project: SolRDF    File: FromAndFromNamedClauses_ITCase.java
public static void main(String[] args) throws Exception {
    Dataset memoryDataset = DatasetFactory.createMem();
    Model memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_1.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://grapha.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_2.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphb.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_3.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphc.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_4.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphd.com", memoryModel);

    final Query query = QueryFactory.create(q2()); // alternative: "SELECT ?s FROM <http://grapha.com> WHERE { ?s <http://example.org/title> ?o }"

    System.out.println(ResultSetFormatter.asText(QueryExecutionFactory.create(query, memoryDataset).execSelect()));
}
Project: RdfLiteralStats    File: NquadsTest.java
public void doit() {
    Dataset dataset = DatasetFactory.createMem();

    Model model = dataset.getDefaultModel();
    model.read("category_labels_en.nq");
    // the read above throws on failure; a non-empty model confirms data was loaded
    if (!model.isEmpty()){
        System.out.println("Loaded " + model.size() + " statements");
    }
    Query q = QueryFactory.create(query);
    QueryExecution qe = QueryExecutionFactory.create(q, model);
    ResultSet rs = qe.execSelect();
    ResultSetFormatter.out(rs);



}
Project: rdfindex    File: SPARQLModelIndex.java
/** Creates an index from your own model, saving the time needed to import the model when using an endpoint.
 * If you only have an endpoint or want to index a subset of the triples,
 * use the static methods {@link #createIndex(String, String, List)}, {@link #createClassIndex(String, String)} or {@link #createPropertyIndex(String, String)}.
 * All triples (uri, rdfs:label, label) will be put into the index.
 * @param model the Jena model containing the rdfs:label statements that you want to index. Changes to the model after the constructor call are probably not indexed.
 * @param minSimilarity between 0 (maximum fuzziness) and 1f (no fuzzy matching).
 */
public SPARQLModelIndex(Model model,float minSimilarity)
{
    this.minSimilarity=minSimilarity;
    Dataset ds1 = DatasetFactory.createMem() ;

    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label) ;
    // Lucene, in memory.
    Directory dir =  new RAMDirectory();
    // Join together into a dataset
    dataset = TextDatasetFactory.createLucene(ds1, dir, entDef);
    //      ds.setDefaultModel(model);


    synchronized(model)
    {
        dataset.begin(ReadWrite.WRITE);
        try {
            dataset.getDefaultModel().add(model);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
    //      this.model = model;
}
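A construction sketch (editorial addition; everything except the constructor shown above is illustrative): build a small model carrying rdfs:label statements and pass it in together with a fuzziness threshold.

// Hypothetical caller; the resource URI and label are made up for illustration.
Model labels = ModelFactory.createDefaultModel();
labels.add(labels.createResource("http://example.org/Berlin"), RDFS.label, "Berlin");

// 0.9f tolerates only slight misspellings; 1f would require exact label matches.
SPARQLModelIndex index = new SPARQLModelIndex(labels, 0.9f);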
Project: Quick-SPARQL-Endpoint    File: SparqlEndpoint.java
public void start(final int port) {
    final Dataset dataset = DatasetFactory.create(model);
    ServerConfig config = FusekiConfig.defaultConfiguration("dataset", dataset.asDatasetGraph(), false, true);
    config.port = config.pagesPort = port;
    config.pages = null;

    final SPARQLServer server = new SPARQLServer(config);
    server.start();
}
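A client-side sketch (editorial addition): once start(port) returns, the dataset should be reachable over the SPARQL protocol; the /dataset/query service path below is an assumption based on the dataset name registered in the configuration, not something taken from the project.

// Hypothetical client call against the embedded server started above.
String service = "http://localhost:" + port + "/dataset/query"; // assumed service path
Query query = QueryFactory.create("SELECT (COUNT(*) AS ?n) WHERE { ?s ?p ?o }");
QueryExecution qexec = QueryExecutionFactory.sparqlService(service, query);
try {
    ResultSetFormatter.out(qexec.execSelect());
} finally {
    qexec.close();
}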
Project: PigSPARQL    File: QueryEvaluator.java
public static void main(String []args)
{
    // Parse
    //Query query = QueryFactory.read("file:C:\\SVN\\PigSPARQL_main\\queries\\q8.sparql") ;
    Query query = QueryFactory.read("file:queries/SP2Bench/q8mod.sparql") ;
    //System.out.println(query) ;

    // Generate algebra
    Op op = Algebra.compile(query) ;
    op = Algebra.optimize(op) ;
    //System.out.println(op) ;

    // Print Algebra Using SSE
    //PrintUtils.printOp(query, true);
    //System.out.println();

    String dftGraphURI = "file:datasets/SP2BEnch/dblp25M.n3" ;
    //String dftGraphURI = "file:D:\\ZerProf\\Uni\\Master\\Masterarbeit\\sp2b\\bin\\dblp50K.n3" ;
    Dataset dataset = DatasetFactory.create(dftGraphURI);

    // Execute it.
    QueryIterator qIter = Algebra.exec(op, dataset) ;

    // Results
    int results = 0;
    for ( ; qIter.hasNext() ; )
    {
        Binding b = qIter.nextBinding() ;
        results++;
        System.out.println(b) ;
    }
    qIter.close() ;
    System.out.println("# solution mappings: "+results);
}
Project: KBox    File: Server.java
public Server(int port, String pagePath, String subDomain, Model model, Listener listener) {
    this.port = port;
    this.subDomain = subDomain;
    this.pagePath = pagePath;
    Dataset dataset = DatasetFactory.create(model);
    this.dsg = dataset.asDatasetGraph();
    this.listener = listener;
}
Project: p3-osm-transformer    File: JenaTextConfig.java
/**
 * Creates a data set from an assembler file. 
 * @return the dataset described by the assembler configuration
 */
public Dataset createDatasetFromAssembler(){
    log.info("Construct text dataset using an assembler description") ;
    // There are two datasets in the configuration:
    // the one for the base data and one with text index.
    // Therefore we need to name the dataset we are interested in.
    Dataset ds = DatasetFactory.assemble("text-config.ttl", "http://localhost/jena_example/#text_dataset") ;
    return ds ;
}
Project: p3-osm-transformer    File: XsltProcessorImplTest.java
@Test
public void testOsmData() throws TransformerConfigurationException, FileNotFoundException, 
                                  TransformerException, IOException {
    InputStream xmlIn = getClass().getResourceAsStream("giglio_island.osm");
    InputStream xslIn = getClass().getResourceAsStream("/osm-way-node-keys.xsl");
    InputStream rdfIn = processor.processXml(xslIn, xmlIn, locationHeader);
    Dataset jenads = DatasetFactory.createMem() ;
    Model graph = jenads.getDefaultModel();     
    RDFDataMgr.read(graph, rdfIn, null, Lang.TURTLE);
    Property streetAddress = graph.createProperty("http://schema.org/streetAddress");
    StmtIterator istmt = graph.listStatements(null, streetAddress, (RDFNode) null);
    Assert.assertTrue(istmt.hasNext());

}
Project: jena-nosql    File: LearningSparql_ITCase.java
/**
 * Setup fixture for this test.
 */
@Before
public final void setUp() {
    factory = StorageLayerFactory.getFactory();
    dataset = DatasetFactory.create(factory.getDatasetGraph());
    memoryDataset = DatasetFactory.createMem();
}
Project: jena-nosql    File: SimpleTestClient.java
/** 
     * Starts this client.
     * 
     * @param args the command line arguments.
     */   
    public static void main(final String[] args) { 
        StorageLayerFactory factory = null; 
        Dataset dataset; 
        QueryExecution execution = null; 
        try {  
            factory = StorageLayerFactory.getFactory();
            dataset = DatasetFactory.create(factory.getDatasetGraph());

            Model model = dataset.getDefaultModel(); 
            model.read(new FileReader(new File("/home/agazzarini/Desktop/triples_dogfood.nt")), "http://ba.s.d", "N-TRIPLE");

            System.out.println(model.size());

//          Thread.sleep(1000);

//          String q = 
//                  "PREFIX : <http://learningsparql.com/ns/papers#> " +
//                  "PREFIX c: <http://learningsparql.com/ns/citations#> " +
//                  "SELECT ?s " + 
//                  "WHERE { ?s c:cites :paperA . }";
//          
//          final Query query = QueryFactory.create(q);
//          execution = QueryExecutionFactory.create(query, model);
//          
//          ResultSet rs = execution.execSelect();
//          
//          System.out.println(ResultSetFormatter.asText(rs));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (execution != null) execution.close();
            if (factory != null) factory.getClientShutdownHook().close();
        }
    }
Project: jena-nosql    File: SparqlIntegrationTestCase.java
/**
 * Setup fixture for this test.
 */
@Before
public final void setUp() {
    factory = StorageLayerFactory.getFactory();
    dataset = DatasetFactory.create(factory.getDatasetGraph());
    memoryDataset = DatasetFactory.createMem();

    load("data.ttl");
}
Project: SolRDF    File: SolRDF.java
/**
 * Builds a new SolRDF proxy with the given {@link DatasetAccessor}.
 * 
 * @param dataset the {@link DatasetAccessor} representing the remote endpoint.
 * @param solr the (remote) Solr proxy.
 */
SolRDF(
        final DatasetAccessor dataset, 
        final String sparqlEndpointAddress,
        final SolrClient solr) {
    this.remoteDataset = dataset;
    this.localDataset = DatasetFactory.createMem();
    this.solr = solr;
    this.sparqlEndpoint = sparqlEndpointAddress;
}
Project: Luzzu    File: SPARQLEndPointProcessor.java
/**
 * Generates the quality metadata corresponding to the data processed by this instance. Stores the
 * resulting metadata into a file, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeQualityMetadataFile() {
    // Build the full path of the file where quality metadata will be written
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    folder.mkdirs();

    String metadataFilePath = fld + "/quality-meta-data.trig";
    metadataFilePath = metadataFilePath.replace("//", "/");
    logger.debug("Writing quality meta-data to file: metadataFilePath...");

    File fileMetadata = new File(metadataFilePath);
    Dataset model = DatasetFactory.createMem();
    // Verify whether there's already a quality metadata file for the assessed resource and load it if so
    if(fileMetadata.exists()) {
        RDFDataMgr.read(model, metadataFilePath, this.baseURI, Lang.TRIG);
    }
    // Note that createResource() intelligently reuses the resource if found within a read model
    Resource res = ModelFactory.createDefaultModel().createResource(this.baseURI);
    QualityMetadata md = new QualityMetadata(model, res);
    // Write quality metadata about the metrics assessed through this processor
    for(String className : this.metricInstances.keySet()){
        QualityMetric m = this.metricInstances.get(className);
        md.addMetricData(m);
    }

    try {
        // Make sure the file is created (the following call has no effect if the file exists)
        fileMetadata.createNewFile();
        // Write new quality metadata into file
        OutputStream out = new FileOutputStream(fileMetadata, false);
        RDFDataMgr.write(out, md.createQualityMetadata(), RDFFormat.TRIG);

        logger.debug("Quality meta-data successfully written.");
    } catch(MetadataException | IOException ex) {
        logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
    }
}
Project: Luzzu    File: MemoryProcessor.java
@Override
public void setUpProcess() {
    this.memoryModel = DatasetFactory.createMem();

    this.isInitalised = true;

    try {
        this.loadMetrics();
    } catch (ExternalMetricLoaderException e) {
        logger.error(e.getLocalizedMessage());
    }
}
Project: Luzzu    File: MemoryProcessor.java
/**
 * Writes the quality report generated as part of this assessment process. Stores the
 * report metadata into a new file, identified by a timestamp, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeReportMetadataFile() {
    // Build the full path of the file where quality report metadata will be written.
    // Use current timestamp to identify the report corresponding to each individual quality assessment process
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    if (!(folder.exists())) folder.mkdirs();

    long timestamp = (new Date()).getTime();
    String metadataFilePath = String.format("%s/%s/problem-report-%d.trig", this.metadataBaseDir, this.baseURI.replace("http://", ""), timestamp);
    metadataFilePath = metadataFilePath.replace("//", "/");
    metadataFilePath = metadataFilePath.replaceFirst("^~",System.getProperty("user.home"));

    logger.debug("Writing quality report to file: metadataFilePath...");

    // Make sure that the quality report model has been properly generated before hand
    if(this.retreiveQualityReport() != null) {
        File fileMetadata = new File(metadataFilePath);
        Dataset model = DatasetFactory.create(this.retreiveQualityReport());

        try {
            // Make sure the file is created (the following call has no effect if the file exists)
            fileMetadata.createNewFile();
            // Write new quality metadata into file
            OutputStream out = new FileOutputStream(fileMetadata, false);
            RDFDataMgr.write(out, model, RDFFormat.TRIG);

            logger.debug("Quality report successfully written.");
        } catch(IOException ex) {
            logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
        }
    } else {
        logger.warn("Attempted to write quality report, but no report model has been generated");
    }
}
Project: Luzzu    File: LargeStreamProcessor.java
/**
 * Writes the quality report generated as part of this assessment process. Stores the
 * report metadata into a new file, identified by a timestamp, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeReportMetadataFile() {
    // Build the full path of the file where quality report metadata will be written.
    // Use current timestamp to identify the report corresponding to each individual quality assessment process
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    if (!(folder.exists())) folder.mkdirs();

    long timestamp = (new Date()).getTime();
    String metadataFilePath = String.format("%s/%s/problem-report-%d.trig", this.metadataBaseDir, this.baseURI.replace("http://", ""), timestamp);
    metadataFilePath = metadataFilePath.replace("//", "/");
    metadataFilePath = metadataFilePath.replaceFirst("^~",System.getProperty("user.home"));

    logger.debug("Writing quality report to file: metadataFilePath...");

    // Make sure that the quality report model has been properly generated before hand
    if(this.retreiveQualityReport() != null) {
        File fileMetadata = new File(metadataFilePath);
        Dataset model = DatasetFactory.create(this.retreiveQualityReport());

        try {
            // Make sure the file is created (the following call has no effect if the file exists)
            fileMetadata.createNewFile();
            // Write new quality metadata into file
            OutputStream out = new FileOutputStream(fileMetadata, false);
            RDFDataMgr.write(out, model, RDFFormat.TRIG);

            logger.debug("Quality report successfully written.");
        } catch(IOException ex) {
            logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
        }
    } else {
        logger.warn("Attempted to write quality report, but no report model has been generated");
    }
}
Project: Luzzu    File: ParallelSPARQLEndPointProcessor.java
/**
 * Generates the quality metadata corresponding to the data processed by this instance. Stores the
 * resulting metadata into a file, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeQualityMetadataFile() {
    // Build the full path of the file where quality metadata will be written
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    folder.mkdirs();

    String metadataFilePath = fld + "/quality-meta-data.trig";
    metadataFilePath = metadataFilePath.replace("//", "/");
    logger.debug("Writing quality meta-data to file: metadataFilePath...");

    File fileMetadata = new File(metadataFilePath);
    Dataset model = DatasetFactory.createMem();
    // Verify whether there's already a quality metadata file for the assessed resource and load it if so
    if(fileMetadata.exists()) {
        RDFDataMgr.read(model, metadataFilePath, this.baseURI, Lang.TRIG);
    }
    // Note that createResource() intelligently reuses the resource if found within a read model
    Resource res = ModelFactory.createDefaultModel().createResource(this.baseURI);
    QualityMetadata md = new QualityMetadata(model, res);
    // Write quality metadata about the metrics assessed through this processor
    for(String className : this.metricInstances.keySet()){
        QualityMetric m = this.metricInstances.get(className);
        md.addMetricData(m);
    }

    try {
        // Make sure the file is created (the following call has no effect if the file exists)
        fileMetadata.createNewFile();
        // Write new quality metadata into file
        OutputStream out = new FileOutputStream(fileMetadata, false);
        RDFDataMgr.write(out, md.createQualityMetadata(), RDFFormat.TRIG);

        logger.debug("Quality meta-data successfully written.");
    } catch(MetadataException | IOException ex) {
        logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
    }
}
Project: Luzzu    File: ParallelSPARQLEndPointProcessor.java
/**
 * Writes the quality report generated as part of this assessment process. Stores the
 * report metadata into a new file, identified by a timestamp, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeReportMetadataFile() {
    // Build the full path of the file where quality report metadata will be written.
    // Use current timestamp to identify the report corresponding to each individual quality assessment process
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    if (!(folder.exists())) folder.mkdirs();

    long timestamp = (new Date()).getTime();
    String metadataFilePath = String.format("%s/%s/problem-report-%d.trig", this.metadataBaseDir, this.baseURI.replace("http://", ""), timestamp);
    metadataFilePath = metadataFilePath.replace("//", "/");
    metadataFilePath = metadataFilePath.replaceFirst("^~",System.getProperty("user.home"));

    logger.debug("Writing quality report to file: metadataFilePath...");

    // Make sure that the quality report model has been properly generated before hand
    if(this.retreiveQualityReport() != null) {
        File fileMetadata = new File(metadataFilePath);
        Dataset model = DatasetFactory.create(this.retreiveQualityReport());

        try {
            // Make sure the file is created (the following call has no effect if the file exists)
            fileMetadata.createNewFile();
            // Write new quality metadata into file
            OutputStream out = new FileOutputStream(fileMetadata, false);
            RDFDataMgr.write(out, model, RDFFormat.TRIG);

            logger.debug("Quality report successfully written.");
        } catch(IOException ex) {
            logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
        }
    } else {
        logger.warn("Attempted to write quality report, but no report model has been generated");
    }
}
Project: Luzzu    File: GZNTMemoryProcessor.java
/**
 * Writes the quality report generated as part of this assessment process. Stores the
 * report metadata into a new file, identified by a timestamp, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeReportMetadataFile() {
    // Build the full path of the file where quality report metadata will be written.
    // Use current timestamp to identify the report corresponding to each individual quality assessment process
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    if (!(folder.exists())) folder.mkdirs();

    long timestamp = (new Date()).getTime();
    String metadataFilePath = String.format("%s/%s/problem-report-%d.trig", this.metadataBaseDir, this.baseURI.replace("http://", ""), timestamp);
    metadataFilePath = metadataFilePath.replace("//", "/");
    metadataFilePath = metadataFilePath.replaceFirst("^~",System.getProperty("user.home"));

    logger.debug("Writing quality report to file: metadataFilePath...");

    // Make sure that the quality report model has been properly generated before hand
    if(this.retreiveQualityReport() != null) {
        File fileMetadata = new File(metadataFilePath);
        Dataset model = DatasetFactory.create(this.retreiveQualityReport());

        try {
            // Make sure the file is created (the following call has no effect if the file exists)
            fileMetadata.createNewFile();
            // Write new quality metadata into file
            OutputStream out = new FileOutputStream(fileMetadata, false);
            RDFDataMgr.write(out, model, RDFFormat.TRIG);

            logger.debug("Quality report successfully written.");
        } catch(IOException ex) {
            logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
        }
    } else {
        logger.warn("Attempted to write quality report, but no report model has been generated");
    }
}
Project: Luzzu    File: StreamProcessor.java
/**
 * Writes the quality report generated as part of this assessment process. Stores the
 * report metadata into a new file, identified by a timestamp, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeReportMetadataFile() {
    // Build the full path of the file where quality report metadata will be written.
    // Use current timestamp to identify the report corresponding to each individual quality assessment process
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    if (!(folder.exists())) folder.mkdirs();

    long timestamp = (new Date()).getTime();
    String metadataFilePath = String.format("%s/%s/problem-report-%d.trig", this.metadataBaseDir, this.baseURI.replace("http://", ""), timestamp);
    metadataFilePath = metadataFilePath.replace("//", "/");
    metadataFilePath = metadataFilePath.replaceFirst("^~",System.getProperty("user.home"));

    logger.debug("Writing quality report to file: metadataFilePath...");

    // Make sure that the quality report model has been properly generated before hand
    if(this.retreiveQualityReport() != null) {
        File fileMetadata = new File(metadataFilePath);
        Dataset model = DatasetFactory.create(this.retreiveQualityReport());

        try {
            // Make sure the file is created (the following call has no effect if the file exists)
            fileMetadata.createNewFile();
            // Write new quality metadata into file
            OutputStream out = new FileOutputStream(fileMetadata, false);
            RDFDataMgr.write(out, model, RDFFormat.TRIG);

            logger.debug("Quality report successfully written.");
        } catch(IOException ex) {
            logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
        }
    } else {
        logger.warn("Attempted to write quality report, but no report model has been generated");
    }
}
Project: JenaTutorial    File: ReadRDF_ListTriples.java
public static void main(String[] args) 
{
    Dataset dataset = DatasetFactory.createMem();
    RDFDataMgr.read(dataset, "file:/development/presentations/trijug_semantic/JenaTutorial/data/input/rdfxml/customers.rdf.xml", Lang.RDFXML);

    Model model = dataset.getDefaultModel();

    StmtIterator sIter = model.listStatements();

    while( sIter.hasNext() )
    {
        Statement s = sIter.next();
        System.out.println( "s: " + s );
    }
}
Project: JenaTutorial    File: ReadRDFXML_WriteJSONLD.java
public static void main(String[] args) throws Exception
{
    Dataset dataset = DatasetFactory.createMem();
    RDFDataMgr.read(dataset, "file:/development/presentations/trijug_semantic/JenaTutorial/data/input/rdfxml/customers.rdf.xml", Lang.RDFXML);

    Model model = dataset.getDefaultModel();

    FileOutputStream fos = new FileOutputStream( "/development/presentations/trijug_semantic/JenaTutorial/data/output/jsonld/customers.rdf.jsonld" );
    RDFDataMgr.write( fos, model, Lang.JSONLD );
    fos.close();
}
Project: JenaTutorial    File: ReadRDFXML_WriteN3.java
public static void main(String[] args) throws Exception
{
    Dataset dataset = DatasetFactory.createMem();
    RDFDataMgr.read(dataset, "file:/development/presentations/trijug_semantic/JenaTutorial/data/input/rdfxml/customers.rdf.xml", Lang.RDFXML);

    Model model = dataset.getDefaultModel();

    FileOutputStream fos = new FileOutputStream( "/development/presentations/trijug_semantic/JenaTutorial/data/output/n3/customers.rdf.n3" );
    RDFDataMgr.write( fos, model, Lang.N3 );
    fos.close();

}
Project: JenaTutorial    File: ReadRDFXML_WriteTurtle.java
public static void main(String[] args) throws Exception
{
    Dataset dataset = DatasetFactory.createMem();
    RDFDataMgr.read(dataset, "file:/development/presentations/trijug_semantic/JenaTutorial/data/input/rdfxml/customers.rdf.xml", Lang.RDFXML);

    Model model = dataset.getDefaultModel();

    FileOutputStream fos = new FileOutputStream( "/development/presentations/trijug_semantic/JenaTutorial/data/output/turtle/customers.rdf.ttl" );
    RDFDataMgr.write( fos, model, Lang.TURTLE );
    fos.close();
}
Project: SolRDF    File: IntegrationTestSupertypeLayer.java
/**
 * Setup fixture for this test.
 */
@Before
public void setUp() throws Exception {
    super.setUp();
    memoryDataset = DatasetFactory.createMem();
}
Project: Luzzu    File: MemoryProcessor.java
/**
 * Generates the quality metadata corresponding to the data processed by this instance. Stores the
 * resulting metadata into a file, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeQualityMetadataFile() {
    // Build the full path of the file where quality metadata will be written
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    folder.mkdirs();

    String metadataFilePath = fld + "/quality-meta-data.trig";
    metadataFilePath = metadataFilePath.replace("//", "/");
    logger.debug("Writing quality meta-data to file: metadataFilePath...");

    File fileMetadata = new File(metadataFilePath);
    Dataset model = DatasetFactory.createMem();
    // Verify whether there's already a quality metadata file for the assessed resource and load it if so
    if(fileMetadata.exists()) {
        RDFDataMgr.read(model, metadataFilePath, this.baseURI, Lang.TRIG);
    }

    // Note that createResource() intelligently reuses the resource if found within a read model
    Resource res = ModelFactory.createDefaultModel().createResource(this.baseURI);
    QualityMetadata md = new QualityMetadata(model, res);

    // Write quality metadata about the metrics assessed through this processor
    for(String className : this.metricInstances.keySet()){
        QualityMetric m = this.metricInstances.get(className);
        md.addMetricData(m);
    }

    try {
        // Make sure the file is created (the following call has no effect if the file exists)
        fileMetadata.createNewFile();
        // Write new quality metadata into file
        OutputStream out = new FileOutputStream(fileMetadata, false);
        RDFDataMgr.write(out, md.createQualityMetadata(), RDFFormat.TRIG);

        logger.debug("Quality meta-data successfully written.");
    } catch(MetadataException | IOException ex) {
        logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
    }
}
Project: Luzzu    File: LargeStreamProcessor.java
/**
 * Generates the quality metadata corresponding to the data processed by this instance. Stores the
 * resulting metadata into a file, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeQualityMetadataFile() {
    // Build the full path of the file where quality metadata will be written
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    folder.mkdirs();

    String metadataFilePath = fld + "/quality-meta-data.trig";
    metadataFilePath = metadataFilePath.replace("//", "/");
    logger.debug("Writing quality meta-data to file: metadataFilePath...");

    File fileMetadata = new File(metadataFilePath);
    Dataset model = DatasetFactory.createMem();
    // Verify whether there's already a quality metadata file for the assessed resource and load it if so
    if(fileMetadata.exists()) {
        RDFDataMgr.read(model, metadataFilePath, this.baseURI, Lang.TRIG);
    }

    // Note that createResource() intelligently reuses the resource if found within a read model
    Resource res = ModelFactory.createDefaultModel().createResource(this.baseURI);
    QualityMetadata md = new QualityMetadata(model, res);

    // Write quality metadata about the metrics assessed through this processor
    for(String className : this.metricInstances.keySet()){
        QualityMetric m = this.metricInstances.get(className);
        md.addMetricData(m);
    }

    try {
        // Make sure the file is created (the following call has no effect if the file exists)
        fileMetadata.createNewFile();
        // Write new quality metadata into file
        OutputStream out = new FileOutputStream(fileMetadata, false);
        RDFDataMgr.write(out, md.createQualityMetadata(), RDFFormat.TRIG);

        logger.debug("Quality meta-data successfully written.");
    } catch(MetadataException | IOException ex) {
        logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
    }
}
Project: Luzzu    File: GZNTMemoryProcessor.java
/**
 * Generates the quality metadata corresponding to the data processed by this instance. Stores the
 * resulting metadata into a file, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeQualityMetadataFile() {
    // Build the full path of the file where quality metadata will be written
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    folder.mkdirs();

    String metadataFilePath = fld + "/quality-meta-data.trig";
    metadataFilePath = metadataFilePath.replace("//", "/");
    logger.debug("Writing quality meta-data to file: metadataFilePath...");

    File fileMetadata = new File(metadataFilePath);
    Dataset model = DatasetFactory.createMem();
    // Verify whether there's already a quality metadata file for the assessed resource and load it if so
    if(fileMetadata.exists()) {
        RDFDataMgr.read(model, metadataFilePath, this.baseURI, Lang.TRIG);
    }

    // Note that createResource() intelligently reuses the resource if found within a read model
    Resource res = ModelFactory.createDefaultModel().createResource(this.baseURI);
    QualityMetadata md = new QualityMetadata(model, res);

    // Write quality metadata about the metrics assessed through this processor
    for(String className : this.metricInstances.keySet()){
        QualityMetric m = this.metricInstances.get(className);
        md.addMetricData(m);
    }

    try {
        // Make sure the file is created (the following call has no effect if the file exists)
        fileMetadata.createNewFile();
        // Write new quality metadata into file
        OutputStream out = new FileOutputStream(fileMetadata, false);
        RDFDataMgr.write(out, md.createQualityMetadata(), RDFFormat.TRIG);

        logger.debug("Quality meta-data successfully written.");
    } catch(MetadataException | IOException ex) {
        logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
    }
}
Project: Luzzu    File: StreamProcessor.java
/**
 * Generates the quality metadata corresponding to the data processed by this instance. Stores the
 * resulting metadata into a file, along with the corresponding configuration parameters.
 * TODO: Consider other concurrency cases such as: several instances of the JVM and different class loaders
 */
private synchronized void writeQualityMetadataFile() {
    this.isGeneratingQMD = true;
    // Build the full path of the file where quality metadata will be written
    String fld = this.metadataBaseDir + "/" + this.baseURI.replace("http://", "");
    fld = fld.replaceFirst("^~",System.getProperty("user.home"));

    File folder = new File(fld);
    folder.mkdirs();

    String metadataFilePath = fld + "/quality-meta-data.trig";
    metadataFilePath = metadataFilePath.replace("//", "/");
    logger.debug("Writing quality meta-data to file: metadataFilePath...");

    File fileMetadata = new File(metadataFilePath);
    Dataset model = DatasetFactory.createMem();
    // Verify whether there's already a quality metadata file for the assessed resource and load it if so
    if(fileMetadata.exists()) {
        RDFDataMgr.read(model, metadataFilePath, this.baseURI, Lang.TRIG);
    }

    // Note that createResource() intelligently reuses the resource if found within a read model
    Resource res = ModelFactory.createDefaultModel().createResource(this.baseURI);
    QualityMetadata md = new QualityMetadata(model, res);

    // Write quality metadata about the metrics assessed through this processor
    for(String className : this.metricInstances.keySet()){
        QualityMetric m = this.metricInstances.get(className);
        md.addMetricData(m);
    }

    try {
        // Make sure the file is created (the following call has no effect if the file exists)
        fileMetadata.createNewFile();
        // Write new quality metadata into file
        OutputStream out = new FileOutputStream(fileMetadata, false);
        RDFDataMgr.write(out, md.createQualityMetadata(), RDFFormat.TRIG);

        logger.debug("Quality meta-data successfully written.");
    } catch(MetadataException | IOException ex) {
        logger.error("Quality meta-data could not be written to file: " + metadataFilePath, ex);
    }
    this.isGeneratingQMD = false;
    this.endedGeneratingQMD = true;
}
Project: JenaTutorial    File: FederatedSparqlQuery.java
public static void main(String[] args) 
{
    Dataset dataset = DatasetFactory.createMem();

    Reasoner reasoner = ReasonerRegistry.getRDFSReasoner();
    reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, 
               ReasonerVocabulary.RDFS_DEFAULT);

    InfModel infmodel = ModelFactory.createInfModel(reasoner, dataset.getDefaultModel() );

    /* Do a SPARQL Query over the data in the model */
    String queryString = 
        "SELECT * WHERE { SERVICE <http://dbpedia.org/sparql> {  <http://dbpedia.org/resource/George_Harrison> ?p ?o . } } LIMIT 10";

    /* Now create and execute the query using a Query object */
    Query query = QueryFactory.create(queryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ;      

    QueryExecUtils.executeQuery(qexec);


    System.out.println( "done" );

}
Project: p3-geo-enriching-transformer    File: SpatialDataEnhancer.java
private Dataset createDatasetByCode(File indexDir) throws IOException {
    // Base data
    Dataset ds1 = DatasetFactory.createMem();
    return joinDataset(ds1, indexDir);
}
Project: quack    File: Quack.java
public static Dataset createDataset(Location location, OpExecutorFactory executorFactory) {
    return DatasetFactory.create(createDatasetGraph(location, executorFactory)) ;
}
Project: quack    File: TestOpExecutorNode.java
@Override
protected Dataset createDataset() {
    return DatasetFactory.createMem() ;
}
Project: vocidex    File: SPARQLRunner.java
public SPARQLRunner(Model model) {
    this(DatasetFactory.create(model));
}
Project: sdlnot-rules    File: SparqlDLNotRulesEngine.java
private Dataset kb2ds(KnowledgeBase kb) {
    final PelletInfGraph graph = new org.mindswap.pellet.jena.PelletReasoner()
            .bind(kb);
    return DatasetFactory.create(ModelFactory.createInfModel(graph));
}
Project: quality    File: VocabularyLoader.java
public void clearDataset(){
    this.dataset.close();
    this.dataset = DatasetFactory.createMem();
}
Project: JenaTutorial    File: JenaTextMain1.java
public static void main(String[] args) 
{

    TextQuery.init();

    Dataset dataset = DatasetFactory.createMem();

    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode()) ;

    // Lucene, in memory.
    Directory dir =  new RAMDirectory();

    // Join together into a dataset
    Dataset ds = TextDatasetFactory.createLucene(dataset, dir, entDef);


    Model m = ds.getDefaultModel();

    Resource rSubject = m.createResource( "http://ontology.fogbeam.com/example/TestResource1" );
    Resource rSubject2 = m.createResource( "http://ontology.fogbeam.com/example/TestResource2" );

    try
    {

        Statement s = m.createStatement(rSubject, RDFS.label, "This is a Test Resource" );

        m.add( s );

        Statement s2 = m.createStatement(rSubject2, RDFS.label, "Bratwurst Test" );

        m.add( s2 );

        String baseQueryString =
                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> " +
                "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " +
                "PREFIX dc: <http://purl.org/dc/elements/1.1/> " +
                "PREFIX dcterm: <http://purl.org/dc/terms/> " +
                "PREFIX owl: <http://www.w3.org/2002/07/owl#> " +
                "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
                "PREFIX text: <http://jena.apache.org/text#>";


        /* Do a SPARQL query using Jena-Text here... */
        String queryString = baseQueryString + " SELECT * { ?s  text:query( 'Test') ; . ?s rdfs:label ?label . }";


        Query query = QueryFactory.create(queryString) ;


        QueryExecution qexec = QueryExecutionFactory.create(query, ds );
        QueryExecUtils.executeQuery(qexec);

        m.close();


    }
    catch( Exception e )
    {
        e.printStackTrace();
    }

    System.out.println( "done" );
}