Java class com.hp.hpl.jena.query.ReadWrite — example source code

Project: p3-osm-transformer    File: JenaTextConfig.java
/**
 * Import the data into the dataset. When a new dataset is imported, the old data is deleted.
 * @param dataset the dataset to load the data into
 * @param file the file to read the data from
 */
public void loadData(Dataset dataset, String file){
    log.info("Start loading") ;
    long startTime = System.nanoTime() ;
    dataset.begin(ReadWrite.WRITE) ;
    try {
        Model m = dataset.getDefaultModel() ;
        log.info("Number of triples before loading: " + m.size());
        RDFDataMgr.read(m, file) ;
        log.info("Number of triples after loading: " + m.size());
        dataset.commit() ;
    } 
    finally { 
        dataset.end() ;
    }
    long finishTime = System.nanoTime() ;
    double time = (finishTime-startTime)/1.0e6 ;
    log.info(String.format("Finish loading - %.2fms", time)) ;
}
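
A hedged usage sketch (the TDB directory path and input file are placeholders, and loadData is assumed to be callable on a JenaTextConfig instance):

    // sketch only: the directory and file name are hypothetical
    Dataset dataset = TDBFactory.createDataset("/tmp/tdb-example");
    new JenaTextConfig().loadData(dataset, "osm-data.ttl");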
Project: lodreclib    File: RDFTripleExtractor.java
/**
 * Load jena TDB
 */
private void TDBloading(){

    logger.info("TDB loading");

    // create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);

    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ);
    model = dataset.getDefaultModel();
    dataset.end() ;

    // if model is null load local dataset into jena TDB
    if(model == null)
        TDBloading(datasetFile);

}
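
Note: for a TDB-backed dataset, getDefaultModel() never returns null, so the null check above never fires, and the model handle is only read after dataset.end() has closed the transaction. A sketch of the presumably intended emptiness check, kept inside the READ transaction:

    dataset.begin(ReadWrite.READ);
    boolean empty;
    try {
        // decide inside the transaction whether the store needs loading
        empty = dataset.getDefaultModel().isEmpty();
    } finally {
        dataset.end();
    }
    if (empty)
        TDBloading(datasetFile);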
Project: Jena-Based-Semantic-Web-Tutorial    File: TDBManipulation.java
private static void demoOfReadTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.READ);

    // Get model inside the transaction
    Model model = dataset.getDefaultModel();

    // query the inserted facts
    StringBuilder query = SPARQLUtils.getRegualrSPARQLPREFIX();
    query.append("PREFIX foaf: <http://xmlns.com/foaf/0.1/>").append(Constants.NEWLINE);
    query.append("SELECT DISTINCT ?person WHERE {?person rdf:type foaf:Person}");
    SPARQLUtils.query(model, query.toString(), "?person");

    model.close();// closing the model to flush

    dataset.end();
}
Project: Jena-Based-Semantic-Web-Tutorial    File: TDBManipulation.java
private static void demoOfWriteTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.WRITE);

    Model model = dataset.getDefaultModel();

    ModelUtils.fillModel(model, FOAF_BASE_URI, FOAF_SCHEMA_FilePath);

    // insert foaf:me rdf:type foaf:Person
    Resource me = model.createResource(FOAF_BASE_URI + "me");
    Property rdfType = model.getProperty(Constants.RDF_TYPE_URL);
    Resource FOAFPersonClass = model.getResource(FOAF_BASE_URI + "Person");
    model.add(me, rdfType, FOAFPersonClass);
    // model.write(System.out);// for debug

    model.close();// closing the model to flush

    dataset.commit();

    dataset.end();
}
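
Note: demoOfWriteTransaction commits outside any try/finally, so an exception in fillModel would leave the write transaction open. A guarded variant of the same pattern, as a sketch:

    dataset.begin(ReadWrite.WRITE);
    try {
        Model model = dataset.getDefaultModel();
        // ... add statements as above ...
        dataset.commit();
    } finally {
        dataset.end(); // aborts the transaction if commit() was never reached
    }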
Project: cobalt    File: CompatibleResourceFinder.java
private Set<O> findOffers(final T request) {
  final Set<O> offers = new HashSet<>();

  dataset.begin(ReadWrite.READ);

  try {

    final Model model = dataset.getDefaultModel();

    try (QueryExecution qx = QueryExecutionFactory.create(getQuery(request, model), dataset)) {
      final ResultSet rs = qx.execSelect();
      while (rs.hasNext()) {
        offers.add(createOffer(rs.next()));
      }
    }

  } finally {
    dataset.end();
  }

  return offers;
}
Project: cobalt    File: DatasetPopulator.java
/**
 * Validate a model, infer statements and add it to the dataset
 *
 * @param model the model to add
 *
 * @throws InvalidModelException when the model is invalid according to the ontology
 */
public void addModel(final Model model) throws InvalidModelException {
  // Expect each model's graph to be disjoint from all other model graphs. When that's not the case,
  // the union of all graphs may result in an invalid model, because one model could contain statements
  // incompatible with statements of another model.
  assertModel(model);

  inferPropertyNames(model);

  dataset.begin(ReadWrite.WRITE);
  try {
    dataset.getDefaultModel().add(model);
    dataset.commit();
  } finally {
    dataset.end();
  }
}
Project: cobalt    File: DatasetPopulatorTest.java
@Test
public void addModel() throws Exception {
  final Dataset ds = TDBFactory.createDataset();
  final DatasetPopulator dsp = new DatasetPopulator(ds);

  final Model model = ModelFactory.createDefaultModel();
  final Resource s = model.createResource();
  final Property p = model.createProperty("urn:example:prop", "foo");
  final Resource o = model.createResource();
  model.add(s, p, o);

  dsp.addModel(model);

  ds.begin(ReadWrite.READ);

  try {
    assertTrue(ds.getDefaultModel().containsAll(model));
  } finally {
    ds.end();
  }
}
Project: cobalt    File: DatasetPopulatorTest.java
@Test
public void inferMissingPropertyNames() throws Exception {
  final Dataset ds = TDBFactory.createDataset();
  final DatasetPopulator dsp = new DatasetPopulator(ds);
  dsp.addModel(loadModel("infer-property-names/data.ttl"));

  final Model x = loadModel("infer-property-names/expected.ttl");

  ds.begin(ReadWrite.READ);

  try {
    final Model m = ds.getDefaultModel();
    assertTrue(m.containsAll(x));
  } finally {
    ds.end();
  }
}
Project: xbrl2rdf    File: RdfFactoryTest.java
@Test
public void test_rdfcreation_fb() throws Exception {

    Document dataDoc = parser.parse(RdfFactoryTest.class.getResourceAsStream(
            "/data/fb-20121231.xml"), -1);

    RdfFactory factory = new RdfFactory(new RunConfig(domain));
    factory.createRdfs(dataDoc, testTdbDir);

    Dataset dataset = TDBFactory.createDataset(testTdbDir);
    dataset.begin(ReadWrite.READ);
    Model model = dataset.getDefaultModel();
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    dataset.end();
}
Project: xbrl2rdf    File: RdfFactoryTest.java
@Test
public void test_rdfcreation_msft() throws Exception {

    Document dataDoc = parser.parse(RdfFactoryTest.class.getResourceAsStream(
            "/data/msft-20130630.xml"), -1);

    RdfFactory factory = new RdfFactory(new RunConfig(domain));
    factory.createRdfs(dataDoc, testTdbDir);

    Dataset dataset = TDBFactory.createDataset(testTdbDir);
    dataset.begin(ReadWrite.READ);
    Model model = dataset.getDefaultModel();
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    dataset.end();
}
Project: p3-geo-enriching-transformer    File: SpatialDataEnhancer.java
public boolean isCachedGraph(Dataset dataset, String graphName){
    boolean isCached = false;
    dataset.begin(ReadWrite.READ);
    try {
        Iterator<String> inames = getDataset().listNames();
        while(inames.hasNext()){
            if( graphName.equals( inames.next() )) {
                 isCached = true;  
            }
        }
    }
    finally {
        dataset.end();
    }
    return isCached;
}
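
Note: the loop above iterates getDataset().listNames() even though the transaction was begun on the dataset parameter. Assuming both refer to the same dataset, Jena's Dataset.containsNamedModel does the same check without the manual scan; a sketch:

    dataset.begin(ReadWrite.READ);
    try {
        // direct lookup instead of scanning all graph names
        return dataset.containsNamedModel(graphName);
    } finally {
        dataset.end();
    }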
Project: rdfindex    File: SPARQLModelIndex.java
/** Create an index using your own model, saving you the time needed to import the model when using an endpoint.
 * If you only have an endpoint or want to index a subset of the triples,
 * use the static methods {@link #createIndex(String, String, List)}, {@link #createClassIndex(String, String)} or {@link #createPropertyIndex(String, String)}.
 * All triples (uri, rdfs:label, label) will be put into the index.
 * @param model the Jena model containing the rdfs:label statements that you want to index. Changes to the model after the constructor call are probably not indexed.
 * @param minSimilarity between 0 (maximum fuzziness) and 1f (no fuzzy matching).
 */
public SPARQLModelIndex(Model model, float minSimilarity)
{
    this.minSimilarity=minSimilarity;
    Dataset ds1 = DatasetFactory.createMem() ;

    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label) ;
    // Lucene, in memory.
    Directory dir =  new RAMDirectory();
    // Join together into a dataset
    dataset = TextDatasetFactory.createLucene(ds1, dir, entDef);
    //      ds.setDefaultModel(model);


    synchronized(model)
    {
        dataset.begin(ReadWrite.WRITE);
        try {
            dataset.getDefaultModel().add(model);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
    //      this.model = model;
}
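
Once built, the Lucene-backed dataset can answer jena-text lookups. A hedged query sketch (the fuzzy term 'label~' is Lucene syntax, which relates to minSimilarity; prefixes as in the jena-text documentation):

    String q = "PREFIX text: <http://jena.apache.org/text#> "
             + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
             + "SELECT ?uri ?label { ?uri text:query (rdfs:label 'label~') ; rdfs:label ?label }";
    dataset.begin(ReadWrite.READ);
    try (QueryExecution qe = QueryExecutionFactory.create(q, dataset)) {
        ResultSetFormatter.out(qe.execSelect());
    } finally {
        dataset.end();
    }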
Project: WikiOnto    File: DissolveGUI.java
private void getInstances() {
    File metricFile = new File(
            System.getProperty("user.dir") + "/sparql/queries/getInstances.sparql");

    List<String> lines = null;

    try {
        lines = Files.readAllLines(metricFile.toPath());
    } catch (IOException ex) {
        Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex);
    }
    String queryString = "";
    for (String line : lines) {
        queryString += line + System.lineSeparator();
    }
    ParameterizedSparqlString pss = new ParameterizedSparqlString();
    pss.setCommandText(queryString);
    pss.setLiteral("typename", typename);
    data.begin(ReadWrite.READ);
    List<QuerySolution> rlist = null;
    try (QueryExecution qe = QueryExecutionFactory.create(pss.asQuery(), data)) {
        ResultSet results = qe.execSelect();
        rlist = ResultSetFormatter.toList(results);
    } catch (Exception e) {
        JOptionPane.showMessageDialog(null, "Writing to text area failed!");
        e.printStackTrace();
    }
    instances = new String[rlist.size()];
    for(int j = 0; j < rlist.size(); j++){
        instances[j] = rlist.get(j).getLiteral("iname").getString();
    }
    data.end();
}
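
The file-reading preamble here recurs verbatim in the next three methods; a small helper (hypothetical name readSparqlFile) would remove the duplication and the string concatenation in the loop:

    // same behavior as the per-line loop above: file contents joined by line separators
    private static String readSparqlFile(String relativePath) throws IOException {
        File f = new File(System.getProperty("user.dir") + relativePath);
        return String.join(System.lineSeparator(), Files.readAllLines(f.toPath()));
        // usage: String queryString = readSparqlFile("/sparql/queries/getInstances.sparql");
    }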
Project: WikiOnto    File: DissolveGUI.java
private void getSubtypes() {
    File metricFile = new File(
            System.getProperty("user.dir") + "/sparql/queries/getSubtypes.sparql");

    List<String> lines = null;

    try {
        lines = Files.readAllLines(metricFile.toPath());
    } catch (IOException ex) {
        Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex);
    }
    String queryString = "";
    for (String line : lines) {
        queryString += line + System.lineSeparator();
    }
    ParameterizedSparqlString pss = new ParameterizedSparqlString();
    pss.setCommandText(queryString);
    pss.setLiteral("typename", typename);
    data.begin(ReadWrite.READ);
    List<QuerySolution> rlist = null;
    try (QueryExecution qe = QueryExecutionFactory.create(pss.asQuery(), data)) {
        ResultSet results = qe.execSelect();
        rlist = ResultSetFormatter.toList(results);
    } catch (Exception e) {
        JOptionPane.showMessageDialog(null, "Writing to text area failed!");
        e.printStackTrace();
    }
    data.end();
    subtypes = new String[rlist.size()];
    for(int j = 0; j < rlist.size(); j++){
        subtypes[j] = rlist.get(j).getLiteral("sname").getString();
    }
}
Project: WikiOnto    File: SemDistGUI.java
private void getDistantEntities(){
    File metricFile = new File(
            System.getProperty("user.dir") + "/sparql/smells/SemanticallyDistantEntity.sparql");

    List<String> lines = null;

    try {
        lines = Files.readAllLines(metricFile.toPath());
    } catch (IOException ex) {
        Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex);
    }
    String queryString = "";
    for (String line : lines) {
        queryString += line + System.lineSeparator();
    }

    data.begin(ReadWrite.READ);
    List<QuerySolution> rlist = null;
    Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
    try (QueryExecution qe = QueryExecutionFactory.create(query, data)) {
        ResultSet results = qe.execSelect();
        rlist = ResultSetFormatter.toList(results);
    } catch (Exception e) {
        JOptionPane.showMessageDialog(null, "Writing to text area failed!");
        e.printStackTrace();
    }
    instances = new String[rlist.size()];
    for(int j = 0; j < rlist.size(); j++){
        instances[j] = rlist.get(j).getLiteral("entityname").getString();
    }
    data.end();
}
Project: WikiOnto    File: SemDistGUI.java
private void getDistantTypes(){
    File metricFile = new File(
            System.getProperty("user.dir") + "/sparql/smells/SemanticallyDistantType.sparql");

    List<String> lines = null;

    try {
        lines = Files.readAllLines(metricFile.toPath());
    } catch (IOException ex) {
        Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex);
    }
    String queryString = "";
    for (String line : lines) {
        queryString += line + System.lineSeparator();
    }

    data.begin(ReadWrite.READ);
    List<QuerySolution> rlist = null;
    Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
    try (QueryExecution qe = QueryExecutionFactory.create(query, data)) {
        ResultSet results = qe.execSelect();
        rlist = ResultSetFormatter.toList(results);
    } catch (Exception e) {
        JOptionPane.showMessageDialog(null, "Writing to text area failed!");
        e.printStackTrace();
    }
    subtypes = new String[rlist.size()];
    for(int j = 0; j < rlist.size(); j++){
        subtypes[j] = rlist.get(j).getLiteral("typename").getString();
    }
    data.end();
}
Project: WikiOnto    File: TransformationProcessor.java
public long transform(String tfilename, Map<String,String> parameter){
    File tfile = new File(System.getProperty("user.dir")+"/sparql/transformations/"+tfilename);
    String transformation = "";
    try {
        List<String> lines = Files.readAllLines(tfile.toPath());
        for(String line : lines){
            transformation += line + "\n";
        }
    } catch (IOException ex) {
        System.err.println("Exception transforming: " + tfilename);
    }
    dataset.begin(ReadWrite.WRITE);
    try {
        Graph graph = dataset.asDatasetGraph().getDefaultGraph();
        long size = graph.size();
        ParameterizedSparqlString pss = new ParameterizedSparqlString();
        pss.setCommandText(transformation);
        for(String key : parameter.keySet()){
            String query = pss.asUpdate().toString();
            if(!parameter.get(key).contains("http://")){
                pss.setLiteral(key, parameter.get(key).trim());
            } else {
                pss.setIri(key, parameter.get(key).trim());
            }
            if(query.equals(pss.asUpdate().toString())) {
                JOptionPane.showMessageDialog(null, "Query names are flawed. This should not happen.");
                System.err.println(pss.toString());
                return 0; // the finally block aborts the still-open transaction
            }
        }
        UpdateAction.execute(pss.asUpdate(), graph);
        size = graph.size() - size;
        dataset.commit();
        return size;
    } finally {
        dataset.end(); // closes the transaction on both the commit and early-return paths
    }
}
Project: WikiOnto    File: QueryProcessor.java
@Override
public void run() {
    dataset.begin(ReadWrite.READ);
    System.out.println("------------------");
    System.out.println(query);
    Op op = Algebra.compile(query);
    op = Algebra.optimize(op);
    System.out.println(op);
    System.out.println("------------------");
    System.out.println(query);
    long time = System.currentTimeMillis();
    try (QueryExecution qe = QueryExecutionFactory.create(query, dataset)) {
        ResultSet results = qe.execSelect();
        if(pretty){
            System.out.println("Output as pretty printed text");
            ResultSetFormatter.out(stream, results, query);

        }else{
            System.out.println("Output as CSV");
            ResultSetFormatter.outputAsCSV(stream, results);

        }
    }catch (Exception e){
        JOptionPane.showMessageDialog(null, "Writing to text area failed!");
        e.printStackTrace();
    }
    time = System.currentTimeMillis() - time;
    String timeString = "\n Performed query in: "+time+"ms";
    try {
        stream.write(timeString.getBytes());
        stream.showText();
    } catch (IOException ex) {
        JOptionPane.showMessageDialog(null, "Writing to text area failed!");
    }
    System.out.println(time);
    System.out.println("Finished query");
    dataset.end();
}
Project: p3-osm-transformer    File: JenaTextConfigTest.java
private int queryData(Dataset dataset, String toponym){
    int addressCounter = 0;
    log.info("START") ;

    long startTime = System.nanoTime() ;

    String pre = StrUtils.strjoinNL( 
        "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" ,
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>" ,
        "PREFIX schema: <http://schema.org/>" ,
        "PREFIX text: <http://jena.apache.org/text#>" ,
        "PREFIX ogc: <http://www.opengis.net/ont/geosparql#>") ;
    String qs = StrUtils.strjoinNL( "SELECT DISTINCT ?s ?address ?wkt " ,
                                " { ?s text:query (schema:streetAddress '" + toponym + "') ;" ,
                                "      schema:streetAddress ?address ;" ,
                                "      ogc:geometry ?geo ." ,
                                "   ?geo ogc:asWKT ?wkt ." ,
                                " }") ;

    dataset.begin(ReadWrite.READ) ;
    try {
        Query q = QueryFactory.create(pre + "\n" + qs) ;
        // close the QueryExecution when done instead of leaking it
        try (QueryExecution qexec = QueryExecutionFactory.create(q, dataset)) {
            //QueryExecUtils.executeQuery(q, qexec) ;
            ResultSet results = qexec.execSelect();
            while (results.hasNext()) {
                addressCounter++;
                QuerySolution sol = results.nextSolution();
                System.out.println(sol.get("s"));
            }
        }
    } 
    finally { 
        dataset.end() ; 
    }
    long finishTime = System.nanoTime() ;
    double time = (finishTime-startTime)/1.0e6 ;
    log.info(String.format("FINISH - %.2fms", time)) ;
    return addressCounter;
}
Project: lodreclib    File: RDFTripleExtractor.java
/**
 * Load local dataset into jena TDB
 */
private void TDBloading(String fileDump){

    logger.info("TDB creation");

    // create tdb from .nt local file 
    FileManager fm = FileManager.get();
    fm.addLocatorClassLoader(RDFTripleExtractor.class.getClassLoader());
    InputStream in = fm.open(fileDump);

    Location location = new Location(tdbDirectory);

    // load some initial data
    try{
        TDBLoader.load(TDBInternal.getBaseDatasetGraphTDB(TDBFactory.createDatasetGraph(location)), in, true);
    }
    catch(Exception e){
        logger.error("TDB loading error: " + e.getMessage());
    }

    logger.info("TDB loading");

    //create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);

    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ) ;
    model = dataset.getDefaultModel();
    dataset.end();

}
Project: cobalt    File: RequestOfferDistanceFinder.java
private <T extends Identifiable> int findDistance(final T request, final T offer, final Property property) {
  dataset.begin(ReadWrite.READ);
  try {
    final Model model = dataset.getDefaultModel();
    final Resource source = asResource(offer, model);
    final Resource target = asResource(request, model);
    final ShortestPathFinder finder = new ShortestPathFinder(model, property);
    return finder.findShortestPathLength(source, target);
  } finally {
    dataset.end();
  }
}
Project: cobalt    File: WidgetActionFinder.java
private Set<Action> getWidgetActions(final Widget widget) {
  dataset.begin(ReadWrite.READ);
  try {
    final Model model = dataset.getDefaultModel();
    final Resource wr = model.createResource(widget.getIdentifier().toString());
    final Iterator<Resource> ars = getActionResources(wr);
    return CachingResourceInternalizers.actions.internalizeAll(ars);
  } finally {
    dataset.end();
  }
}
Project: cobalt    File: Datasets.java
/**
 * Create a transactional in-memory dataset and load data from a given URI.
 * <p/>
 * The dataset returned by {@link RDFDataMgr#loadDataset(String)} is not transactional.
 *
 * @param uri URI of data to load
 *
 * @return a new dataset
 */
public static Dataset loadDataset(final String uri) {
  final Dataset ds = TDBFactory.createDataset();
  ds.begin(ReadWrite.WRITE);
  try {
    RDFDataMgr.read(ds, uri);
    ds.commit();
  } finally {
    ds.end();
  }
  return ds;
}
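
A hedged usage example for loadDataset (the URI is a placeholder):

    Dataset ds = Datasets.loadDataset("http://example.org/data.ttl");
    ds.begin(ReadWrite.READ);
    try {
        System.out.println("Triples loaded: " + ds.getDefaultModel().size());
    } finally {
        ds.end();
    }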
Project: sadlos2    File: CsvImporter.java
/**
 * Method to add a model to the TDB repository with appropriate transaction control (in a thread-safe manner)
 * @param m
 * @return true if successful
 */
protected boolean addModelToTdbDS(Model m) {
    logger.debug("Adding a model to the TDB Repo Dataset " + tdbDS);
    getTdbDS(true).begin(ReadWrite.WRITE);
    try {
        tdbDS.getDefaultModel().add(m);
        tdbDS.commit();
    } finally {
        // make sure the write transaction is closed even if add() fails
        tdbDS.end();
    }
    return true;
}
Project: sadlos2    File: CsvImporter.java
/** Method to get the default model from the TDB repository in a thread-safe manner
 * 
 * @return the default model
 */
protected Model getModelFromTdbDS() {
    logger.debug("Retrieving TDB Repo default model from Dataset " + tdbDS);
    Dataset ds = getTdbDS(false);
    getTdbDS(true).begin(ReadWrite.READ);
    Model m = tdbDS.getDefaultModel();
    tdbDS.end();
    if (ds == null) {
        closeTdbDS();
    }
    return m;
}
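
Note: the model handle returned above outlives the READ transaction, so iterating it later may fail on a transactional TDB dataset. A defensive variant, sketched here, copies the statements out before ending the transaction (trading memory for safety):

    getTdbDS(true).begin(ReadWrite.READ);
    Model copy;
    try {
        // snapshot the default model while the transaction is still open
        copy = ModelFactory.createDefaultModel().add(tdbDS.getDefaultModel());
    } finally {
        tdbDS.end();
    }
    return copy;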
Project: rdfindex    File: SPARQLModelIndex.java
private ResultSet executeSelect(String query) {
    dataset.begin(ReadWrite.READ);
    try (QueryExecution qe = QueryExecutionFactory.create(QueryFactory.create(query, Syntax.syntaxARQ), dataset)) {
        // materialize the results before the transaction ends: a streaming
        // ResultSet must not be consumed after dataset.end()
        return ResultSetFactory.copyResults(qe.execSelect());
    } finally {
        dataset.end();
    }
}
Project: semtool    File: JenaEngine.java
private void copyFromTdb( Dataset dataset ) throws RepositoryException {
    ValueFactory vf = rc.getValueFactory();

    if ( dataset.supportsTransactions() ) {
        dataset.begin( ReadWrite.READ );
    }

    // Get model inside the transaction
    Model model = dataset.getDefaultModel();
    StmtIterator si = model.listStatements();

    try {
        rc.begin();
        while ( si.hasNext() ) {
            Statement stmt = si.next();
            com.hp.hpl.jena.rdf.model.Resource rsr = stmt.getSubject();
            Property pred = stmt.getPredicate();
            RDFNode val = stmt.getObject();
            Node valnode = val.asNode();

            Resource sub;
            try {
                sub = ( rsr.isAnon()
                        ? vf.createBNode( valnode.getBlankNodeLabel() )
                        : vf.createURI( rsr.toString() ) );
            }
            catch ( UnsupportedOperationException uoo ) {
                log.warn( uoo, uoo );
                continue;
            }

            URI pred2 = vf.createURI( pred.toString() );
            Value val2;

            if ( val.isLiteral() ) {
                Literal lit = val.asLiteral();
                String dtstr = lit.getDatatypeURI();
                URI dt = ( null == dtstr ? null : vf.createURI( dtstr ) );
                String langstr = lit.getLanguage();

                if ( null == dt ) {
                    if ( langstr.isEmpty() ) {
                        val2 = vf.createLiteral( lit.toString() );
                    }
                    else {
                        val2 = vf.createLiteral( lit.toString(), langstr );
                    }
                }
                else {
                    val2 = vf.createLiteral( lit.toString(), dt );
                }
            }
            else {
                if ( val.isAnon() ) {
                    val2 = vf.createBNode( valnode.getBlankNodeLabel() );
                }
                else {
                    val2 = vf.createURI( val.toString() );
                }
            }
            rc.add( sub, pred2, val2 );
        }
        rc.commit();
    }
    catch ( RepositoryException re ) {
        rc.rollback();
        throw re;
    }
    finally {
        if ( dataset.supportsTransactions() ) {
            dataset.end();
        }
    }
}
Project: semtool    File: JenaEngine.java
@Override
public void startRDF() throws RDFHandlerException {
    dataset.begin( ReadWrite.WRITE );
    model = dataset.getDefaultModel();
    model.removeAll();
}
Project: c4a_data_repository    File: AutoReloadableDataset.java
public void begin(ReadWrite readWrite) {
    throw new UnsupportedOperationException("Read-only dataset");
}
Project: reneviz    File: JenaService.java
public ResultSetRewindable runLocalOp(Op op) {
    long startTime = System.currentTimeMillis();
    Query q = OpAsQuery.asQuery(op);
    logger.debug("Running query on the local dataset" + ":"
    // + "\n\nORIGINAL OP:\n"
    // + op.toString()
    // + "\n\nOPTIMIZED OP\n"
    // + Algebra.optimize(op)
            + "\n\nSPARQL QUERY\n" + q.toString(Syntax.syntaxARQ));

    try {
        Integer key = op.toString().hashCode();
        if (cache.containsKey(key)) {
            logger.debug("The query was cached.");
            return cache.get(key);
        }

        ds.begin(ReadWrite.READ);

        QueryIterator qIter = Algebra.exec(op, this.ds);

        List<String> vars = new LinkedList<String>();
        for (Var var : OpAsQuery.asQuery(op).getProjectVars()) {
            vars.add(var.getVarName());
        }

        ResultSetRewindable results = ResultSetFactory
                .copyResults(ResultSetFactory.create(qIter, vars));

        long endTime = System.currentTimeMillis();
        String timeString = new SimpleDateFormat("mm:ss:SSS")
                .format(new Date(endTime - startTime));

        // cache disabled
        // cache.put(op.toString().hashCode(), results);

        logger.info("The query returned after " + timeString + " with "
                + results.size() + " results");
        return results;
    } finally {
        ds.end();
    }
}
Project: p3-osm-transformer    File: OsmRdfTransformer.java
/**
 * Search for an address (a node in OSM).
 * @param ds the dataset to search
 * @param address the input address; contains a schema:streetAddress with the name of the street, the locality and the country code.
 * @return the geocoordinates of the street that has been found.
 */
private TripleCollection geocodeAddress(Dataset ds, Address address){
    TripleCollection geoCodeRdf = new SimpleMGraph();

    String pre = StrUtils.strjoinNL( 
        "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" ,
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>" ,
        "PREFIX schema: <http://schema.org/>" ,
        "PREFIX text: <http://jena.apache.org/text#>" ,
        "PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>" ,
        "PREFIX ogc: <http://www.opengis.net/ont/geosparql#>") ;

    String qs = StrUtils.strjoinNL( "SELECT ?s ?street ?lat ?lon" ,
                                " { ?s text:query (schema:streetAddress '" + address.getStreetAddress() + "') ;" ,
                                "      schema:streetAddress ?street ;" ,
                                "      schema:addressLocality \"" + address.getLocality() + "\" ;" ,
                                "      schema:addressCountry \"" + address.getCountryCode() + "\" ;" ,
                                "      geo:lat ?lat ;" ,
                                "      geo:long ?lon ." ,                                                                       
                                " }") ;

    log.info(pre + "\n" + qs);

    ds.begin(ReadWrite.READ) ;
    try {
        Query q = QueryFactory.create(pre + "\n" + qs) ;
        // close the QueryExecution when done instead of leaking it
        try (QueryExecution qexec = QueryExecutionFactory.create(q, ds)) {
            //QueryExecUtils.executeQuery(q, qexec) ;
            ResultSet results = qexec.execSelect();
            int numberOfAddresses = 0;
            while (results.hasNext()) {
                QuerySolution sol = results.nextSolution();
                String streetUriName = sol.getResource("s").getURI();
                String streetName = sol.getLiteral("?street").getString();
                String latitude = sol.getLiteral("?lat").getLexicalForm();
                String longitude = sol.getLiteral("?lon").getLexicalForm();
                UriRef addressRef = new UriRef(streetUriName);
                geoCodeRdf.add(new TripleImpl(addressRef, schema_streetAddress, new PlainLiteralImpl(streetName)));
                geoCodeRdf.add(new TripleImpl(addressRef, schema_addressLocality, new PlainLiteralImpl(address.getLocality())));
                geoCodeRdf.add(new TripleImpl(addressRef, schema_addressCountry, new PlainLiteralImpl(address.getCountryCode())));
                geoCodeRdf.add(new TripleImpl(addressRef, geo_lat, new PlainLiteralImpl(latitude)));
                geoCodeRdf.add(new TripleImpl(addressRef, geo_lon, new PlainLiteralImpl(longitude)));
                numberOfAddresses++;
            }
            log.info("Number of addresses like " + address.getStreetAddress() + " found: " + numberOfAddresses);
        }
    } 
    finally { 
        ds.end() ; 
    }

    return geoCodeRdf;
}
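
The query above splices the address strings directly into SPARQL, so a quote character in a street name breaks the query. A sketch of the same lookup built with ParameterizedSparqlString (already used elsewhere in this listing) to escape the values:

    ParameterizedSparqlString pss = new ParameterizedSparqlString(pre + "\n"
        + "SELECT ?s ?street ?lat ?lon"
        + " { ?s text:query (schema:streetAddress ?q) ;"
        + "      schema:streetAddress ?street ;"
        + "      schema:addressLocality ?loc ;"
        + "      schema:addressCountry ?cc ;"
        + "      geo:lat ?lat ;"
        + "      geo:long ?lon . }");
    pss.setLiteral("q", address.getStreetAddress());
    pss.setLiteral("loc", address.getLocality());
    pss.setLiteral("cc", address.getCountryCode());
    Query q = pss.asQuery(); // values are substituted as escaped literals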
Project: p3-osm-transformer    File: OsmRdfTransformer.java
/**
 * Search for a street (a way in OSM).
 * @param ds the dataset to search
 * @param address the input address; contains a schema:streetAddress with the name of the street.
 * @return the geometry of the street that has been found, with the coordinates serialized as WKT.
 */
private TripleCollection geocodeStreet(Dataset ds, Address address){
    TripleCollection geoCodeRdf = new SimpleMGraph();

    String pre = StrUtils.strjoinNL( 
        "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" ,
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>" ,
        "PREFIX schema: <http://schema.org/>" ,
        "PREFIX text: <http://jena.apache.org/text#>" ,
        "PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>" ,
        "PREFIX ogc: <http://www.opengis.net/ont/geosparql#>") ;

    String qs = StrUtils.strjoinNL( "SELECT ?s ?street ?geometry ?wkt " ,
                                " { ?s text:query (schema:streetAddress '" + address.getStreetAddress() + "') ;" ,
                                "      schema:streetAddress ?street ;" ,
                                "      schema:addressLocality \"" + address.getLocality() + "\" ;" ,
                                "      schema:addressCountry \"" + address.getCountryCode() + "\" ;" ,
                                "      ogc:geometry ?geometry ." ,
                                "   ?geometry ogc:asWKT ?wkt ." ,
                                " }") ;

    System.out.println(pre + "\n" + qs);

    ds.begin(ReadWrite.READ) ;
    try {
        Query q = QueryFactory.create(pre + "\n" + qs) ;
        // close the QueryExecution when done instead of leaking it
        try (QueryExecution qexec = QueryExecutionFactory.create(q, ds)) {
            //QueryExecUtils.executeQuery(q, qexec) ;
            ResultSet results = qexec.execSelect();
            int numberOfToponyms = 0;
            while (results.hasNext()) {
                QuerySolution sol = results.nextSolution();
                String streetUriName = sol.getResource("s").getURI();
                String streetName = sol.getLiteral("?street").getString();
                Resource geometry = sol.getResource("?geometry");
                String geoUri = geometry.getURI();
                String wkt = sol.getLiteral("?wkt").getString();
                UriRef streetRef = new UriRef(streetUriName);
                UriRef geometryRef = new UriRef(geoUri);
                geoCodeRdf.add(new TripleImpl(streetRef, schema_streetAddress, new PlainLiteralImpl(streetName)));
                geoCodeRdf.add(new TripleImpl(streetRef, schema_addressLocality, new PlainLiteralImpl(address.getLocality())));
                geoCodeRdf.add(new TripleImpl(streetRef, schema_addressCountry, new PlainLiteralImpl(address.getCountryCode())));
                geoCodeRdf.add(new TripleImpl(streetRef, new UriRef("http://www.opengis.net/ont/geosparql#geometry"), geometryRef));
                geoCodeRdf.add(new TripleImpl(geometryRef, new UriRef("http://www.opengis.net/ont/geosparql#asWKT"), new PlainLiteralImpl(wkt)));
                numberOfToponyms++;
            }
            log.info("Number of toponyms like " + address.getStreetAddress() + " found: " + numberOfToponyms);
        }
    } 
    finally { 
        ds.end() ; 
    }

    return geoCodeRdf;
}
Project: jena-android    File: RDFReadWriteActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_rdfread_write);

    TextView textView = (TextView) findViewById(R.id.rdfTextView);
    textView.setMovementMethod(new ScrollingMovementMethod());

    // create tdb dataset
    File directory = new File(getFilesDir(), "tdb_dataset");
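    // note: File.delete() only removes an empty directory, so a non-empty
    // dataset directory left over from a previous run is reused as-is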
    directory.delete();
    directory.mkdir();
    Dataset dataset = TDBFactory.createDataset(directory.getAbsolutePath()) ;
    dataset.begin(ReadWrite.WRITE);
    Model model = dataset.getDefaultModel();

    // read a turtle file from assets and write them into a tdb backed model
    try {
        InputStream skos_ttl = getAssets().open("skos.ttl");
        RDFDataMgr.read(model, skos_ttl, Lang.TURTLE);
        skos_ttl.close();
        InputStream geosparql_rdf_xml = getAssets().open("geosparql_vocab_all.rdf");
        RDFDataMgr.read(model, geosparql_rdf_xml, Lang.RDFXML);
        geosparql_rdf_xml.close();
    } catch (IOException e) {
        Log.e(TAG, e.toString());
        e.printStackTrace();
    }

    // add some example data
    String personURI    = "http://somewhere/JohnSmith";
    String fullName     = "John Smith";
    Resource johnSmith = model.createResource(personURI);
    johnSmith.addProperty(VCARD.FN, fullName);

    dataset.commit();
    dataset.end();

    // read from tdb and print triples
    dataset.begin(ReadWrite.READ);
    Query query = QueryFactory.create("SELECT * WHERE { ?s ?p ?o } LIMIT 2");
    // query the dataset inside the READ transaction and close the QueryExecution;
    // the model handle from the earlier WRITE transaction should not be reused here
    try (QueryExecution qexec = QueryExecutionFactory.create(query, dataset)) {
        ResultSet results = qexec.execSelect();
        String resultString = ResultSetFormatter.asText(results);
        Log.d(TAG, resultString);
    }

    StringWriter dump = new StringWriter();
    RDFDataMgr.write(dump, dataset, RDFFormat.JSONLD_PRETTY);

    dataset.end();
    dataset.close();

    textView.setText(dump.toString());

}
Project: OpenCollegeGraph    File: AutoReloadableDataset.java
public void begin(ReadWrite readWrite) {
    throw new UnsupportedOperationException("Read-only dataset");
}
Project: uncc2014watsonsim    File: DBPediaCandidateType.java
/**
 * Find the possible lexical types of a name.
 * For example, viaDBPedia("New York") might return:
 *  {"populated place", "place", "municipality"}.
 */
public List<String> viaDBPedia(String text) {
    /*
     * ABOUT THE QUERY
     * ===============
     * Most of these results are not really excellent.
     * The recall is pretty high but the precision is low because it
     * matches every name that _contains_ the candidate answer.
     * 
     * So, we should probably trim the results to the most popular names.
     * 
     * BUT many queries have thousands of names so this will probably be
     * slow. Meaning we probably need to compromise or develop our own
     * solution.
     * 
     * ABOUT THE RESULTS
     * =================
     * A lot of the results are generic, like "place". And "city" is also
     * a place, so it may just be inadequately tagged. We probably need
     * some graph algorithm to help with this.
     * Some results have synonyms. "country" is a real tag, but "nation" is
     * not. WordNet can help with this.
     * 
     */

    rdf.begin(ReadWrite.READ);
    List<String> types = new ArrayList<>();
    try (QueryExecution qe = QueryExecutionFactory.create(getQuery(text), 
            rdf.getDefaultModel())) {
        ResultSet rs = qe.execSelect();
        while (rs.hasNext()) {
            QuerySolution s = rs.next();
            RDFNode node = s.get("?kind");
            if (node == null) {}
            else if (node.isLiteral())
                types.add(node.asLiteral().getLexicalForm().toLowerCase());
            else if (node.isResource())
                types.add(node.asResource().getLocalName().toLowerCase());
        }
    } finally {
        rdf.end();
    }

    return types;
}
Project: sadlos2    File: SadlJenaModelGetter.java
/**
 * Call this method to get a model by its public URI and if necessary read it using the specified ModelReader
 */
public Model getModel( String uri, ModelReader loadIfAbsent ) {
    boolean addToTDB = addMissingModelToTDB;
    Model m = null;
    if (uri.equals(IConfigurationManager.ServicesConfigurationURI)) {
        // this is a special case--it is always left as an OWL file in RDF/XML format
        addToTDB = false;
    }
    else if (getFormat().equals(IConfigurationManager.JENA_TDB)) {
        // try TDB first
        m = getModel(uri);
    }
    if (m == null && loadIfAbsent != null) {
        String altUrl = configurationManager.getJenaDocumentMgr().doAltURLMapping(uri);
        if (altUrl != null && altUrl.endsWith(".TDB/")) {
            try {
                SadlUtils su = new SadlUtils();
                Dataset tmpds = TDBFactory.createDataset(su.fileUrlToFileName(altUrl));
                tmpds.begin(ReadWrite.READ);
                m = tmpds.getDefaultModel();
                tmpds.end();
            } catch (MalformedURLException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        else {
            m = ModelFactory.createDefaultModel();
            loadIfAbsent.readModel( m, altUrl != null ? altUrl : uri );
            if (addToTDB && ds != null && getFormat().equals(IConfigurationManager.JENA_TDB)) {
                ds.begin(ReadWrite.WRITE);
                ds.addNamedModel( uri, m );
                ds.commit();
                ds.end();
                TDB.sync(ds);
            }
        }
        loadUserDefinedDataTypes(uri, altUrl);
    }

    return m;
}