Java 类com.hp.hpl.jena.rdf.model.ResourceFactory 实例源码

项目:ld-sniffer    文件:Evaluation.java   
/**
 * Builds a dereferenceability quality report for the collected HTTP
 * responses and attaches it to the given model.
 *
 * @param model the Jena model the report resources are created in
 * @return the newly created quality-report resource
 */
public Resource addQualityReport(Model model) {
    String reportUri = LDQM.QUALITY_REPORT_PREFIX + UUID.randomUUID().toString();
    Resource report = model.createResource(reportUri, QPRO.QualityReport);
    report.addProperty(QPRO.computedOn,
            ResourceFactory.createTypedLiteral(DATE_FORMAT.format(new Date()), XSDDatatype.XSDdateTime));

    Resource problem = model.createResource(QPRO.QualityProblem);
    problem.addProperty(QPRO.isDescribedBy, LDQM.IRIdereferenceability);

    for (HttpResponse response : responseMap.values()) {
        int status = response.getStatusCode();
        // Anything outside the 2xx range (including redirects) counts as
        // an undereferenceable URI.
        if (status >= 200 && status < 300) {
            continue;
        }
        Resource defect = model.createResource(LDQM.Defect_UndereferenceableURI);
        defect.addProperty(DCTerms.subject, response.getUri());
        if (status > 0) {
            // A status of 0 means no HTTP status was obtained at all.
            defect.addLiteral(HTTP.statusCodeValue, model.createTypedLiteral(status, XSDDatatype.XSDint));
        }
        defect.addLiteral(HTTP.methodName, response.getMethod());
        String reason = response.getReason();
        if (reason != null) {
            defect.addLiteral(HTTP.reasonPhrase, reason);
        }
        problem.addProperty(QPRO.problematicThing, defect);
    }

    report.addProperty(QPRO.hasProblem, problem);
    report.addProperty(PROV.wasGeneratedBy, evaluation);
    return report;
}
项目:r2rml-kit    文件:Message.java   
/**
 * Creates a validation message about a mapping term.
 *
 * @param problem the kind of problem being reported
 * @param term the offending mapping term; may be null
 * @param detailCode optional error code; indicates a subclass of problems
 * @param details optional string containing error details
 * @param contextResource resource the problem occurred on; may be null
 * @param contextProperty property the problem occurred on; may be null
 */
public Message(Problem problem, MappingTerm term, 
        String detailCode, String details, 
        Resource contextResource, Property contextProperty) {
    this.problem = problem;
    this.subject = contextResource;
    if (contextProperty == null) {
        this.predicates = Collections.<Property>emptyList();
    } else {
        this.predicates = Collections.singletonList(contextProperty);
    }
    if (term == null) {
        this.objects = Collections.<RDFNode>emptyList();
    } else {
        // The term itself is reported as a plain-literal object.
        this.objects = Collections.<RDFNode>singletonList(
                ResourceFactory.createPlainLiteral(term.toString()));
    }
    this.detailCode = detailCode;
    this.details = details;
    this.cause = null;
}
项目:r2rml-kit    文件:PrettyTurtleWriter.java   
/**
 * Serializes a single RDF node to its Turtle token: a prefixed/relativized
 * URI, a quoted literal with optional language tag or datatype suffix, or a
 * stable blank-node label.
 */
private String toTurtle(RDFNode r) {
    if (r.isURIResource()) {
        String relativized = relativize(r.asResource().getURI());
        return PrettyPrinter.qNameOrURI(relativized, prefixes);
    }
    if (r.isLiteral()) {
        StringBuffer token = new StringBuffer(quote(r.asLiteral().getLexicalForm()));
        String lang = r.asLiteral().getLanguage();
        if (!"".equals(lang)) {
            token.append("@").append(lang);
        } else if (r.asLiteral().getDatatype() != null) {
            // The datatype URI goes through the same serializer so it is
            // prefixed/relativized consistently.
            token.append("^^").append(
                    toTurtle(ResourceFactory.createResource(r.asLiteral().getDatatypeURI())));
        }
        return token.toString();
    }
    // Blank node: assign a fresh label on first sight, then reuse it.
    if (!blankNodeMap.containsKey(r)) {
        blankNodeMap.put(r.asResource(), "_:b" + blankNodeCounter++);
    }
    return blankNodeMap.get(r);
}
项目:r2rml-kit    文件:D2RQWriter.java   
/**
 * Writes a d2rq:TranslationTable, its href/javaClass properties, and all of
 * its db-value/rdf-value translation pairs to the output.
 */
private void printTranslationTable(TranslationTable table) {
    printMapObject(table, D2RQ.TranslationTable);
    out.printURIProperty(D2RQ.href, table.getHref());
    out.printProperty(D2RQ.javaClass, table.getJavaClass());
    List<Map<Property,RDFNode>> values = new ArrayList<Map<Property,RDFNode>>();
    for (Translation translation : table.getTranslations()) {
        // LinkedHashMap keeps databaseValue before rdfValue in the output.
        Map<Property,RDFNode> pair = new LinkedHashMap<Property,RDFNode>();
        pair.put(D2RQ.databaseValue, ResourceFactory.createPlainLiteral(translation.dbValue()));
        pair.put(D2RQ.rdfValue, ResourceFactory.createPlainLiteral(translation.rdfValue()));
        values.add(pair);
    }
    out.printCompactBlankNodeProperties(D2RQ.translation, values);
}
项目:entityclassifier-core    文件:DisambiguationPageValidator.java   
/**
 * Checks whether the given URI denotes a DBpedia disambiguation page, i.e.
 * whether the model contains a dbo:wikiPageDisambiguates statement with the
 * URI as subject. The disambiguation datasets are lazily loaded on first use.
 *
 * @param uri resource URI to test
 * @return true if at least one wikiPageDisambiguates statement exists
 */
public boolean isDisambiguationResource(String uri) {

    if (!linksLoaded) {
        System.out.println(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        loadDisambiguationDataset(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET, "English");
        loadDisambiguationDataset(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET, "German");
        loadDisambiguationDataset(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET, "Dutch");
        linksLoaded = true;
    }

    StmtIterator iter = model.listStatements( new SimpleSelector(
            ResourceFactory.createResource(uri), 
            ResourceFactory.createProperty("http://dbpedia.org/ontology/wikiPageDisambiguates"), 
                    (RDFNode)null));

    return iter.hasNext();
}

/**
 * Reads one N-Triples dataset into the shared model.
 * FIX: the input streams were previously never closed; close them here.
 */
private void loadDisambiguationDataset(String location, String languageName) {
    InputStream in = FileManager.get().open(location);
    try {
        model.read(in, null, "N-TRIPLES");
        System.out.println("Loaded " + languageName + " disambiguation dataset.");
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (java.io.IOException ignored) {
                // best effort: a close failure after a successful read is not fatal
            }
        }
    }
}
项目:entityclassifier-core    文件:YagoOntologyManager.java   
/**
 * Collects the rdfs:subClassOf statements along the superclass chain that
 * starts at the given URI.
 *
 * BUG FIX: the selector previously always queried the original {@code uri},
 * so every loop iteration re-added the same statements (deduplicated by the
 * model) instead of walking up the hierarchy one level at a time.
 *
 * @param uri URI of the class to start from
 * @return a new model holding the subClassOf statements of each class in
 *         the chain
 */
public Model getHierarchyModel(String uri){

    Model m = ModelFactory.createDefaultModel();
    OntoRecord current = new OntoRecord();
    current.setUri(uri);

    while (current != null) {
        // Add this level's subClassOf statements, then climb one level up.
        StmtIterator iter1 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(current.getUri()), RDFS.subClassOf, (RDFNode) null));
        m.add(iter1);
        current = getSuperclass(current.getUri());
    }

    return m;
}
项目:entityclassifier-core    文件:DBpediaOntologyManager.java   
/**
 * Follows rdfs:subClassOf from the given resource and returns the first
 * superclass that carries an rdfs:label in the requested language.
 *
 * BUG FIX: the {@code lang} parameter was previously ignored and "en" was
 * hard-coded; callers asking for other languages silently got English.
 *
 * @param resourceURI URI of the class whose superclass is wanted
 * @param lang language tag the label must carry (e.g. "en")
 * @return a record with the superclass URI and its label, or null if no
 *         superclass has a label in the requested language
 */
public OntoRecord getSubclass(String resourceURI, String lang){

    // Loop-invariant properties, created once.
    Property subClassOf = ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf");
    Property labelProp = ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#label");

    StmtIterator superclasses = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(resourceURI), subClassOf, (RDFNode) null));

    while (superclasses.hasNext()) {
        OntoRecord record = new OntoRecord();
        record.setUri(superclasses.next().getObject().toString());

        StmtIterator labels = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(record.getUri()), labelProp, (RDFNode) null));
        while (labels.hasNext()) {
            Literal res = (Literal) labels.next().getObject();
            if (res.getLanguage().equals(lang)) {
                record.setLabel(res.getString());
                return record;
            }
        }
    }
    return null;
}
项目:mdb2rdf    文件:RdbToRdf.java   
/**
 * Adds the statements of one converted row to the global MODEL. When a
 * statement's predicate matches the key column and a resource with the same
 * key value already exists, the row is linked to that existing resource
 * instead of being added again.
 */
private void addRowToMODEL(List<Statement> sa, String key, String puri) {
    for (Statement stmt : sa) {
        if (MODEL.contains(stmt)) {
            continue;
        }
        boolean linkedToExisting = false;
        if (stmt.getPredicate().getLocalName().equals(key)) {
            ResIterator existing = MODEL.listResourcesWithProperty(stmt.getPredicate(), stmt.getObject());
            if (existing.hasNext()) {
                // Assume all matching resources are equal; link to the first.
                Resource parent = existing.nextResource();
                Property link = ResourceFactory.createProperty(genOURI(), puri);
                MODEL.add(ResourceFactory.createStatement(parent, link, stmt.getSubject()));
                linkedToExisting = true;
            }
        }
        if (!linkedToExisting) {
            MODEL.add(stmt);
        }
    }
}
项目:mdb2rdf    文件:RdbToRdf.java   
/**
 * Converts one database row into a list of RDF statements about the given
 * subject resource. Null and explicitly excluded values are skipped.
 *
 * Cleanup: removed the unused local {@code int i = 0;} and the redundant
 * {@code toString()} calls on the String column names.
 *
 * @param table the table the row belongs to (used for column metadata)
 * @param row the row to convert
 * @param rcs the subject resource the statements describe
 * @return the statements for all usable columns of the row
 */
private List<Statement> convertRowToStatement(Table table, Row row, Resource rcs) {
    List<Statement> sa = new ArrayList<Statement>(row.size());

    for (String attr : row.keySet()) {
        Object value = row.get(attr);
        // Skip empty values and values the exclusion rules filter out.
        if (value == null || excludedValue(attr, value.toString())) {
            continue;
        }
        RDFNode attrRcs = createRDFNode(table.getColumn(attr), value);
        Property p = ResourceFactory.createProperty(genOURI(), attr);
        sa.add(ResourceFactory.createStatement(rcs, p, attrRcs));
    }

    return sa;
}
项目:reneviz    文件:TopicRegistry.java   
/**
 * Registers the fixed set of topic class URIs this registry knows about.
 */
private void addTopics() {
    final String[] topicUris = {
            "http://vivoweb.org/ontology/core#FacultyMember",
            "http://xmlns.com/foaf/0.1/Person",
            "http://xmlns.com/foaf/0.1/Organization",
            "http://vivoweb.org/ontology/core#Project",
            "http://purl.org/ontology/bibo/Conference",
            "http://purl.org/ontology/bibo/Workshop",
            "http://vivoweb.org/ontology/core#Grant",
            "http://purl.org/NET/c4dm/event.owl#Event"
    };
    for (String topicUri : topicUris) {
        topics.add(ResourceFactory.createResource(topicUri));
    }
}
项目:rocker    文件:ModelManager.java   
/**
 * Loads the deduplication specification at the given path and maps each
 * related graph URI to the list of classes required for it.
 *
 * Improvement: the two {@code createProperty} calls are loop-invariant and
 * are now created once instead of on every iteration.
 *
 * @param path location of the specification file (any RDF syntax Jena can load)
 * @return map from graph URI to the classes required for that graph
 */
public static HashMap<String, List<Resource>> loadClassList(String path) {
    HashMap<String, List<Resource>> res = new HashMap<>();
    // load specification file
    Model model = RDFDataMgr.loadModel(path);
    // Hoisted: constant across all loop iterations.
    Property relatedGraph = ResourceFactory.createProperty("http://aksw.org/deduplication/relatedGraph");
    Property requiredClasses = ResourceFactory.createProperty("http://aksw.org/deduplication/requiredClasses");
    // get all graphs
    Iterator<Statement> statIt = model.listStatements((Resource) null, relatedGraph, (RDFNode) null);
    while (statIt.hasNext()) {
        Statement s = statIt.next();
        Resource dataset = s.getSubject();
        String graph = s.getObject().as(Resource.class).getURI();
        // get all classes for each graph
        ArrayList<Resource> classes = new ArrayList<>();
        Iterator<RDFNode> nodeIt = model.listObjectsOfProperty(dataset, requiredClasses);
        while (nodeIt.hasNext()) {
            classes.add(nodeIt.next().as(Resource.class));
        }
        res.put(graph, classes);
    }
    return res;
}
项目:rocker    文件:SQLiteManager.java   
/**
 * Initializes the result-set query selecting the id column plus the hash
 * columns for the given properties only.
 *
 * Fixes: the query string was previously built with += in a loop and then
 * trimmed with substring(0, len - 2), which threw
 * StringIndexOutOfBoundsException for an empty property array; now a
 * StringBuilder is used and the empty case is handled.
 *
 * NOTE(review): {@code tableName} is concatenated into the SQL string — it
 * must come from trusted configuration, never from user input.
 *
 * @param tableName name of the table to query
 * @param propertynames properties whose column aliases should be selected
 * @throws SQLException if the query fails
 */
public void getHashes(String tableName, String[] propertynames) throws SQLException {

    this.aliasesCache = new String[propertynames.length + 1];
    aliasesCache[0] = "id";

    StringBuilder cols = new StringBuilder();
    for (int i = 1; i < aliasesCache.length; i++) {
        // Alias is "p" + 1-based index of the property in the reference map.
        aliasesCache[i] = "p"
                + (propRefMap.get(tableName).indexOf(ResourceFactory.createProperty(propertynames[i - 1])) + 1);
        if (i > 1) {
            cols.append(", ");
        }
        cols.append(aliasesCache[i]);
    }

    String query = (cols.length() == 0)
            ? "select id from " + tableName
            : "select id, " + cols + " from " + tableName;
    this.resSet = statement.executeQuery(query);
}
项目:rocker    文件:RockerTest.java   
/**
 * Returns the property set used by test case {@code i}. Unknown case
 * numbers yield an empty set.
 */
private static Set<Property> getProperties(int i) {
    final String ns = "http://www.okkam.org/ontology_person1.owl#";
    String[] localNames;
    switch (i) {
    case 1:
        localNames = new String[] { "has_address" };
        break;
    case 2:
        localNames = new String[] { "soc_sec_id" };
        break;
    case 3:
        // "phone_numer" reproduces the spelling used by the test data.
        localNames = new String[] { "given_name", "phone_numer", "surname" };
        break;
    case 4:
        localNames = new String[] { "age", "phone_numer", "surname" };
        break;
    default:
        localNames = new String[0];
        break;
    }
    HashSet<Property> prop = new HashSet<Property>();
    for (String localName : localNames) {
        prop.add(ResourceFactory.createProperty(ns + localName));
    }
    return prop;
}
项目:BlazegraphBasedTPFServer    文件:BigdataStatementToJenaStatementMapper.java   
/**
 * Maps a Bigdata RDF value to the corresponding Jena RDFNode: resources are
 * delegated to convertToJenaResource; literals are rebuilt as typed,
 * language-tagged or plain Jena literals.
 *
 * @throws IllegalArgumentException if the value is neither a resource nor a literal
 */
public RDFNode convertToJenaRDFNode( final BigdataValue v )
{
    if ( v instanceof BigdataResource ) {
        return convertToJenaResource( (BigdataResource) v );
    }
    if ( !(v instanceof BigdataLiteral) ) {
        throw new IllegalArgumentException( v.getClass().getName() );
    }

    final BigdataLiteral literal = (BigdataLiteral) v;
    final String lexicalForm = literal.getLabel();
    final URI datatypeURI = literal.getDatatype();

    // Datatype wins over language tag; plain literal is the fallback.
    if ( datatypeURI == null ) {
        final String languageTag = literal.getLanguage();
        if ( languageTag == null ) {
            return ResourceFactory.createPlainLiteral( lexicalForm );
        }
        return ResourceFactory.createLangLiteral( lexicalForm, languageTag );
    }
    final RDFDatatype dt = JENA_TYPE_MAPPER.getSafeTypeByName( datatypeURI.stringValue() );
    return ResourceFactory.createTypedLiteral( lexicalForm, dt );
}
项目:LODVader    文件:RetrieveRDF.java   
/**
 * Adds the given dataset as a void:Dataset to the output model, with its
 * title (falling back to the URI), triple count, and subset link.
 */
private void addDatasetToModel(DatasetDB dataset, String subset) {
    Resource r = outModel.createResource(dataset.getUri());
    r.addProperty(RDFProperties.type, ResourceFactory.createResource(NS.VOID_URI + "Dataset"));

    // Fall back to the URI when the dataset carries no title.
    String name = (dataset.getTitle() == null) ? dataset.getUri() : dataset.getTitle();
    r.addProperty(RDFProperties.title, name);

    r.addProperty(RDFProperties.triples,
            String.valueOf(new DatasetQueries().getNumberOfTriples(dataset)));
    r.addProperty(RDFProperties.subset, outModel.createResource(subset));
}
项目:Tapioca    文件:SpecialClassExtractor.java   
/**
 * Loads a resource list from the classpath: one URI per line; lines are
 * trimmed and turned into graph nodes.
 *
 * Fixes: the input stream was previously never closed; and the capacity
 * expression {@code (int) 2 * lines.size()} cast only the constant 2.
 *
 * @param listName classpath location of the list
 * @return the nodes read, or an empty set if the list is missing or unreadable
 */
protected static Set<Node> loadList(String listName) {
    InputStream is = SpecialClassExtractor.class.getClassLoader().getResourceAsStream(listName);
    if (is == null) {
        LOGGER.error("Couldn't load list " + listName + " from resources. Returning empty list.");
        return new HashSet<Node>();
    }
    List<String> lines;
    try {
        lines = IOUtils.readLines(is);
    } catch (IOException e) {
        LOGGER.error("Couldn't load list from resources. Returning empty list.", e);
        return new HashSet<Node>();
    } finally {
        // FIX: close the stream on both the success and the error path.
        IOUtils.closeQuietly(is);
    }
    Set<Node> resourceList = new HashSet<Node>(2 * lines.size());
    for (String line : lines) {
        resourceList.add(ResourceFactory.createResource(line.trim()).asNode());
    }
    return resourceList;
}
项目:Tapioca    文件:SpecialClassExtractor.java   
/**
 * Loads a resource list from the classpath: one URI per line; lines are
 * trimmed and turned into graph nodes.
 *
 * Fixes: the input stream was previously never closed; and the capacity
 * expression {@code (int) 2 * lines.size()} cast only the constant 2.
 *
 * @param listName classpath location of the list
 * @return the nodes read, or an empty set if the list is missing or unreadable
 */
protected static Set<Node> loadList(String listName) {
    InputStream is = SpecialClassExtractor.class.getClassLoader().getResourceAsStream(listName);
    if (is == null) {
        LOGGER.error("Couldn't load list " + listName + " from resources. Returning empty list.");
        return new HashSet<Node>();
    }
    List<String> lines;
    try {
        lines = IOUtils.readLines(is);
    } catch (IOException e) {
        LOGGER.error("Couldn't load list from resources. Returning empty list.", e);
        return new HashSet<Node>();
    } finally {
        // FIX: close the stream on both the success and the error path.
        IOUtils.closeQuietly(is);
    }
    Set<Node> resourceList = new HashSet<Node>(2 * lines.size());
    for (String line : lines) {
        resourceList.add(ResourceFactory.createResource(line.trim()).asNode());
    }
    return resourceList;
}
项目:cobalt    文件:ShortestPathFinderTest.java   
@BeforeMethod
public void setUp() {
  // Anonymous (blank-node) resources serve as the path endpoints.
  source = ResourceFactory.createResource();
  target = ResourceFactory.createResource();
  mid = ResourceFactory.createResource();
  final Resource intermediate = ResourceFactory.createResource();

  final Property link = ResourceFactory.createProperty("urn:example:p");
  final Model testModel = ModelFactory.createDefaultModel();

  // Long route: source -> mid -> intermediate -> target (length 3).
  testModel.add(source, link, mid);
  testModel.add(mid, link, intermediate);
  testModel.add(intermediate, link, target);

  // Short route: source -> mid -> target (length 2). The repeated
  // source->mid triple is deduplicated by the model.
  testModel.add(source, link, mid);
  testModel.add(mid, link, target);

  finder = new ShortestPathFinder(testModel, link);
}
项目:neo4jena    文件:ExecutionResultIterator.java   
@Override
    public Triple next() {
        // Each call runs inside its own transaction against the graph DB.
        try (Transaction tx = graphdb.beginTx()) {
            Map<String, Object> row = delegate.next();
            Node subjectNode = (Node) row.get("subject");
            Node objectNode = (Node) row.get("object");
            // The predicate comes back as the relationship type name string.
            String predicateName = (String) row.get("type(predicate)");
            return new Triple(
                    new JenaNeoNode(subjectNode),
                    ResourceFactory.createProperty(predicateName).asNode(),
                    new JenaNeoNode(objectNode));
        }
    }
项目:sml-converters    文件:TriplesMap.java   
/**
 * Rewrites a shortcut term-map statement into its normalized two-statement
 * form: subject --generalProperty--> [bnode] and [bnode] --rr:constant-->
 * object, where generalProperty is looked up from the shortcut predicate.
 *
 * @param triplesMap the triples map model (not consulted here)
 * @param statement the shortcut statement to normalize
 * @return the two normalized statements
 */
private List<Statement> getNormalizedTermMapStatements(Model triplesMap,
        Statement statement) {

    Resource subject = statement.getSubject();
    Property generalProperty = short2general.get(statement.getPredicate());
    RDFNode object = statement.getObject();
    // A fresh blank node acts as the intermediate term-map resource.
    Resource termMapBNode = ResourceFactory.createResource();

    List<Statement> normalized = new ArrayList<Statement>(2);
    normalized.add(ResourceFactory.createStatement(subject, generalProperty, termMapBNode));
    normalized.add(ResourceFactory.createStatement(termMapBNode, RR.constant, object));
    return normalized;
}
项目:sml-converters    文件:TermConstructorConverter.java   
/**
 * Sets up a term constructor conversion.
 *
 * Argument layout, inferred from the accesses below — TODO confirm against
 * callers: index 0 is the value expression, index 1 the language tag,
 * index 2 the datatype. Note that exprs.get(1) is accessed unconditionally,
 * so exprs is presumably always at least size 2 (size 3 for typed
 * literals); verify with the parser that builds these expression lists.
 *
 * @param type the kind of term to construct
 * @param exprs the constructor argument expressions
 */
public TermConstructorConverter(TermConstructorType type, List<Expr> exprs) {
    this.type = type;

    // Typed literals carry their datatype URI in the third argument.
    if (type.equals(TermConstructorType.typedLiteral)) {
        Node dtype = ((NodeValueNode) exprs.get(2)).getNode();
        this.dtype = ResourceFactory.createResource(((Node_URI) dtype).getURI());
    } else {
        this.dtype = null;
    }

    String langStr = ((NodeValueString) exprs.get(1)).asString();

    // Only plain literals keep a (non-empty) language tag.
    if (type.equals(TermConstructorType.plainLiteral) && !langStr.isEmpty()) {
        this.langStr = langStr;
    } else {
        this.langStr = null;
    }
    this.expr = exprs.get(0);
}
项目:sml-converters    文件:R2RMLSpecTest.java   
@Test
public void test_getTriplesMaps_1() {
    Model r2rml = readR2RML(r2rml1);
    Resource expectedResource = ResourceFactory.createResource(prefix + "TriplesMap1");
    int expectedCount = 1;

    Set<TriplesMap> triplesMaps = new R2RMLSpec(r2rml).getTriplesMaps();
    Set<Resource> tmResources = new HashSet<Resource>();
    for (TriplesMap tm : triplesMaps) {
        tmResources.add(tm.getResource());
    }

    // Exactly one triples map is defined, and it is the expected one.
    assertEquals(expectedCount, triplesMaps.size());
    assertEquals(expectedCount, tmResources.size());
    assertTrue(tmResources.contains(expectedResource));
}
项目:sml-converters    文件:R2RMLSpecTest.java   
@Test
public void test_getTriplesMaps_2() {
    Model r2rml = readR2RML(r2rml2);
    List<Resource> expected = new ArrayList<Resource>();
    expected.add(ResourceFactory.createResource(prefix + "TriplesMap2"));
    expected.add(ResourceFactory.createResource(prefix + "TriplesMap3"));
    int expectedCount = 2;

    Set<TriplesMap> triplesMaps = new R2RMLSpec(r2rml).getTriplesMaps();
    Set<Resource> tmResources = new HashSet<Resource>();
    for (TriplesMap tm : triplesMaps) {
        tmResources.add(tm.getResource());
    }

    // Both triples maps are found, with no duplicates or extras.
    assertEquals(expectedCount, triplesMaps.size());
    assertEquals(expectedCount, tmResources.size());
    for (Resource res : expected) {
        assertTrue(tmResources.contains(res));
    }
}
项目:sml-converters    文件:R2RMLSpecTest.java   
@Test
public void test_getTriplesMaps_3() {
    Model r2rml = readR2RML(r2rml3);
    List<Resource> expectedResources = new ArrayList<Resource>();
    expectedResources.add(ResourceFactory.createResource(prefix + "TriplesMap4"));
    expectedResources.add(ResourceFactory.createResource(prefix + "TriplesMap5"));
    // NOTE(review): three triples maps are expected below but only two
    // resources are listed above — presumably r2rml3 contains a third
    // triples map that is deliberately not checked by URI. Confirm against
    // the r2rml3 fixture; if it defines only two maps, either the count or
    // the list is wrong.
    int expectedNumTMResources = 3;

    R2RMLSpec spec = new R2RMLSpec(r2rml);
    Set<TriplesMap> triplesMaps = spec.getTriplesMaps();
    Set<Resource> tmResources = new HashSet<Resource>();
    for (TriplesMap tm : triplesMaps) {
        tmResources.add(tm.getResource());
    }

    // Size checks use the expected total; containment only the listed subset.
    assertEquals(expectedNumTMResources, triplesMaps.size());
    assertEquals(expectedNumTMResources, tmResources.size());
    for (Resource expctdRes : expectedResources) {
        assertTrue(tmResources.contains(expctdRes));
    }
}
项目:sml-converters    文件:R2RML2SMLConverterTest.java   
@Test
public void test_buildTblToTM_1() {
    Model r2rml = readR2RML(r2rml1);
    R2RMLSpec spec = new R2RMLSpec(r2rml);

    // Resolve the triples map and its logical table from the test data.
    Resource tmSubject = ResourceFactory.createResource(prefix + "TriplesMap1");
    NodeIterator tblIt = r2rml.listObjectsOfProperty(tmSubject, RR.logicalTable);
    Resource tblSubject = tblIt.next().asResource();
    LogicalTable logTbl = new LogicalTable(r2rml, tblSubject);
    Pair<LogicalTable, TriplesMap> expectedEntry =
            new Pair<LogicalTable, TriplesMap>(logTbl, new TriplesMap(r2rml, tmSubject));
    int expectedSize = 1;

    Map<LogicalTable, Collection<TriplesMap>> tableToTMs =
            R2RML2SMLConverter.buildTblToTM(spec);

    // Exactly one entry, mapping the expected table to the expected triples map.
    assertEquals(expectedSize, tableToTMs.size());
    assertEquals(expectedSize, tableToTMs.keySet().size());
    assertTrue(tableToTMs.keySet().contains(expectedEntry.first));
    assertTrue(tableToTMs.get(logTbl).contains(expectedEntry.second));
}
项目:storm-query-operators    文件:Triple2Graph.java   
@Override
public void execute(TridentTuple tuple, TridentCollector collector) {
    // Rebuild the incoming (subject, predicate, object) strings as a statement.
    Statement incoming = ResourceFactory.createStatement(
            ResourceFactory.createResource(tuple.getString(0)),
            ResourceFactory.createProperty(tuple.getString(1)),
            ResourceFactory.createResource(tuple.getString(2)));

    // A triple matching the starting pattern marks the start of a new graph:
    // flush whatever was collected so far under the previous graph name.
    if (statementPattern.test(incoming)) {
        if (!graph.isEmpty()) {
            // Emitted values: graph name (earthquake URI), creation timestamp, graph.
            collector.emit(new Values(graphName, System.currentTimeMillis(), graph));
            graph.clear();
        }
        graphName = tuple.getString(0);
    }
    graph.add(incoming.asTriple());
}
项目:storm-query-operators    文件:Triple2GraphBolt.java   
/**
 * Reads the starting triple pattern from the topology configuration. Each
 * component (_SUBJECT/_PREDICATE/_OBJECT under the configured pattern id)
 * is optional; a missing component becomes a null wildcard in the selector.
 *
 * BUG FIX: the object position was previously built with createProperty();
 * an object is a plain resource (consistent with the subject here and with
 * how object patterns are built elsewhere in this project).
 */
@Override
public void prepare(Map conf, TopologyContext context,  OutputCollector collector) {
    this.collector = collector;
    graph = Factory.createDefaultGraph();
    startingPatternId = (String) conf.get("STARTING_PATTERN_ID");
    Resource subject = null;
    if ((conf.get(startingPatternId + "_SUBJECT")) != null) {
        subject = ResourceFactory.createResource((String) conf.get(startingPatternId + "_SUBJECT"));
    }
    Property predicate = null;
    if ((conf.get(startingPatternId + "_PREDICATE")) != null) {
        predicate = ResourceFactory.createProperty((String) conf.get(startingPatternId + "_PREDICATE"));
    }
    Resource object = null;
    if ((conf.get(startingPatternId + "_OBJECT")) != null) {
        object = ResourceFactory.createResource((String) conf.get(startingPatternId + "_OBJECT"));
    }
    startingPattern = new SimpleSelector(subject, predicate, object);
}
项目:storm-query-operators    文件:Triple2GraphBolt.java   
@Override
    public void execute(Tuple tuple) {
        // Rebuild the incoming (subject, predicate, object) strings as a statement.
        Statement incoming = ResourceFactory.createStatement(
                ResourceFactory.createResource(tuple.getString(0)),
                ResourceFactory.createProperty(tuple.getString(1)),
                ResourceFactory.createResource(tuple.getString(2)));

        // A triple matching the starting pattern begins a new graph: flush the
        // one collected so far under the previous graph name.
        if (startingPattern.test(incoming)) {
            if (!graph.isEmpty()) {
                // Emitted values: graph name (earthquake URI), creation timestamp, graph.
                collector.emit(new Values(graphName, System.currentTimeMillis(), graph));
                System.out.println("EMITTED GRAPH: " + graphName);
                graph.clear();
            }
            graphName = tuple.getString(0);
        }
        graph.add(incoming.asTriple());
        collector.ack(tuple);
    }
项目:storm-query-operators    文件:GraphCounterTopology.java   
// Wires the topology: spout emits triples from the Turtle file, the
// triple2graph bolt groups them into graphs, the counter bolt counts per
// graph name, and the final bolt aggregates/prints.
private void wireTopology() {

    String fileName = "data/Earthquakes-Spain-2013.ttl";
    String spoutId = "rdfStreamSpout";
    String triple2graph = "triple2graph";
    String graphCounter = "graphCounter";
    String finalCounter = "finalCounter";

    // TODO: Get the triple pattern from the configuration/context 
    // NOTE(review): this selector is never used below — Triple2GraphBolt is
    // handed STARTING_PATTERN_ID and presumably reads the pattern from the
    // topology configuration instead. Confirm before deleting this dead
    // local; it may only document the intended pattern.
    SimpleSelector startingPattern = new SimpleSelector(null, 
            ResourceFactory.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
            ResourceFactory.createResource("http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest"));

    builder.setSpout(spoutId, new RDFStreamSpout(fileName));
    builder.setBolt(triple2graph, new Triple2GraphBolt(STARTING_PATTERN_ID)).shuffleGrouping(spoutId);
    // (15, 3) are presumably the rolling window length and emit frequency —
    // confirm against RollingCountBolt's constructor documentation.
    builder.setBolt(graphCounter, new RollingCountBolt(15, 3)).fieldsGrouping(triple2graph, new Fields("name"));
    builder.setBolt(finalCounter, new AckerPrinterBolt()).globalGrouping(graphCounter);
}
项目:fcrepo4-client    文件:FedoraResourceImplTest.java   
/**
 * Builds the resource under test and backs it with a small RDF graph
 * carrying created/modified dates, a mixin type, and a writable flag.
 *
 * Fix: replaced the deprecated {@code new Boolean(...)} constructor with
 * {@code Boolean.valueOf(...)} (same value, no needless allocation).
 */
@Before
public void setUp() throws IOException {
    initMocks(this);
    when(mockRepository.getRepositoryUrl()).thenReturn(repositoryURL);
    resource = new FedoraResourceImpl(mockRepository, mockHelper, path);
    assertTrue(resource != null);

    final Graph graph = createDefaultGraph();
    graph.add( create(createURI(repositoryURL + "/test"), RdfLexicon.CREATED_DATE.asNode(),
                      ResourceFactory.createPlainLiteral(testDateValue).asNode()) );
    graph.add( create(createURI(repositoryURL + "/test"), RdfLexicon.LAST_MODIFIED_DATE.asNode(),
                      ResourceFactory.createPlainLiteral(testDateValue).asNode()) );
    graph.add( create(createURI(repositoryURL + "/test"), RdfLexicon.HAS_MIXIN_TYPE.asNode(),
                      createURI(testMixinType)) );
    graph.add( create(createURI(repositoryURL + "/test"), RdfLexicon.WRITABLE.asNode(),
                      ResourceFactory.createTypedLiteral(Boolean.valueOf(isWritable)).asNode()) );
    resource.setGraph( graph );
}
项目:fcrepo4-client    文件:FedoraDatastreamImplTest.java   
/**
 * Builds the datastream under test and backs it with an RDF graph covering
 * both the datastream node (dates, mixin type, writable flag, describes
 * link) and its content node (size, mime type, filename, digest).
 *
 * Fix: replaced the deprecated {@code new Boolean(...)} constructor with
 * {@code Boolean.valueOf(...)} (same value, no needless allocation).
 */
@Before
public void setUp() throws IOException, FedoraException {
    initMocks(this);
    mockRepository.httpHelper = mockHelper;

    when(mockRepository.getRepositoryUrl()).thenReturn(repositoryURL);
    when(mockRepository.getObject(eq("/test"))).thenReturn(mockObject);
    datastream = new FedoraDatastreamImpl(mockRepository, mockHelper, path);
    assertTrue(datastream != null);

    final Graph graph = createDefaultGraph();
    graph.add( create(dsSubj, CREATED_DATE.asNode(), ResourceFactory.createPlainLiteral(testDateValue).asNode()) );
    graph.add( create(dsSubj, LAST_MODIFIED_DATE.asNode(),
        ResourceFactory.createPlainLiteral(testDateValue).asNode()) );
    graph.add( create(dsSubj, HAS_MIXIN_TYPE.asNode(), createURI(testMixinType)) );
    graph.add( create(dsSubj, WRITABLE.asNode(),
        ResourceFactory.createTypedLiteral(Boolean.valueOf(isWritable)).asNode()) );
    graph.add( create(dsSubj, DESCRIBES.asNode(), contentSubj) );
    graph.add( create(contentSubj, HAS_SIZE.asNode(), ResourceFactory.createPlainLiteral(contentSize).asNode()) );
    graph.add( create(contentSubj, HAS_MIME_TYPE.asNode(), ResourceFactory.createPlainLiteral(mimeType).asNode()) );
    graph.add( create(contentSubj, HAS_ORIGINAL_NAME.asNode(),
        ResourceFactory.createPlainLiteral(filename).asNode()) );
    graph.add( create(contentSubj, REST_API_DIGEST.asNode(), createURI(checksum)) );
    datastream.setGraph( graph );
}
项目:p3-geo-enriching-transformer    文件:SpatialDataEnhancer.java   
/**
 * Wraps the given dataset with a Lucene-backed spatial index stored in
 * indexDir, indexing schema.org latitude/longitude property pairs.
 *
 * @param baseDataset the dataset to enrich with spatial search
 * @param indexDir directory the Lucene index lives in
 * @return the spatially indexed dataset
 * @throws IOException if the index directory cannot be opened
 */
private Dataset joinDataset(Dataset baseDataset, File indexDir) throws IOException {
    EntityDefinition entDef = new EntityDefinition("entityField", "geoField");

    // you need JTS lib in the classpath to run the examples
    //entDef.setSpatialContextFactory(SpatialQuery.JTS_SPATIAL_CONTEXT_FACTORY_CLASS);

    // Register the latitude/longitude property pair used by the indexed data.
    Resource latitude = ResourceFactory.createResource("http://schema.org/latitude");
    Resource longitude = ResourceFactory.createResource("http://schema.org/longitude");
    entDef.addSpatialPredicatePair(latitude, longitude);

    // Lucene index stored in the file system.
    Directory dir = FSDirectory.open(indexDir);

    // Join the base data and the spatial index together into one dataset.
    return SpatialDatasetFactory.createLucene(baseDataset, dir, entDef);
}