public Resource addQualityReport(Model model) {
    Resource qualityReport = model.createResource(
            LDQM.QUALITY_REPORT_PREFIX + UUID.randomUUID().toString(),
            QPRO.QualityReport);
    qualityReport.addProperty(QPRO.computedOn, ResourceFactory.createTypedLiteral(
            DATE_FORMAT.format(new Date()), XSDDatatype.XSDdateTime));
    Resource noDerefSubjectsProblem = model.createResource(QPRO.QualityProblem);
    noDerefSubjectsProblem.addProperty(QPRO.isDescribedBy, LDQM.IRIdereferenceability);
    for (HttpResponse response : responseMap.values()) {
        // anything outside the 2xx range (including redirects) counts as undereferenceable
        if (response.getStatusCode() >= 300 || response.getStatusCode() < 200) {
            Resource errSubject = model.createResource(LDQM.Defect_UndereferenceableURI);
            errSubject.addProperty(DCTerms.subject, response.getUri());
            if (response.getStatusCode() > 0) {
                errSubject.addLiteral(HTTP.statusCodeValue,
                        model.createTypedLiteral(response.getStatusCode(), XSDDatatype.XSDint));
            }
            errSubject.addLiteral(HTTP.methodName, response.getMethod());
            if (response.getReason() != null) {
                errSubject.addLiteral(HTTP.reasonPhrase, response.getReason());
            }
            noDerefSubjectsProblem.addProperty(QPRO.problematicThing, errSubject);
        }
    }
    qualityReport.addProperty(QPRO.hasProblem, noDerefSubjectsProblem);
    qualityReport.addProperty(PROV.wasGeneratedBy, evaluation);
    return qualityReport;
}
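/*
 * Illustrative shape of the report produced above, sketched in Turtle.
 * This is not from the original source: the qpro:/ldqm:/http:/prov: prefix
 * bindings and all concrete URIs and values are assumed for illustration.
 *
 *   <.../qualityReport/550e8400-...>
 *       a qpro:QualityReport ;
 *       qpro:computedOn "2014-05-01T12:00:00"^^xsd:dateTime ;
 *       qpro:hasProblem [
 *           a qpro:QualityProblem ;
 *           qpro:isDescribedBy ldqm:IRIdereferenceability ;
 *           qpro:problematicThing [
 *               a ldqm:Defect_UndereferenceableURI ;
 *               dcterms:subject <http://example.org/broken-uri> ;
 *               http:statusCodeValue "404"^^xsd:int ;
 *               http:methodName "GET" ;
 *               http:reasonPhrase "Not Found"
 *           ]
 *       ] ;
 *       prov:wasGeneratedBy <.../evaluation> .
 */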
/**
 * @param problem the problem being reported
 * @param term the mapping term the problem refers to; may be null
 * @param detailCode Optional error code; indicates a subclass of problems
 * @param details Optional string containing error details
 * @param contextResource May be null
 * @param contextProperty May be null
 */
public Message(Problem problem, MappingTerm term, String detailCode,
        String details, Resource contextResource, Property contextProperty) {
    this.problem = problem;
    this.subject = contextResource;
    this.predicates = contextProperty == null
            ? Collections.<Property>emptyList()
            : Collections.singletonList(contextProperty);
    this.objects = term == null
            ? Collections.<RDFNode>emptyList()
            : Collections.<RDFNode>singletonList(
                    ResourceFactory.createPlainLiteral(term.toString()));
    this.detailCode = detailCode;
    this.details = details;
    this.cause = null;
}
private String toTurtle(RDFNode r) {
    if (r.isURIResource()) {
        return PrettyPrinter.qNameOrURI(relativize(r.asResource().getURI()), prefixes);
    } else if (r.isLiteral()) {
        StringBuffer result = new StringBuffer(quote(r.asLiteral().getLexicalForm()));
        if (!"".equals(r.asLiteral().getLanguage())) {
            result.append("@");
            result.append(r.asLiteral().getLanguage());
        } else if (r.asLiteral().getDatatype() != null) {
            result.append("^^");
            result.append(toTurtle(ResourceFactory.createResource(
                    r.asLiteral().getDatatypeURI())));
        }
        return result.toString();
    } else {
        // blank node: assign a fresh label on first sight, then reuse it
        if (!blankNodeMap.containsKey(r)) {
            blankNodeMap.put(r.asResource(), "_:b" + blankNodeCounter++);
        }
        return blankNodeMap.get(r);
    }
}
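/*
 * A minimal, self-contained sketch (not part of the original class) of the
 * literal cases toTurtle distinguishes: a language tag is appended with "@",
 * otherwise a datatype is appended with "^^". The com.hp.hpl.jena package
 * root is assumed to match the Jena version used elsewhere in this code.
 */
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.ResourceFactory;

public class ToTurtleSketch {
    public static void main(String[] args) {
        Literal tagged = ResourceFactory.createLangLiteral("Hallo", "de");
        Literal typed = ResourceFactory.createTypedLiteral(42); // xsd:int
        // rendered roughly as "Hallo"@de
        System.out.println("\"" + tagged.getLexicalForm() + "\"@" + tagged.getLanguage());
        // rendered roughly as "42"^^<http://www.w3.org/2001/XMLSchema#int>
        System.out.println("\"" + typed.getLexicalForm() + "\"^^<" + typed.getDatatypeURI() + ">");
        // blank nodes would get generated labels: _:b0, _:b1, ...
    }
}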
private void printTranslationTable(TranslationTable table) {
    printMapObject(table, D2RQ.TranslationTable);
    out.printURIProperty(D2RQ.href, table.getHref());
    out.printProperty(D2RQ.javaClass, table.getJavaClass());
    List<Map<Property,RDFNode>> values = new ArrayList<Map<Property,RDFNode>>();
    for (Translation translation : table.getTranslations()) {
        Map<Property,RDFNode> r = new LinkedHashMap<Property,RDFNode>();
        r.put(D2RQ.databaseValue, ResourceFactory.createPlainLiteral(translation.dbValue()));
        r.put(D2RQ.rdfValue, ResourceFactory.createPlainLiteral(translation.rdfValue()));
        values.add(r);
    }
    out.printCompactBlankNodeProperties(D2RQ.translation, values);
}
public boolean isDisambiguationResource(String uri) {
    if (!linksLoaded) {
        System.out.println(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in1 = FileManager.get().open(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in2 = FileManager.get().open(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in3 = FileManager.get().open(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        model.read(in1, null, "N-TRIPLES");
        System.out.println("Loaded English disambiguation dataset.");
        model.read(in2, null, "N-TRIPLES");
        System.out.println("Loaded German disambiguation dataset.");
        model.read(in3, null, "N-TRIPLES");
        System.out.println("Loaded Dutch disambiguation dataset.");
        linksLoaded = true;
    }
    StmtIterator iter = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(uri),
            ResourceFactory.createProperty("http://dbpedia.org/ontology/wikiPageDisambiguates"),
            (RDFNode) null));
    return iter.hasNext();
}
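/*
 * Hypothetical usage (the resource URI is chosen for illustration). The first
 * call triggers loading the three N-Triples dumps into the model, so it is
 * slow; subsequent calls are plain model lookups.
 */
boolean ambiguous = isDisambiguationResource("http://dbpedia.org/resource/Mercury");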
// Returns a model with the rdfs:subClassOf statements along the
// superclass chain of the given URI.
public Model getHierarchyModel(String uri) {
    Model m = ModelFactory.createDefaultModel();
    OntoRecord initRecord = new OntoRecord();
    initRecord.setUri(uri);
    while (initRecord != null) {
        // collect the subClassOf statements of the class at the current level
        StmtIterator iter1 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(initRecord.getUri()),
                RDFS.subClassOf,
                (RDFNode) null));
        m.add(iter1);
        // move one level up; getSuperclass returns null at the top
        initRecord = getSuperclass(initRecord.getUri());
    }
    return m;
}
// Follows rdfs:subClassOf from the given resource and returns the first
// target that carries an rdfs:label in the requested language, or null
// if none is found.
public OntoRecord getSubclass(String resourceURI, String lang) {
    StmtIterator iter1 = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(resourceURI),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    OntoRecord record = new OntoRecord();
    StmtIterator iter2;
    while (iter1.hasNext()) {
        record.setUri(iter1.next().getObject().toString());
        iter2 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(record.getUri()),
                ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#label"),
                (RDFNode) null));
        while (iter2.hasNext()) {
            Literal res = (Literal) iter2.next().getObject();
            String tmpLang = res.getLanguage();
            // match the requested language instead of hardcoding "en"
            if (tmpLang.equals(lang)) {
                record.setLabel(res.getString());
                return record;
            }
        }
    }
    return null;
}
private void addRowToMODEL(List<Statement> sa, String key, String puri) {
    for (Statement s : sa) {
        if (MODEL.contains(s)) {
            continue;
        }
        // add to an existing resource with the same key, if one exists
        if (s.getPredicate().getLocalName().equals(key)) {
            ResIterator it = MODEL.listResourcesWithProperty(s.getPredicate(), s.getObject());
            if (it.hasNext()) {
                // assume all members are equal
                Resource rsc = it.nextResource();
                // link the new subject to the existing parent resource
                Property p = ResourceFactory.createProperty(genOURI(), puri);
                Statement st = ResourceFactory.createStatement(rsc, p, s.getSubject());
                MODEL.add(st);
                continue;
            }
        }
        MODEL.add(s);
    }
}
private List<Statement> convertRowToStatement(Table table, Row row, Resource rcs) {
    List<Statement> sa = new ArrayList<Statement>(row.size());
    for (String attr : row.keySet()) {
        Object value = row.get(attr);
        // skip empty and excluded values
        if (value == null || excludedValue(attr, value.toString())) {
            continue;
        }
        RDFNode attrRcs = createRDFNode(table.getColumn(attr), value);
        Property p = ResourceFactory.createProperty(genOURI(), attr);
        Statement s = ResourceFactory.createStatement(rcs, p, attrRcs);
        sa.add(s);
    }
    return sa;
}
private void addTopics() {
    String[] topicNames = {
            "http://vivoweb.org/ontology/core#FacultyMember",
            "http://xmlns.com/foaf/0.1/Person",
            "http://xmlns.com/foaf/0.1/Organization",
            "http://vivoweb.org/ontology/core#Project",
            "http://purl.org/ontology/bibo/Conference",
            "http://purl.org/ontology/bibo/Workshop",
            "http://vivoweb.org/ontology/core#Grant",
            "http://purl.org/NET/c4dm/event.owl#Event"
    };
    for (String t : topicNames) {
        this.topics.add(ResourceFactory.createResource(t));
    }
}
/**
 * Loads a deduplication specification file and collects, for each related
 * graph, the list of classes required from it.
 *
 * @param path path to the RDF specification file
 * @return a map from graph URI to the list of required classes
 */
public static HashMap<String, List<Resource>> loadClassList(String path) {
    HashMap<String, List<Resource>> res = new HashMap<>();
    // load specification file
    Model model = RDFDataMgr.loadModel(path);
    // get all graphs
    Iterator<Statement> statIt = model.listStatements((Resource) null,
            ResourceFactory.createProperty("http://aksw.org/deduplication/relatedGraph"),
            (RDFNode) null);
    while (statIt.hasNext()) {
        Statement s = statIt.next();
        Resource dataset = s.getSubject();
        String graph = s.getObject().as(Resource.class).getURI();
        // get all classes for each graph
        ArrayList<Resource> classes = new ArrayList<>();
        Iterator<RDFNode> nodeIt = model.listObjectsOfProperty(dataset,
                ResourceFactory.createProperty("http://aksw.org/deduplication/requiredClasses"));
        while (nodeIt.hasNext()) {
            classes.add(nodeIt.next().as(Resource.class));
        }
        res.put(graph, classes);
    }
    return res;
}
/**
 * Initialize the query for selecting some columns only.
 *
 * @param tableName name of the table to query
 * @param propertynames properties whose column aliases should be selected
 * @throws SQLException
 */
public void getHashes(String tableName, String[] propertynames) throws SQLException {
    this.aliasesCache = new String[propertynames.length + 1];
    aliasesCache[0] = "id";
    // map each property to its column alias (p1, p2, ...) via propRefMap
    for (int i = 1; i < aliasesCache.length; i++) {
        aliasesCache[i] = "p" + (propRefMap.get(tableName)
                .indexOf(ResourceFactory.createProperty(propertynames[i - 1])) + 1);
    }
    StringBuilder s = new StringBuilder();
    for (int i = 1; i < aliasesCache.length; i++) {
        s.append(aliasesCache[i]).append(", ");
    }
    s.setLength(s.length() - 2);
    String query = "select id, " + s + " from " + tableName;
    this.resSet = statement.executeQuery(query);
}
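/*
 * Worked example (hypothetical): assuming propRefMap maps "person" to the
 * property list [foaf:name, foaf:mbox], the call
 *
 *   getHashes("person", new String[] {
 *       "http://xmlns.com/foaf/0.1/name",
 *       "http://xmlns.com/foaf/0.1/mbox" });
 *
 * resolves the aliases to p1 and p2 and executes:
 *
 *   select id, p1, p2 from person
 */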
private static Set<Property> getProperties(int i) {
    HashSet<Property> prop = new HashSet<Property>();
    // note: "phone_numer" reflects the property IRI as spelled in the source ontology
    switch (i) {
    case 1:
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#has_address"));
        break;
    case 2:
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#soc_sec_id"));
        break;
    case 3:
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#given_name"));
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#phone_numer"));
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#surname"));
        break;
    case 4:
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#age"));
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#phone_numer"));
        prop.add(ResourceFactory.createProperty("http://www.okkam.org/ontology_person1.owl#surname"));
        break;
    }
    return prop;
}
public RDFNode convertToJenaRDFNode(final BigdataValue v) {
    if (v instanceof BigdataResource)
        return convertToJenaResource((BigdataResource) v);

    if (!(v instanceof BigdataLiteral))
        throw new IllegalArgumentException(v.getClass().getName());

    final BigdataLiteral l = (BigdataLiteral) v;
    final String lex = l.getLabel();
    final URI datatypeURI = l.getDatatype();
    final String languageTag = l.getLanguage();

    if (datatypeURI != null) {
        final RDFDatatype dt = JENA_TYPE_MAPPER.getSafeTypeByName(datatypeURI.stringValue());
        return ResourceFactory.createTypedLiteral(lex, dt);
    }
    else if (languageTag != null) {
        return ResourceFactory.createLangLiteral(lex, languageTag);
    }
    else {
        return ResourceFactory.createPlainLiteral(lex);
    }
}
private void addDatasetToModel(DatasetDB dataset, String subset) {
    // add distribution to jena model
    Resource r = outModel.createResource(dataset.getUri());
    r.addProperty(RDFProperties.type, ResourceFactory.createResource(NS.VOID_URI + "Dataset"));
    String name = dataset.getTitle() == null ? dataset.getUri() : dataset.getTitle();
    r.addProperty(RDFProperties.title, name);
    r.addProperty(RDFProperties.triples,
            String.valueOf(new DatasetQueries().getNumberOfTriples(dataset)));
    r.addProperty(RDFProperties.subset, outModel.createResource(subset));
}
protected static Set<Node> loadList(String listName) {
    InputStream is = SpecialClassExtractor.class.getClassLoader().getResourceAsStream(listName);
    if (is == null) {
        LOGGER.error("Couldn't load list " + listName + " from resources. Returning empty list.");
        return new HashSet<Node>();
    }
    List<String> lines;
    try {
        lines = IOUtils.readLines(is);
    } catch (IOException e) {
        LOGGER.error("Couldn't load list from resources. Returning empty list.", e);
        return new HashSet<Node>();
    }
    Set<Node> resourceList = new HashSet<Node>(2 * lines.size());
    for (String line : lines) {
        resourceList.add(ResourceFactory.createResource(line.trim()).asNode());
    }
    return resourceList;
}
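// Hypothetical usage; "special_classes.txt" is an assumed classpath resource
// containing one IRI per line:
Set<Node> specialClasses = loadList("special_classes.txt");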
@BeforeMethod
public void setUp() {
    source = ResourceFactory.createResource();
    target = ResourceFactory.createResource();
    mid = ResourceFactory.createResource();
    final Resource y = ResourceFactory.createResource();
    final Property p = ResourceFactory.createProperty("urn:example:p");
    final Model model = ModelFactory.createDefaultModel();
    // a path of length 3
    model.add(source, p, mid);
    model.add(mid, p, y);
    model.add(y, p, target);
    // a shorter path of length 2
    model.add(source, p, mid);
    model.add(mid, p, target);
    finder = new ShortestPathFinder(model, p);
}
@Override
public Triple next() {
    try (Transaction tx = graphdb.beginTx()) {
        Map<String, Object> row = delegate.next();
        return new Triple(
                new JenaNeoNode((Node) row.get("subject")),
                ResourceFactory.createProperty((String) row.get("type(predicate)")).asNode(),
                new JenaNeoNode((Node) row.get("object")));
    }
}
private List<Statement> getNormalizedTermMapStatements(Model triplesMap, Statement statement) {
    List<Statement> normalizedStatements = new ArrayList<Statement>();
    Resource subject = statement.getSubject();
    Property shortCutPredicate = statement.getPredicate();
    Property generalProperty = short2general.get(shortCutPredicate);
    RDFNode object = statement.getObject();
    // expand the constant shortcut into a blank-node term map
    Resource termMapBNode = ResourceFactory.createResource();
    normalizedStatements.add(ResourceFactory.createStatement(
            subject, generalProperty, termMapBNode));
    normalizedStatements.add(ResourceFactory.createStatement(
            termMapBNode, RR.constant, object));
    return normalizedStatements;
}
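/*
 * Effect of the normalization, sketched in Turtle. Following the R2RML
 * constant-shortcut rule (e.g. rr:subject expands to rr:subjectMap with
 * rr:constant), an input statement such as
 *
 *   <#TriplesMap1> rr:subject <http://example.org/S> .
 *
 * is rewritten into
 *
 *   <#TriplesMap1> rr:subjectMap [ rr:constant <http://example.org/S> ] .
 *
 * assuming short2general maps rr:subject to rr:subjectMap.
 */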
public TermConstructorConverter(TermConstructorType type, List<Expr> exprs) {
    this.type = type;

    if (type.equals(TermConstructorType.typedLiteral)) {
        Node dtype = ((NodeValueNode) exprs.get(2)).getNode();
        this.dtype = ResourceFactory.createResource(((Node_URI) dtype).getURI());
    } else {
        this.dtype = null;
    }

    // only plain literals carry a language tag; guard the lookup so a
    // single-element expression list does not cause an out-of-bounds access
    if (type.equals(TermConstructorType.plainLiteral) && exprs.size() > 1) {
        String langStr = ((NodeValueString) exprs.get(1)).asString();
        this.langStr = langStr.isEmpty() ? null : langStr;
    } else {
        this.langStr = null;
    }

    this.expr = exprs.get(0);
}
@Test
public void test_getTriplesMaps_1() {
    Model r2rml = readR2RML(r2rml1);
    Resource expectedTMResource = ResourceFactory.createResource(prefix + "TriplesMap1");
    int expectedNumTMResources = 1;

    R2RMLSpec spec = new R2RMLSpec(r2rml);
    Set<TriplesMap> triplesMaps = spec.getTriplesMaps();
    Set<Resource> tmResources = new HashSet<Resource>();
    for (TriplesMap tm : triplesMaps) {
        tmResources.add(tm.getResource());
    }

    assertEquals(expectedNumTMResources, triplesMaps.size());
    assertEquals(expectedNumTMResources, tmResources.size());
    assertTrue(tmResources.contains(expectedTMResource));
}
@Test
public void test_getTriplesMaps_2() {
    Model r2rml = readR2RML(r2rml2);
    List<Resource> expectedResources = new ArrayList<Resource>();
    expectedResources.add(ResourceFactory.createResource(prefix + "TriplesMap2"));
    expectedResources.add(ResourceFactory.createResource(prefix + "TriplesMap3"));
    int expectedNumTMResources = 2;

    R2RMLSpec spec = new R2RMLSpec(r2rml);
    Set<TriplesMap> triplesMaps = spec.getTriplesMaps();
    Set<Resource> tmResources = new HashSet<Resource>();
    for (TriplesMap tm : triplesMaps) {
        tmResources.add(tm.getResource());
    }

    assertEquals(expectedNumTMResources, triplesMaps.size());
    assertEquals(expectedNumTMResources, tmResources.size());
    for (Resource expctdRes : expectedResources) {
        assertTrue(tmResources.contains(expctdRes));
    }
}
@Test
public void test_getTriplesMaps_3() {
    Model r2rml = readR2RML(r2rml3);
    List<Resource> expectedResources = new ArrayList<Resource>();
    expectedResources.add(ResourceFactory.createResource(prefix + "TriplesMap4"));
    expectedResources.add(ResourceFactory.createResource(prefix + "TriplesMap5"));
    // r2rml3 is expected to hold three triples maps; only two of them are
    // identified by the URIs checked below
    int expectedNumTMResources = 3;

    R2RMLSpec spec = new R2RMLSpec(r2rml);
    Set<TriplesMap> triplesMaps = spec.getTriplesMaps();
    Set<Resource> tmResources = new HashSet<Resource>();
    for (TriplesMap tm : triplesMaps) {
        tmResources.add(tm.getResource());
    }

    assertEquals(expectedNumTMResources, triplesMaps.size());
    assertEquals(expectedNumTMResources, tmResources.size());
    for (Resource expctdRes : expectedResources) {
        assertTrue(tmResources.contains(expctdRes));
    }
}
@Test
public void test_buildTblToTM_1() {
    Model r2rml = readR2RML(r2rml1);
    R2RMLSpec spec = new R2RMLSpec(r2rml);
    Resource expectedTMSubject = ResourceFactory.createResource(prefix + "TriplesMap1");
    NodeIterator tmpRes = r2rml.listObjectsOfProperty(expectedTMSubject, RR.logicalTable);
    Resource expectedTblSubject = tmpRes.next().asResource();
    LogicalTable logTbl = new LogicalTable(r2rml, expectedTblSubject);
    Pair<LogicalTable, TriplesMap> expectedLtTm = new Pair<LogicalTable, TriplesMap>(
            logTbl, new TriplesMap(r2rml, expectedTMSubject));
    int expectedNumLtTmEntries = 1;

    Map<LogicalTable, Collection<TriplesMap>> tableToTMs = R2RML2SMLConverter.buildTblToTM(spec);

    assertEquals(expectedNumLtTmEntries, tableToTMs.size());
    assertEquals(expectedNumLtTmEntries, tableToTMs.keySet().size());
    Set<LogicalTable> tbls = tableToTMs.keySet();
    assertTrue(tbls.contains(expectedLtTm.first));
    Collection<TriplesMap> triplesMaps = tableToTMs.get(logTbl);
    assertTrue(triplesMaps.contains(expectedLtTm.second));
}
@Override
public void execute(TridentTuple tuple, TridentCollector collector) {
    Statement newStatement = ResourceFactory.createStatement(
            ResourceFactory.createResource(tuple.getString(0)),
            ResourceFactory.createProperty(tuple.getString(1)),
            ResourceFactory.createResource(tuple.getString(2)));
    // The name of the graph is stored and added to the tuple at the emission.
    // If the new triple matches the starting pattern and the graph is not empty,
    // the graph is emitted.
    if (statementPattern.test(newStatement)) {
        if (!graph.isEmpty()) {
            // The values emitted are the name of the graph (earthquake URI),
            // the timestamp of creation, and the graph itself.
            collector.emit(new Values(graphName, System.currentTimeMillis(), graph));
            graph.clear();
        }
        graphName = tuple.getString(0);
    }
    graph.add(newStatement.asTriple());
}
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
    graph = Factory.createDefaultGraph();
    startingPatternId = (String) conf.get("STARTING_PATTERN_ID");
    Resource subject = null;
    if (conf.get(startingPatternId + "_SUBJECT") != null) {
        subject = ResourceFactory.createResource((String) conf.get(startingPatternId + "_SUBJECT"));
    }
    Property predicate = null;
    if (conf.get(startingPatternId + "_PREDICATE") != null) {
        predicate = ResourceFactory.createProperty((String) conf.get(startingPatternId + "_PREDICATE"));
    }
    Resource object = null;
    if (conf.get(startingPatternId + "_OBJECT") != null) {
        // the object position holds a resource, so createResource is used here
        object = ResourceFactory.createResource((String) conf.get(startingPatternId + "_OBJECT"));
    }
    startingPattern = new SimpleSelector(subject, predicate, object);
}
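/*
 * Sketch of the Storm configuration prepare() expects; the key names come
 * from the code above, while the pattern id and values are illustrative.
 * Unset keys (here the subject) become wildcards in the resulting
 * SimpleSelector.
 */
Config conf = new Config();
conf.put("STARTING_PATTERN_ID", "EQ");
conf.put("EQ_PREDICATE", "http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
conf.put("EQ_OBJECT", "http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest");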
@Override
public void execute(Tuple tuple) {
    Statement newStatement = ResourceFactory.createStatement(
            ResourceFactory.createResource(tuple.getString(0)),
            ResourceFactory.createProperty(tuple.getString(1)),
            ResourceFactory.createResource(tuple.getString(2)));
    // The name of the graph is stored and added to the tuple at the emission.
    // If the new triple matches the starting pattern and the graph is not empty,
    // the graph is emitted.
    if (startingPattern.test(newStatement)) {
        if (!graph.isEmpty()) {
            // The values emitted are the name of the graph (earthquake URI),
            // the timestamp of creation, and the graph itself.
            collector.emit(new Values(graphName, System.currentTimeMillis(), graph));
            System.out.println("EMITTED GRAPH: " + graphName);
            graph.clear();
        }
        graphName = tuple.getString(0);
    }
    graph.add(newStatement.asTriple());
    collector.ack(tuple);
}
private void wireTopology() {
    String fileName = "data/Earthquakes-Spain-2013.ttl";
    String spoutId = "rdfStreamSpout";
    String triple2graph = "triple2graph";
    String graphCounter = "graphCounter";
    String finalCounter = "finalCounter";
    // TODO: Get the triple pattern from the configuration/context
    SimpleSelector startingPattern = new SimpleSelector(
            null,
            ResourceFactory.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
            ResourceFactory.createResource("http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest"));
    builder.setSpout(spoutId, new RDFStreamSpout(fileName));
    builder.setBolt(triple2graph, new Triple2GraphBolt(STARTING_PATTERN_ID)).shuffleGrouping(spoutId);
    builder.setBolt(graphCounter, new RollingCountBolt(15, 3)).fieldsGrouping(triple2graph, new Fields("name"));
    builder.setBolt(finalCounter, new AckerPrinterBolt()).globalGrouping(graphCounter);
}
@Before
public void setUp() throws IOException {
    initMocks(this);
    when(mockRepository.getRepositoryUrl()).thenReturn(repositoryURL);
    resource = new FedoraResourceImpl(mockRepository, mockHelper, path);
    assertNotNull(resource);

    final Graph graph = createDefaultGraph();
    graph.add(create(createURI(repositoryURL + "/test"),
            RdfLexicon.CREATED_DATE.asNode(),
            ResourceFactory.createPlainLiteral(testDateValue).asNode()));
    graph.add(create(createURI(repositoryURL + "/test"),
            RdfLexicon.LAST_MODIFIED_DATE.asNode(),
            ResourceFactory.createPlainLiteral(testDateValue).asNode()));
    graph.add(create(createURI(repositoryURL + "/test"),
            RdfLexicon.HAS_MIXIN_TYPE.asNode(),
            createURI(testMixinType)));
    graph.add(create(createURI(repositoryURL + "/test"),
            RdfLexicon.WRITABLE.asNode(),
            ResourceFactory.createTypedLiteral(Boolean.valueOf(isWritable)).asNode()));
    resource.setGraph(graph);
}
@Before
public void setUp() throws IOException, FedoraException {
    initMocks(this);
    mockRepository.httpHelper = mockHelper;
    when(mockRepository.getRepositoryUrl()).thenReturn(repositoryURL);
    when(mockRepository.getObject(eq("/test"))).thenReturn(mockObject);
    datastream = new FedoraDatastreamImpl(mockRepository, mockHelper, path);
    assertNotNull(datastream);

    final Graph graph = createDefaultGraph();
    graph.add(create(dsSubj, CREATED_DATE.asNode(),
            ResourceFactory.createPlainLiteral(testDateValue).asNode()));
    graph.add(create(dsSubj, LAST_MODIFIED_DATE.asNode(),
            ResourceFactory.createPlainLiteral(testDateValue).asNode()));
    graph.add(create(dsSubj, HAS_MIXIN_TYPE.asNode(), createURI(testMixinType)));
    graph.add(create(dsSubj, WRITABLE.asNode(),
            ResourceFactory.createTypedLiteral(Boolean.valueOf(isWritable)).asNode()));
    graph.add(create(dsSubj, DESCRIBES.asNode(), contentSubj));
    graph.add(create(contentSubj, HAS_SIZE.asNode(),
            ResourceFactory.createPlainLiteral(contentSize).asNode()));
    graph.add(create(contentSubj, HAS_MIME_TYPE.asNode(),
            ResourceFactory.createPlainLiteral(mimeType).asNode()));
    graph.add(create(contentSubj, HAS_ORIGINAL_NAME.asNode(),
            ResourceFactory.createPlainLiteral(filename).asNode()));
    graph.add(create(contentSubj, REST_API_DIGEST.asNode(), createURI(checksum)));
    datastream.setGraph(graph);
}
private Dataset joinDataset(Dataset baseDataset, File indexDir) throws IOException {
    EntityDefinition entDef = new EntityDefinition("entityField", "geoField");
    // you need the JTS lib on the classpath to run the examples
    //entDef.setSpatialContextFactory(SpatialQuery.JTS_SPATIAL_CONTEXT_FACTORY_CLASS);

    // set custom geo predicates
    entDef.addSpatialPredicatePair(
            ResourceFactory.createResource("http://schema.org/latitude"),
            ResourceFactory.createResource("http://schema.org/longitude"));
    /*
    entDef.addSpatialPredicatePair(
            ResourceFactory.createResource("http://localhost/jena_example/#latitude_2"),
            ResourceFactory.createResource("http://localhost/jena_example/#longitude_2"));
    entDef.addWKTPredicate(ResourceFactory.createResource("http://localhost/jena_example/#wkt_1"));
    entDef.addWKTPredicate(ResourceFactory.createResource("http://localhost/jena_example/#wkt_2"));
    */

    // Lucene index in the file system
    Directory dir = FSDirectory.open(indexDir);
    // join together into a dataset
    Dataset ds = SpatialDatasetFactory.createLucene(baseDataset, dir, entDef);
    return ds;
}
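/*
 * Hypothetical usage of the returned dataset with jena-spatial's built-in
 * spatial:nearby property function (latitude, longitude, radius, units);
 * the index directory and coordinates are illustrative.
 */
Dataset ds = joinDataset(baseDataset, new File("target/spatial-index"));
String q = "PREFIX spatial: <http://jena.apache.org/spatial#> "
         + "SELECT ?s WHERE { ?s spatial:nearby (40.4 -3.7 10.0 'km') }";
QueryExecution qe = QueryExecutionFactory.create(q, ds);
ResultSetFormatter.out(qe.execSelect());
qe.close();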