public boolean isDisambiguationResource(String uri) {
    // Lazily load the English, German and Dutch disambiguation datasets on first use.
    if (!linksLoaded) {
        System.out.println(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in1 = FileManager.get().open(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in2 = FileManager.get().open(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in3 = FileManager.get().open(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        model.read(in1, null, "N-TRIPLES");
        System.out.println("Loaded English disambiguation dataset.");
        model.read(in2, null, "N-TRIPLES");
        System.out.println("Loaded German disambiguation dataset.");
        model.read(in3, null, "N-TRIPLES");
        System.out.println("Loaded Dutch disambiguation dataset.");
        linksLoaded = true;
    }
    // A resource is a disambiguation page if it has at least one dbo:wikiPageDisambiguates link.
    StmtIterator iter = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(uri),
            ResourceFactory.createProperty("http://dbpedia.org/ontology/wikiPageDisambiguates"),
            (RDFNode) null));
    return iter.hasNext();
}
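/*
 * Hedged usage sketch (not part of the original source): checking whether a DBpedia URI
 * denotes a disambiguation page. The example URI is an illustrative assumption only.
 */
public void printDisambiguationCheckExample() {
    String uri = "http://dbpedia.org/resource/Mercury"; // hypothetical example URI
    System.out.println(uri + " is a disambiguation page: " + isDisambiguationResource(uri));
}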
/**
 * Walks up the rdfs:subClassOf chain starting from the given URI and collects the
 * subClassOf statements of the class and of every superclass into a new model.
 */
public Model getHierarchyModel(String uri) {
    Model m = ModelFactory.createDefaultModel();
    OntoRecord initRecord = new OntoRecord();
    initRecord.setUri(uri);
    while (initRecord != null) {
        // Collect the rdfs:subClassOf statements of the current class before moving up.
        StmtIterator iter1 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(initRecord.getUri()),
                RDFS.subClassOf,
                (RDFNode) null));
        m.add(iter1);
        initRecord = getSuperclass(initRecord.getUri());
    }
    return m;
}
public OntoRecord getSubclass(String resourceURI, String lang) {
    StmtIterator iter1 = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(resourceURI),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    OntoRecord record = new OntoRecord();
    StmtIterator iter2;
    // Return the first rdfs:subClassOf target that carries an English rdfs:label.
    // Note: the lang parameter is currently unused; "en" is hard-coded below.
    while (iter1.hasNext()) {
        record.setUri(iter1.next().getObject().toString());
        iter2 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(record.getUri()),
                ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#label"),
                (RDFNode) null));
        while (iter2.hasNext()) {
            Literal res = (Literal) iter2.next().getObject();
            if (res.getLanguage().equals("en")) {
                record.setLabel(res.getString());
                return record;
            }
        }
    }
    return null;
}
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
    graph = Factory.createDefaultGraph();
    startingPatternId = (String) conf.get("STARTING_PATTERN_ID");
    Resource subject = null;
    if (conf.get(startingPatternId + "_SUBJECT") != null) {
        subject = ResourceFactory.createResource((String) conf.get(startingPatternId + "_SUBJECT"));
    }
    Property predicate = null;
    if (conf.get(startingPatternId + "_PREDICATE") != null) {
        predicate = ResourceFactory.createProperty((String) conf.get(startingPatternId + "_PREDICATE"));
    }
    Resource object = null;
    if (conf.get(startingPatternId + "_OBJECT") != null) {
        // The object of the pattern is a resource, not a property, so use createResource here.
        object = ResourceFactory.createResource((String) conf.get(startingPatternId + "_OBJECT"));
    }
    startingPattern = new SimpleSelector(subject, predicate, object);
}
private void wireTopology() {
    String fileName = "data/Earthquakes-Spain-2013.ttl";
    String spoutId = "rdfStreamSpout";
    String triple2graph = "triple2graph";
    String graphCounter = "graphCounter";
    String finalCounter = "finalCounter";
    // TODO: Get the triple pattern from the configuration/context
    SimpleSelector startingPattern = new SimpleSelector(
            null,
            ResourceFactory.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
            ResourceFactory.createResource("http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest"));
    builder.setSpout(spoutId, new RDFStreamSpout(fileName));
    builder.setBolt(triple2graph, new Triple2GraphBolt(STARTING_PATTERN_ID)).shuffleGrouping(spoutId);
    builder.setBolt(graphCounter, new RollingCountBolt(15, 3)).fieldsGrouping(triple2graph, new Fields("name"));
    builder.setBolt(finalCounter, new AckerPrinterBolt()).globalGrouping(graphCounter);
}
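/*
 * Hedged configuration sketch (not part of the original source): one way the keys read by
 * the bolt prepare() method above (STARTING_PATTERN_ID and the per-pattern "_SUBJECT",
 * "_PREDICATE", "_OBJECT" keys) could be populated before submitting the topology. The
 * pattern id "PATTERN_1" and the reuse of the FeatureOfInterest pattern are assumptions.
 */
private Config buildExampleConfig() {
    Config conf = new Config();
    conf.put("STARTING_PATTERN_ID", "PATTERN_1");
    conf.put("PATTERN_1_PREDICATE", "http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
    conf.put("PATTERN_1_OBJECT", "http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest");
    // No "PATTERN_1_SUBJECT" key is set, so the subject stays a wildcard (null) in the selector.
    return conf;
}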
public boolean ignoreMessage(Message rcvMessage) {
    boolean hasToIgnore = true;
    try {
        Model model = listenerAdapter.handleIgnoreMessage(rcvMessage);
        StmtIterator stmtiterator = model.listStatements(
                new SimpleSelector(null, RDF.type, this.adapter.getResource()));
        // Handle the message only if it contains at least one resource of the adapter's type.
        hasToIgnore = !stmtiterator.hasNext();
    } catch (Exception excep) {
        // On any parsing/handling error, keep the default and ignore the message.
    }
    return hasToIgnore;
}
protected DimensionSet getGroupDimSet(Resource groupResource)
        throws EmptyDimensionException, NotFoundException {
    Selector selector = new SimpleSelector(groupResource, Datatype.hasDimension, (RDFNode) null);
    StmtIterator iter = model.listStatements(selector);
    DimensionSet dimSet = new DimensionSet();
    while (iter.hasNext()) {
        Statement s = iter.nextStatement();
        Resource dimensionResource = s.getObject().asResource();
        Dimension dim = CarbonOntology.getInstance().getDimension(getId(dimensionResource));
        dimSet.add(dim);
        if (groupResource.hasProperty(Datatype.hasHorizontalDimension, dimensionResource)) {
            dimSet.setDimensionOrientation(dim, Orientation.HORIZONTAL);
        } else if (groupResource.hasProperty(Datatype.hasVerticalDimension, dimensionResource)) {
            dimSet.setDimensionOrientation(dim, Orientation.VERTICAL);
        }
    }
    return dimSet;
}
protected TreeSet<Keyword> getElementKeywords(Resource elementResource) throws MalformedOntologyException {
    Selector selector = new SimpleSelector(elementResource, Datatype.hasTag, (RDFNode) null);
    StmtIterator iter = model.listStatements(selector);
    TreeSet<Keyword> keywords = new TreeSet<>();
    if (iter.hasNext()) {
        while (iter.hasNext()) {
            Statement s = iter.nextStatement();
            keywords.add(RepoFactory.getKeywordRepo().getKeyword(s.getObject().asResource()));
        }
    } else {
        throw new MalformedOntologyException("The element " + elementResource + " has no keyword");
    }
    return keywords;
}
public static boolean checkPrecedence(CompanyModel c) {
    StmtIterator stmtit = c.getModel().listStatements(
            new SimpleSelector(null, c.DEPTS, (RDFNode) null));
    // Collect every department listed in the department bags.
    List<Resource> depts = new LinkedList<Resource>();
    while (stmtit.hasNext()) {
        NodeIterator subDeptsIt = stmtit.next().getBag().iterator();
        while (subDeptsIt.hasNext()) {
            depts.add(subDeptsIt.next().asResource());
        }
    }
    for (Resource dept : depts) {
        // Every employee must earn strictly less than the department's manager.
        double managerSalary = dept.getProperty(c.MANAGER).getProperty(c.SALARY).getDouble();
        NodeIterator employeeIt = dept.getProperty(c.EMPLOYEES).getBag().iterator();
        while (employeeIt.hasNext()) {
            if (!(employeeIt.next().asResource().getProperty(c.SALARY).getDouble() < managerSalary)) {
                return false;
            }
        }
    }
    return true;
}
private void addAnon(Model modelBase, Model modelResource, List<Statement> statementList) {
    // Recursively copy statements whose objects are blank nodes, so the extracted
    // resource model keeps the full anonymous substructure.
    for (int i = 0; i < statementList.size(); i++) {
        Statement statement = statementList.get(i);
        if (statement.getObject().isAnon()) {
            List<Statement> newStatementList = modelBase.listStatements(
                    new SimpleSelector(statement.getObject().asResource(), null, (RDFNode) null)).toList();
            modelResource.add(newStatementList);
            addAnon(modelBase, modelResource, newStatementList);
        }
    }
}
public static String getStringLiteral(Model model, Resource resource, Property property) {
    String result = null;
    StmtIterator listStatements = model.listStatements(
            new SimpleSelector(resource, property, (String) null));
    if (listStatements.hasNext()) {
        result = listStatements.next().getLiteral().getString();
    }
    logger.debug("String literal: " + result);
    return result;
}
public static long getLongLiteral(Model model, Resource resource, Property property) {
    long result = 0;
    StmtIterator listStatements = model.listStatements(
            new SimpleSelector(resource, property, (Long) null));
    if (listStatements.hasNext()) {
        result = listStatements.next().getLiteral().getLong();
    }
    logger.debug("Long literal: " + result);
    return result;
}
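/*
 * Hedged usage sketch (not part of the original source): exercising the two literal helpers
 * above against a small in-memory model. The example URI and the "size" property are
 * illustrative assumptions only.
 */
public static void literalHelpersExample() {
    Model m = ModelFactory.createDefaultModel();
    Resource thing = m.createResource("http://example.org/thing");
    Property size = m.createProperty("http://example.org/size");
    thing.addProperty(RDFS.label, "Example thing");
    thing.addLiteral(size, 42L);
    String label = getStringLiteral(m, thing, RDFS.label); // "Example thing"
    long value = getLongLiteral(m, thing, size);           // 42
}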
public String mapEnResource(String h) {
    StmtIterator iter = enModel.listStatements(new SimpleSelector(
            ResourceFactory.createResource(h),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    // Returns the last rdfs:subClassOf object found, or null if there is none.
    String ontoType = null;
    while (iter.hasNext()) {
        ontoType = iter.next().getObject().toString();
    }
    return ontoType;
}
public String mapDeResource(String h) {
    StmtIterator iter = deModel.listStatements(new SimpleSelector(
            ResourceFactory.createResource(h),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    // Returns the last rdfs:subClassOf object found, or null if there is none.
    String ontoType = null;
    while (iter.hasNext()) {
        ontoType = iter.next().getObject().toString();
    }
    return ontoType;
}
public String mapNlResource(String h) {
    StmtIterator iter = nlModel.listStatements(new SimpleSelector(
            ResourceFactory.createResource(h),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    // Returns the last rdfs:subClassOf object found, or null if there is none.
    String ontoType = null;
    while (iter.hasNext()) {
        ontoType = iter.next().getObject().toString();
    }
    return ontoType;
}
public OntoRecord getSuperclass(String resourceURI) {
    StmtIterator iter1 = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(resourceURI),
            RDFS.subClassOf,
            (RDFNode) null));
    OntoRecord record = new OntoRecord();
    // Return the first direct superclass, if any.
    while (iter1.hasNext()) {
        record.setUri(iter1.next().getObject().toString());
        record.setLabel("Test label"); // placeholder label
        return record;
    }
    return null;
}
public static void test2() {
    StmtIterator iter1 = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource("http://dbpedia.org/ontology/RouteOfTransportation"),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#label"),
            (RDFNode) null));
    // Guard against an empty iterator before dereferencing the first statement.
    if (iter1.hasNext()) {
        Literal res = (Literal) iter1.next().getObject();
        if (res.getLanguage().equals("de")) {
            // No-op: the original test body only checks for a German label.
        }
    }
}
@Override
public void prepare(Map conf, TridentOperationContext context) {
    super.prepare(conf, context);
    graph = Factory.createDefaultGraph();
    // TODO: Get the triple pattern from the configuration/context
    statementPattern = new SimpleSelector(
            null,
            ResourceFactory.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
            ResourceFactory.createResource("http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest"));
    // Debugging aid kept from the original:
    // System.out.println("PRINTED PATTERN: " + statementPattern.toString());
    // Statement aux = ResourceFactory.createStatement(
    //         ResourceFactory.createResource("http://earthquakes.linkeddata.es/1185937"),
    //         ResourceFactory.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
    //         ResourceFactory.createResource("http://purl.oclc.org/NET/ssnx/ssn#FeatureOfInterest"));
    // System.out.println("PRINTED TRIPLE: " + aux);
    // System.out.println("MATCHING?? " + statementPattern.test(aux));
}
public void extendResourcesProperties(Model returnModel, Model resultModel) {
    StmtIterator statementIterator_createModel = this.createModel.listStatements(
            new SimpleSelector((Resource) null, Omn.hasResource, (Object) null));
    while (statementIterator_createModel.hasNext()) {
        Statement requestedResourceStatement = statementIterator_createModel.nextStatement();
        Resource requestedResource = requestedResourceStatement.getObject().asResource();
        String resource_id = this.createModel.getRequiredProperty(requestedResource, Omn_lifecycle.hasID).getString();
        Resource resourceProperties = this.createModel.getResource(requestedResource.getURI());
        // Find the created resource carrying the same id and copy over any properties
        // the requested resource does not have yet.
        StmtIterator stmtIterator = resultModel.listStatements(
                new SimpleSelector((Resource) null, Omn_lifecycle.hasID, (Object) resource_id));
        while (stmtIterator.hasNext()) {
            Statement statement = stmtIterator.nextStatement();
            Resource createdResource = statement.getSubject();
            StmtIterator stmtIter = resultModel.listStatements(
                    new SimpleSelector(createdResource, (Property) null, (RDFNode) null));
            while (stmtIter.hasNext()) {
                Statement createdStatement = stmtIter.nextStatement();
                if (!resourceProperties.hasProperty(createdStatement.getPredicate(), createdStatement.getObject())) {
                    Statement stmt = new StatementImpl(requestedResource,
                            createdStatement.getPredicate(), createdStatement.getObject());
                    returnModel.add(stmt);
                }
            }
        }
    }
}
@Override
public Model handleCreate(Model createModel) {
    StringWriter sw = new StringWriter();
    StmtIterator iter = createModel.listStatements(new SimpleSelector(null, RDF.type, epcClientResource));
    while (iter.hasNext()) {
        Resource currentResource = iter.nextStatement().getSubject();
        for (Property currentProperty : epcClientProperties) {
            StmtIterator iter2 = currentResource.listProperties(currentProperty);
            // Always advance the iterator, so unexpected properties cannot loop forever.
            while (iter2.hasNext()) {
                Statement stmt = iter2.nextStatement();
                if (currentProperty == username_property) {
                    setNewUser(stmt.getObject().asLiteral().getString());
                } else if (currentProperty == sshKey_property) {
                    setSshKey(stmt.getObject().asLiteral().getString());
                }
            }
        }
    }
    start();
    Model newModel = ModelFactory.createDefaultModel();
    Resource epcClientRes = newModel.createResource("http://fiteagleinternal#");
    epcClientRes.addProperty(RDFS.label, "epcClient");
    epcClientRes.addProperty(RDFS.comment, modelGeneral.createLiteral(
            "a new user called " + getNewUser() + " is added to the directory. IP " + getIp()));
    return newModel;
}
@Override
public Model handleRelease(Model model) {
    StmtIterator iter = model.listStatements(new SimpleSelector(null, RDF.type, epcClientResource));
    while (iter.hasNext()) {
        Resource currentResource = iter.nextStatement().getSubject();
        for (Property currentProperty : epcClientProperties) {
            StmtIterator iter2 = currentResource.listProperties(currentProperty);
            // Always advance the iterator, so properties other than the username cannot loop forever.
            while (iter2.hasNext()) {
                Statement stmt = iter2.nextStatement();
                if (currentProperty == username_property) {
                    setNewUser(stmt.getObject().asLiteral().getString());
                }
            }
        }
    }
    stop();
    Model newModel = ModelFactory.createDefaultModel();
    Resource epcClientRes = newModel.createResource("http://fiteagleinternal#");
    epcClientRes.addProperty(RDFS.label, "epcClient");
    epcClientRes.addProperty(RDFS.comment, modelGeneral.createLiteral(
            "The user " + getNewUser() + " is deleted from the directory "));
    return newModel;
}
@Override
public Model handleCreate(Model createModel) {
    StmtIterator iter = createModel.listStatements(
            new SimpleSelector(null, RDF.type, epcMeasurementServerResource));
    while (iter.hasNext()) {
        Resource currentResource = iter.nextStatement().getSubject();
        for (Property currentProperty : epcMeasurementServerProperties) {
            StmtIterator iter2 = currentResource.listProperties(currentProperty);
            // Always advance the iterator, so unexpected properties cannot loop forever.
            while (iter2.hasNext()) {
                Statement stmt = iter2.nextStatement();
                if (currentProperty == username_property) {
                    setNewUser(stmt.getObject().asLiteral().getString());
                } else if (currentProperty == sshKey_property) {
                    setSshKey(stmt.getObject().asLiteral().getString());
                }
            }
        }
    }
    start();
    Model newModel = ModelFactory.createDefaultModel();
    Resource epcMeasurementServerRes = newModel.createResource("http://fiteagleinternal#");
    epcMeasurementServerRes.addProperty(RDFS.label, "epcMeasurementServer");
    epcMeasurementServerRes.addProperty(RDFS.comment, modelGeneral.createLiteral(
            "a new user called " + getNewUser() + " is added to the directory. IP " + getIp()));
    return newModel;
}
@Override
public Model handleRelease(Model model) {
    StmtIterator iter = model.listStatements(
            new SimpleSelector(null, RDF.type, epcMeasurementServerResource));
    while (iter.hasNext()) {
        Resource currentResource = iter.nextStatement().getSubject();
        for (Property currentProperty : epcMeasurementServerProperties) {
            StmtIterator iter2 = currentResource.listProperties(currentProperty);
            // Always advance the iterator, so properties other than the username cannot loop forever.
            while (iter2.hasNext()) {
                Statement stmt = iter2.nextStatement();
                if (currentProperty == username_property) {
                    setNewUser(stmt.getObject().asLiteral().getString());
                }
            }
        }
    }
    stop();
    Model newModel = ModelFactory.createDefaultModel();
    Resource epcMeasurementServerRes = newModel.createResource("http://fiteagleinternal#");
    epcMeasurementServerRes.addProperty(RDFS.label, "epcMeasurementServer");
    epcMeasurementServerRes.addProperty(RDFS.comment, modelGeneral.createLiteral(
            "The user " + getNewUser() + " is deleted from the directory "));
    return newModel;
}
private Model runInference(Model data, URL rules, int lineLength, int maxLineLength) throws IOException {
    Reasoner reasoner = new GenericRuleReasoner(Rule.rulesFromURL(rules.toString()));
    InfModel inf = ModelFactory.createInfModel(reasoner, data);
    // Break long literals (more than lineLength chars) using carriage returns.
    Model remove = ModelFactory.createDefaultModel();
    Model add = ModelFactory.createDefaultModel();
    Selector sel = new SimpleSelector(null, null, (String) null);
    for (StmtIterator sIt = inf.listStatements(sel); sIt.hasNext();) {
        Statement s = sIt.nextStatement();
        if (!s.getObject().isLiteral()) {
            continue;
        }
        String l = s.getString();
        String lp = paginate(l, lineLength, maxLineLength);
        if (lp.length() != l.length()) {
            remove.add(s);
            add.add(s.getSubject(), s.getPredicate(), lp, s.getLanguage());
        }
    }
    inf.remove(remove);
    inf.add(add);
    return inf;
}
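/*
 * Hedged usage sketch (not part of the original source): applying runInference() to a data
 * file with a rule file on the classpath. The file name, the resource path and the
 * line-length values are illustrative assumptions only.
 */
private Model runInferenceExample() throws IOException {
    Model data = FileManager.get().loadModel("data/input.ttl");     // hypothetical input file
    URL rules = getClass().getResource("/rules/formatting.rules");  // hypothetical rule file
    return runInference(data, rules, 80, 120);
}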
protected TreeSet<Keyword> getGroupCommonKeywords(Resource groupResource) {
    Selector selector = new SimpleSelector(groupResource, Datatype.hasCommonTag, (RDFNode) null);
    StmtIterator iter = model.listStatements(selector);
    TreeSet<Keyword> keywords = new TreeSet<>();
    while (iter.hasNext()) {
        Statement s = iter.nextStatement();
        keywords.add(RepoFactory.getKeywordRepo().getKeyword(s.getObject().asResource()));
    }
    return keywords;
}
public HashSet<RelationType> getRelationTypes() {
    Selector selector = new SimpleSelector(null, RDF.type, Datatype.RelationType);
    StmtIterator iter = model.listStatements(selector);
    HashSet<RelationType> types = new HashSet<>();
    while (iter.hasNext()) {
        Statement s = iter.nextStatement();
        types.add(getRelationType(s.getSubject()));
    }
    return types;
}
public HashMap<String, Reference> getReferences() {
    Selector selector = new SimpleSelector(null, RDF.type, Datatype.Reference);
    StmtIterator iter = model.listStatements(selector);
    // getReference() populates refCache as a side effect.
    while (iter.hasNext()) {
        getReference(iter.next().getSubject());
    }
    return refCache;
}
private Set<String> getTypesOfSubject(Resource subject) {
    Set<String> ret = new HashSet<String>();
    StmtIterator stiter2 = model.listStatements(new SimpleSelector(subject, RDF.type, (RDFNode) null));
    while (stiter2.hasNext()) {
        String uri = stiter2.next().getObject().asResource().getURI();
        // Keep only OpenCyc types.
        if (uri.startsWith("http://sw.opencyc.org")) {
            ret.add(uri);
        }
    }
    return ret;
}
public static List<Statement> getBestMatchingStatements(OntModel ontology, StringMetric metric, String term) {
    StmtIterator iter = ontology.listStatements(new SimpleSelector(null, RDFS.label, (RDFNode) null));
    double maxSimilarity = Double.MIN_VALUE;
    List<Statement> bestChoices = new LinkedList<Statement>();
    while (iter.hasNext()) {
        Statement st = iter.next();
        String objectStr = st.getObject().asLiteral().getString();
        double similarity = metric.getSimilarity(term, objectStr);
        if (similarity <= 0) {
            continue;
        }
        if (similarity > maxSimilarity) {
            // New best score: reset the candidate list and keep this statement.
            maxSimilarity = similarity;
            bestChoices.clear();
            bestChoices.add(st);
        } else if (similarity == maxSimilarity) {
            bestChoices.add(st);
        }
    }
    return bestChoices;
}
@Override
public void update(Set<Statement> stmts) throws IllegalStatementException,
        InconsistentOntologyException, OntologyServerException {
    Set<Statement> stmtsToRemove = new HashSet<Statement>();
    for (Statement stmt : stmts) {
        // For functional properties, drop any existing values before adding the new one.
        if (functionalProperties.contains(stmt.getPredicate())) {
            Selector selector = new SimpleSelector(stmt.getSubject(), stmt.getPredicate(), (RDFNode) null);
            StmtIterator stmtsToRemoveIt = null;
            try {
                stmtsToRemoveIt = onto.listStatements(selector);
                stmtsToRemove.addAll(stmtsToRemoveIt.toSet());
            } catch (org.mindswap.pellet.exceptions.InconsistentOntologyException ioe) {
                Logger.log("The ontology is in an inconsistent state! I couldn't "
                        + "update any statements.\n ", VerboseLevel.WARNING);
                throw new InconsistentOntologyException("The ontology is in an inconsistent state! "
                        + "I couldn't update any statements.");
            }
        }
    }
    remove(stmtsToRemove);
    add(stmts, MemoryProfile.DEFAULT, false);
}
public HashSet getYagoHypernyms(String entityTitle, String entityURI, String lang, String origin) {
    HashSet hypernymsList = new HashSet();
    try {
        DBCursor cursor = db.getCollection("entities_yago")
                .find(new BasicDBObject().append("uri", entityURI));
        Model mainModel = ModelFactory.createDefaultModel();
        while (cursor.hasNext()) {
            DBObject resObj = cursor.next();
            BasicDBList types = (BasicDBList) resObj.get("types");
            if (types != null) {
                for (int i = 0; i < types.size(); i++) {
                    DBObject type = (DBObject) types.get(i);
                    // Yago type (pointer into the Yago taxonomy).
                    Hypernym hyp1 = new Hypernym();
                    hyp1.setEntityURL(entityURI);
                    hyp1.setEntity(entityTitle);
                    hyp1.setTypeURL(type.get("uri").toString());
                    hyp1.setType(type.get("label").toString());
                    hyp1.setOrigin(origin);
                    hyp1.setAccuracy("-1.0");
                    hypernymsList.add(hyp1);
                    // Model.union() returns a new model and leaves the receiver untouched,
                    // so accumulate the hierarchy statements with add() instead.
                    mainModel.add(getHierarchyModel(type.get("uri").toString()));
                }
            }
        }
        StmtIterator iter = mainModel.listStatements(
                new SimpleSelector((Resource) null, null, (RDFNode) null));
        while (iter.hasNext()) {
            Statement stm = iter.next();
            Hypernym hyp = new Hypernym();
            hyp.setEntityURL(entityURI);
            hyp.setEntity(entityTitle);
            hyp.setTypeURL(stm.getObject().toString());
            hyp.setOrigin(origin);
            hyp.setAccuracy("-1.0");
            String typeLabel = getYagoTypeLabel(stm.getObject().toString());
            if (typeLabel != null) {
                hyp.setType(typeLabel);
                hypernymsList.add(hyp);
            }
        }
    } catch (Exception ex) {
        Logger.getLogger(YagoOntologyManager.class.getName())
                .log(Level.SEVERE, "Problem with the mongodb client.", ex);
    }
    return hypernymsList;
}
public void test() {
    StmtIterator iter = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource("http://dbpedia.org/ontology/Stadium"),
            null,
            (RDFNode) null));
    while (iter.hasNext()) {
        iter.next(); // Drain the iterator; the original test body was empty and never advanced it.
    }
}
@Override
public void clear(PartialStatement partialStmt) throws OntologyServerException {
    Logger.log("Clearing statements matching [" + partialStmt + "]\n");
    Selector selector = new SimpleSelector(partialStmt.getSubject(),
            partialStmt.getPredicate(), partialStmt.getObject());
    StmtIterator stmtsToRemove = null;
    Set<Statement> setToRemove = new HashSet<Statement>();
    stmtsToRemove = onto.listStatements(selector);
    if (stmtsToRemove != null) {
        setToRemove = stmtsToRemove.toSet();
    }
    remove(setToRemove);
}
public void test3() {
    StmtIterator iter = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource("http://yago-knowledge.org/resource/wikicategory_Category:Music_competitions"),
            RDFS.subClassOf,
            (RDFNode) null));
    while (iter.hasNext()) {
        iter.next(); // Drain the iterator; the original test body was empty and never advanced it.
    }
}