public Set<String> initializeCategories() {
    Model model = ModelFactory.createDefaultModel();
    model.read("/home/zwicklbauer/HDTGeneration/skos_categories_en.nt");
    StmtIterator it = model.listStatements();
    Set<String> set = new HashSet<String>();
    System.out.println("Here we go");
    while (it.hasNext()) {
        Statement s = it.next();
        Resource r = s.getSubject();
        Property p = s.getPredicate();
        RDFNode n = s.getObject();
        // Collect both ends of every skos:broader link, skipping
        // categories that still have sub-categories
        if (p.getURI().equalsIgnoreCase("http://www.w3.org/2004/02/skos/core#broader")
                && n.isResource()) {
            Resource target = n.asResource();
            if (!hasSubCategory(target.getURI()))
                set.add(target.getURI());
            if (!hasSubCategory(r.getURI()))
                set.add(r.getURI());
        }
    }
    return set;
}
public static void main(String[] args) {
    // Set up the ModelD2RQ using a mapping file
    ModelD2RQ m = new ModelD2RQ("file:doc/example/mapping-iswc.ttl");

    // Find anything with an rdf:type of iswc:InProceedings
    StmtIterator paperIt = m.listStatements(null, RDF.type, ISWC.InProceedings);

    // List found papers and print their titles
    while (paperIt.hasNext()) {
        Resource paper = paperIt.nextStatement().getSubject();
        System.out.println("Paper: " + paper.getProperty(DC.title).getString());

        // List authors of the paper and print their names
        StmtIterator authorIt = paper.listProperties(DC.creator);
        while (authorIt.hasNext()) {
            Resource author = authorIt.nextStatement().getResource();
            System.out.println("Author: " + author.getProperty(FOAF.name).getString());
        }
        System.out.println();
    }
    m.close();
}
public Collection<Resource> getUndefinedResources(Model model) {
    Set<Resource> result = new HashSet<Resource>();
    StmtIterator it = model.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        // Subjects in our namespace that have no definition yet
        if (stmt.getSubject().isURIResource()
                && stmt.getSubject().getURI().startsWith(namespace)
                && !resources.contains(stmt.getSubject())) {
            result.add(stmt.getSubject());
        }
        // Don't treat the objects of rdf:type statements as undefined resources
        if (stmt.getPredicate().equals(RDF.type)) continue;
        // Objects in our namespace that have no definition yet
        if (stmt.getObject().isURIResource()
                && stmt.getResource().getURI().startsWith(namespace)
                && !resources.contains(stmt.getResource())) {
            result.add(stmt.getResource());
        }
    }
    return result;
}
private void parseConfiguration() {
    Iterator<Individual> it = this.model.listIndividuals(D2RQ.Configuration);
    if (it.hasNext()) {
        Resource configResource = it.next();
        Configuration configuration = new Configuration(configResource);
        StmtIterator stmts = configResource.listProperties(D2RQ.serveVocabulary);
        while (stmts.hasNext()) {
            configuration.setServeVocabulary(stmts.nextStatement().getBoolean());
        }
        stmts = configResource.listProperties(D2RQ.useAllOptimizations);
        while (stmts.hasNext()) {
            configuration.setUseAllOptimizations(stmts.nextStatement().getBoolean());
        }
        this.mapping.setConfiguration(configuration);
        if (it.hasNext())
            throw new D2RQException("Only one configuration block is allowed");
    }
}
private void parseDownloadMap(DownloadMap dm, Resource r) {
    StmtIterator stmts;
    stmts = r.listProperties(D2RQ.dataStorage);
    while (stmts.hasNext()) {
        dm.setDatabase(mapping.database(stmts.nextStatement().getResource()));
    }
    stmts = r.listProperties(D2RQ.belongsToClassMap);
    while (stmts.hasNext()) {
        dm.setBelongsToClassMap(mapping.classMap(stmts.nextStatement().getResource()));
    }
    stmts = r.listProperties(D2RQ.contentDownloadColumn);
    while (stmts.hasNext()) {
        dm.setContentDownloadColumn(stmts.nextStatement().getString());
    }
    stmts = r.listProperties(D2RQ.mediaType);
    while (stmts.hasNext()) {
        dm.setMediaType(stmts.nextStatement().getString());
    }
}
public List<RDFNode> getRDFNodes(Resource r, Property p, NodeType acceptableNodes) {
    List<RDFNode> result = new ArrayList<RDFNode>();
    StmtIterator it = r.listProperties(p);
    while (it.hasNext()) {
        Statement stmt = it.next();
        remainingTriples.remove(stmt);
        if (acceptableNodes.isTypeOf(stmt.getObject())) {
            result.add(stmt.getObject());
        } else {
            // Try to coerce the node into an acceptable type; the problem
            // is reported either way
            RDFNode coerced = acceptableNodes.coerce(stmt.getObject());
            if (coerced != null) {
                result.add(coerced);
            }
            report.report(acceptableNodes.ifNot, r, p, stmt.getObject());
        }
    }
    Collections.sort(result, RDFComparator.getRDFNodeComparator());
    return result;
}
/**
 * Runs a given Jena Query on a given instance and adds the inferred triples
 * to a given Model.
 * @param queryWrapper  the wrapper of the CONSTRUCT query to execute
 * @param queryModel  the query Model
 * @param newTriples  the Model to write the triples to
 * @param instance  the instance to run the inferences on
 * @param checkContains  true to only call add if a Triple wasn't there yet
 * @return true if changes were done (only meaningful if checkContains == true)
 */
public static boolean runQueryOnInstance(QueryWrapper queryWrapper, Model queryModel, Model newTriples, Resource instance, boolean checkContains) {
    boolean changed = false;
    QueryExecution qexec = ARQFactory.get().createQueryExecution(queryWrapper.getQuery(), queryModel);
    try {
        QuerySolutionMap bindings = new QuerySolutionMap();
        bindings.add(SPIN.THIS_VAR_NAME, instance);
        Map<String, RDFNode> initialBindings = queryWrapper.getTemplateBinding();
        if (initialBindings != null) {
            for (String varName : initialBindings.keySet()) {
                bindings.add(varName, initialBindings.get(varName));
            }
        }
        qexec.setInitialBinding(bindings);
        Model cm = qexec.execConstruct();
        StmtIterator cit = cm.listStatements();
        while (cit.hasNext()) {
            Statement s = cit.nextStatement();
            if (!checkContains || !queryModel.contains(s)) {
                changed = true;
                newTriples.add(s);
            }
        }
        return changed;
    } finally {
        // Release the query execution's resources
        qexec.close();
    }
}
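A minimal sketch of how the method above might be driven. The helper name and the idea of collecting inferences into a fresh Model are assumptions; constructing the SPIN QueryWrapper (e.g. via SPINQueryFinder) is left to the surrounding setup.

// Hypothetical helper: collect the triples inferred for one instance into a
// fresh Model. The QueryWrapper is passed in because building one depends on
// the surrounding SPIN setup.
public static Model inferForInstance(QueryWrapper queryWrapper, Model queryModel, Resource instance) {
    Model newTriples = ModelFactory.createDefaultModel();
    // checkContains == true: only triples not already in queryModel are added
    runQueryOnInstance(queryWrapper, queryModel, newTriples, instance, true);
    return newTriples;
}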
private Integer getNumberOfLinks(String nif) {
    Model model = ModelFactory.createDefaultModel();
    // Parse explicitly as UTF-8 rather than relying on the platform default charset
    model.read(new ByteArrayInputStream(nif.getBytes(StandardCharsets.UTF_8)), null, "TTL");
    StmtIterator iter = model.listStatements();
    int result = 0;
    while (iter.hasNext()) {
        Statement stm = iter.nextStatement();
        // Count statements whose predicate is the NIF identRef property
        if (NIF21Format.RDF_PROPERTY_IDENTREF.equals(stm.getPredicate().getURI())) {
            result += 1;
        }
    }
    return result;
}
static ArrayList<String> getAllEntityEvents(Dataset dataset, String entity) {
    ArrayList<String> events = new ArrayList<String>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();
        // Skip the instance and provenance graphs
        if (!name.equals(instanceGraph) && !name.equals(provenanceGraph)) {
            Model namedModel = dataset.getNamedModel(name);
            StmtIterator siter = namedModel.listStatements();
            while (siter.hasNext()) {
                Statement s = siter.nextStatement();
                String object = getObjectValue(s).toLowerCase();
                // Case-insensitive substring match against the entity name
                if (object.contains(entity.toLowerCase())) {
                    String subject = s.getSubject().getURI();
                    if (!events.contains(subject)) {
                        events.add(subject);
                    }
                }
            }
        }
    }
    return events;
}
public ResourceDescriptor[] getAllNoteDescriptors(String outlineUri) {
    Model outlineModel;
    try {
        outlineModel = MindRaider.outlineCustodian.getModel(outlineUri);
        Property property = RDF.type;
        String literal = MindRaiderConstants.MR_OWL_CLASS_CONCEPT;
        StmtIterator i = outlineModel.listStatements(
                (com.hp.hpl.jena.rdf.model.Resource) null,
                property,
                outlineModel.getResource(literal));
        ArrayList<String> noteUris = new ArrayList<String>();
        while (i.hasNext()) {
            Statement s = i.nextStatement();
            noteUris.add(s.getSubject().getURI());
        }
        return getDescriptorsForNoteUris(true, outlineModel,
                noteUris.toArray(new String[noteUris.size()]));
    } catch (Exception e) {
        logger.debug("Unable to get resource descriptors", e); // {{debug}}
    }
    return null;
}
/**
 * Get URIs of all folders.
 *
 * @return the URI String array
 */
public String[] getFolderUris() {
    // TODO cope with discarded folders
    ArrayList<String> result = new ArrayList<String>();
    StmtIterator i = labelsModel.getModel().listStatements(
            null,
            RDF.type,
            labelsModel.getResource(MindRaiderConstants.MR_OWL_CLASS_FOLDER));
    while (i.hasNext()) {
        String folderUri = i.nextStatement().getSubject().getURI();
        if (!LABEL_TRASH_URI.equals(folderUri)) {
            result.add(folderUri);
        }
    }
    if (result.size() == 0) {
        return null;
    }
    return result.toArray(new String[result.size()]);
}
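A short, illustrative call site for getFolderUris(), showing the null return (documented above for the empty case) being handled; the printing is just an example.

// Illustrative only: getFolderUris() returns null when no folders exist,
// so guard before iterating.
String[] folderUris = getFolderUris();
if (folderUris != null) {
    for (String folderUri : folderUris) {
        System.out.println("Folder: " + folderUri);
    }
}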
/**
 * Create a statement.
 *
 * @param subject
 *            the subject resource
 * @param predicateUri
 *            the predicate URI
 * @param objectUri
 *            the object URI (or the literal value if <code>literal</code> is set)
 * @param literal
 *            whether the object is a literal rather than a resource
 * @return a Statement object
 */
public Statement createStatement(Resource subject, String predicateUri, String objectUri, boolean literal) {
    RDFNode objectResource;
    Property predicateResource = model.createProperty(predicateUri);
    if (literal) {
        objectResource = model.createLiteral(objectUri);
    } else {
        objectResource = model.createResource(objectUri);
    }
    subject.addProperty(predicateResource, objectResource);
    StmtIterator i = model.listStatements(subject, predicateResource, objectResource);
    if (i.hasNext()) {
        return i.nextStatement();
    }
    JOptionPane.showMessageDialog(
            MindRaider.mainJFrame,
            "Unable to fetch statement.",
            "Error",
            JOptionPane.ERROR_MESSAGE);
    return null;
}
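A hedged usage sketch for createStatement from within the same class; the example.org URIs and the note resource are illustrative assumptions, not part of the original code.

// Illustrative usage; the example.org URIs are placeholders.
Resource note = model.createResource("http://example.org/notes#1");
// Object as a resource:
Statement link = createStatement(note, "http://example.org/ns#relatedTo",
        "http://example.org/notes#2", false);
// Object as a literal:
Statement title = createStatement(note, "http://purl.org/dc/elements/1.1/title",
        "My first note", true);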
<T> List<T> getPropertyValues(final Resource resource, final String property, final Function<Statement, Optional<T>> filter) {
    Preconditions.checkState(this.model != null);
    final StmtIterator stmts = resource.listProperties(this.model.createProperty(property));
    try {
        final List<T> values = Lists.newArrayList();
        while (stmts.hasNext()) {
            final Statement st = stmts.nextStatement();
            final Optional<T> result = filter.apply(st);
            if (result.isPresent()) {
                values.add(result.get());
            }
        }
        return values;
    } finally {
        stmts.close();
    }
}
@Override
public Void execute() throws RuntimeException {
    // Iterate over all resources typed as the given class
    final StmtIterator iterator = this.model.listStatements(
            null,
            RDF.type,
            this.model.createProperty(this.propertyName));
    try {
        while (iterator.hasNext()) {
            handler(iterator.next());
        }
        return null;
    } finally {
        iterator.close();
    }
}
public boolean isDisambiguationResource(String uri) {
    if (!linksLoaded) {
        System.out.println(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        System.out.println(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in1 = FileManager.get().open(Settings.EN_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in2 = FileManager.get().open(Settings.DE_DBPEDIA_DISAMBIGUATION_DATASET);
        InputStream in3 = FileManager.get().open(Settings.NL_DBPEDIA_DISAMBIGUATION_DATASET);
        model.read(in1, null, "N-TRIPLES");
        System.out.println("Loaded English disambiguation dataset.");
        model.read(in2, null, "N-TRIPLES");
        System.out.println("Loaded German disambiguation dataset.");
        model.read(in3, null, "N-TRIPLES");
        System.out.println("Loaded Dutch disambiguation dataset.");
        linksLoaded = true;
    }
    // The resource is a disambiguation page if it has at least one
    // dbpedia-owl:wikiPageDisambiguates link
    StmtIterator iter = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(uri),
            ResourceFactory.createProperty("http://dbpedia.org/ontology/wikiPageDisambiguates"),
            (RDFNode) null));
    return iter.hasNext();
}
public Model getHierarchyModel(String uri) {
    // Builds a model containing the rdfs:subClassOf statements for the
    // given class and each class on its superclass chain
    Model m = ModelFactory.createDefaultModel();
    OntoRecord record = new OntoRecord();
    record.setUri(uri);
    while (record != null) {
        StmtIterator iter1 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(record.getUri()),
                RDFS.subClassOf,
                (RDFNode) null));
        m.add(iter1);
        record = getSuperclass(record.getUri());
    }
    return m;
}
public OntoRecord getSubclass(String resourceURI, String lang) {
    StmtIterator iter1 = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(resourceURI),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    OntoRecord record = new OntoRecord();
    StmtIterator iter2;
    while (iter1.hasNext()) {
        record.setUri(iter1.next().getObject().toString());
        iter2 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(record.getUri()),
                ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#label"),
                (RDFNode) null));
        while (iter2.hasNext()) {
            Literal res = (Literal) iter2.next().getObject();
            // Return the label in the requested language
            if (res.getLanguage().equals(lang)) {
                record.setLabel(res.getString());
                return record;
            }
        }
    }
    return null;
}
private void addSkosBroaderToGraph() {
    Model m = ModelFactory.createDefaultModel();
    m.read(SKOSBROADER);
    StmtIterator it = m.listStatements();
    while (it.hasNext()) {
        Statement s = it.next();
        Resource subject = s.getSubject();
        RDFNode object = s.getObject();
        if (object.isResource()) {
            Resource obj = object.asResource();
            // Only link DBpedia resources, and skip self-loops
            if (obj.getURI().startsWith("http://dbpedia.org/resource/")) {
                if (!subject.getURI().equalsIgnoreCase(obj.getURI())) {
                    graph.addVertex(subject.getURI());
                    graph.addVertex(obj.getURI());
                    graph.addEdge(subject.getURI(), obj.getURI());
                }
            }
        }
    }
}
public static void main(String[] args) {
    HDT hdt = null;
    try {
        hdt = HDTManager.mapIndexedHDT(TYPES, null);
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }
    HDTGraph graph = new HDTGraph(hdt);
    Model m = ModelFactory.createModelForGraph(graph);
    StmtIterator iter = m.listStatements();
    HashMap<String, Integer> hash = new HashMap<String, Integer>();
    int number = 0;
    while (iter.hasNext()) {
        if (number % 50000 == 0) {
            System.out.println("Processed entries: " + number);
        }
        Statement stmt = iter.next();
        RDFNode object = stmt.getObject();
        // Collect each distinct type URI
        String s = object.asResource().getURI();
        hash.put(s, 0);
        number++;
    }
    System.out.println("Number of types: " + hash.size());
}
public void sportsTeamsSurfaceForms() {
    Model m = ModelFactory.createDefaultModel();
    m.read(INSTANCEMAPPINGTYPES_NT);
    StmtIterator it = m.listStatements();
    while (it.hasNext()) {
        Statement s = it.next();
        Resource subject = s.getSubject();
        RDFNode object = s.getObject();
        if (object.isResource()) {
            Resource obj = object.asResource();
            // Remember every instance typed as dbpedia-owl:SportsTeam
            if (obj.getURI().equalsIgnoreCase("http://dbpedia.org/ontology/SportsTeam")) {
                teams.add(subject.getURI());
            }
        }
    }
}
/**
 * Look up <code>uri</code> in the ontology and return a list of parent
 * concepts (URIs). Synonyms are not considered. The list contains no
 * duplicates. Never returns <code>null</code>.
 *
 * @param uri
 *            URI of the concept to be looked up
 * @return a list of parent concept URIs
 */
// TODO add all synonyms of the parents to the result
public List<String> getParents(String uri) {
    Resource resource = model.getResource(uri);
    if (resource == null)
        return Collections.emptyList();
    List<String> result = new ArrayList<String>();
    StmtIterator it = resource.listProperties(RDFS.subClassOf);
    while (it.hasNext()) {
        RDFNode parent = it.nextStatement().getObject();
        if (parent.isResource() && !parent.isAnon()
                /*&& !((Resource) parent).hasLiteral(Jura.invisible, true)*/) {
            result.add(((Resource) parent).getURI());
        }
    }
    return result;
}
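Since getParents() only returns direct parents, a caller that needs the full ancestor set has to iterate. A minimal breadth-first sketch; the helper name and traversal strategy are assumptions, not part of the original class.

// Hypothetical helper built on getParents(): collects all transitive
// ancestors of a concept, using the 'seen' set to avoid cycles.
public Set<String> getAncestors(String uri) {
    Set<String> seen = new HashSet<String>();
    Deque<String> queue = new ArrayDeque<String>(getParents(uri));
    while (!queue.isEmpty()) {
        String parent = queue.poll();
        if (seen.add(parent)) {
            queue.addAll(getParents(parent));
        }
    }
    return seen;
}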
protected static Set<Resource> findUnlinkedDolceClasses(Model classesModel, Model dolceClassModel) {
    Set<Resource> unlinkedClasses = new HashSet<Resource>();
    StmtIterator stmtIterator = dolceClassModel.listStatements(null, RDF.type, RDFS.Class);
    Statement statement;
    Resource resource;
    while (stmtIterator.hasNext()) {
        statement = stmtIterator.next();
        resource = statement.getSubject();
        // If this class has no equivalent class and no sub classes
        if ((!classesModel.contains(resource, OWL.equivalentClass))
                && (!classesModel.contains(null, OWL.equivalentClass, resource))
                && (!classesModel.contains(null, RDFS.subClassOf, resource))) {
            unlinkedClasses.add(resource);
        }
    }
    // We are searching the higher classes on the complete model to make
    // sure that higher DOLCE classes are only added to the list of
    // unlinked classes if they do not have a YAGO sub class.
    // FIXME If a DOLCE class has an equal YAGO class and both have no
    // children, the DOLCE class will be added to the list of unlinked
    // classes. (very unlikely, but possible)
    addHigherIncludedDolceClass(unlinkedClasses, classesModel, unlinkedClasses);
    return unlinkedClasses;
}
private static void parseN3(GrabMappingsHandler handler, String infileurl) {
    Model model = ModelFactory.createDefaultModel();
    model.read(infileurl, "N3");
    // Reusable wrappers; their state is overwritten for every statement
    AResourceImpl sub = new AResourceImpl();
    AResourceImpl pred = new AResourceImpl();
    AResourceImpl objres = new AResourceImpl();
    ALiteralImpl objlit = new ALiteralImpl();
    StmtIterator it = model.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        RDFNode object = stmt.getObject();
        sub.setResource(stmt.getSubject());
        pred.setResource(stmt.getPredicate());
        if (object instanceof Literal) {
            objlit.setLiteral((Literal) object);
            handler.statement(sub, pred, objlit);
        } else {
            objres.setResource((Resource) object);
            handler.statement(sub, pred, objres);
        }
    }
}
private void writeResource(Resource rsrc, PrintStream out) {
    Resource type = getType(rsrc);
    if ((type == null) || !_classes.contains(type.getURI())) {
        return;
    }
    String name = getQName(type);
    out.println(SEPARATOR + "<" + name + " rdf:about=\"" + getURI(rsrc) + "\">");
    StmtIterator iter = rsrc.listProperties();
    while (iter.hasNext()) {
        Statement stmt = iter.next();
        if (isType(stmt, type)) {
            continue;
        }
        writeStatement(stmt, out);
    }
    out.println(SEPARATOR + "</" + name + ">");
}
private void fetchLabels(Model m) {
    Map<String, String> map = new HashMap<String, String>();
    Property p = m.getProperty(SKOS_PREF_LABEL);
    ResIterator rIter = m.listResourcesWithProperty(m.getProperty(RDF_TYPE));
    while (rIter.hasNext()) {
        Resource r = rIter.next();
        fetchAlternatives(map, r);
        StmtIterator sIter = r.listProperties(p);
        while (sIter.hasNext()) {
            Statement stmt = sIter.next();
            put(stmt.getSubject(), getKey(stmt.getString(), map));
        }
        map.clear();
    }
}
private List<Map<String, Object>> getRequest(Resource pcho) {
    List<Map<String, Object>> l = new ArrayList<Map<String, Object>>();
    StmtIterator iter = pcho.listProperties();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();
        RDFNode node = stmt.getObject();
        String uri = stmt.getPredicate().getURI();
        // Only literal values of known fields are relevant
        if (node.isResource() || !_fields.containsKey(uri)) {
            continue;
        }
        String prop = getQName(stmt.getPredicate());
        String value = node.asLiteral().getString();
        String[] vocs = _fields.get(uri);
        for (String key : normalizeInternal(value)) {
            Map<String, Object> m = new HashMap<String, Object>(3);
            m.put("originalField", prop + ";" + value);
            m.put("value", key);
            m.put("vocabularies", vocs);
            l.add(m);
        }
    }
    return l;
}
public static void fixLanguage(StmtIterator iter, String sLang) {
    if ((sLang == null) || sLang.trim().isEmpty()) {
        return;
    }
    // Materialize the statements first; changeObject() modifies the model
    // and would invalidate a live iterator
    List<Statement> list = iter.toList();
    for (Statement stmt : list) {
        RDFNode n = stmt.getObject();
        if (!n.isLiteral()) {
            continue;
        }
        Literal l = n.asLiteral();
        String sL = l.getLanguage();
        // Leave literals alone that already carry a language tag
        if ((sL != null) && !sL.trim().isEmpty()) {
            continue;
        }
        stmt.changeObject(l.getString(), sLang);
    }
}
public static void importSome(Resource src, Property prop, Property... pSel) {
    Collection<Resource> ra = new HashSet<Resource>();
    StmtIterator iter = src.listProperties(prop);
    while (iter.hasNext()) {
        ra.add(iter.next().getResource());
    }
    Model m = src.getModel();
    for (Resource r : ra) {
        loadModel(m, r.getURI());
        transferProperty(r, src, pSel);
    }
}
public static void mergeResources(Resource src, Resource trg) {
    Property sameAs = src.getModel().getProperty(URI_SAMEAS);
    StmtIterator iter = src.listProperties();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();
        Property p = stmt.getPredicate();
        // Move every property except owl:sameAs from src to trg
        if (p.equals(sameAs)) {
            continue;
        }
        trg.addProperty(p, stmt.getObject());
        iter.remove();
    }
}
private void enrichImpl(StmtIterator iter, Collection<Resource> toRemove, CSVWriter printer) {
    for (Statement stmt : iter.toList()) {
        RDFNode node = stmt.getObject();
        if (!node.isResource()) {
            continue;
        }
        Resource target = node.asResource();
        if (!isEntity(target)) {
            continue;
        }
        String label = getLabel(target);
        if (label == null) {
            continue;
        }
        // Replace the entity reference with its label and mark the
        // entity resource for removal
        printEnrichment(stmt, label, printer);
        stmt.changeObject(label);
        toRemove.add(target);
    }
}
public String getSourceValue() {
    String rsrc = getSourceURL();
    String val = _result.getValue();
    Property prop = _source.getProperty(getSourcePropertyURI());
    StmtIterator iter = _source.getResource(rsrc).listProperties(prop);
    while (iter.hasNext()) {
        Statement stmt = iter.next();
        if (!stmt.getObject().isLiteral()) {
            continue;
        }
        // Return the first literal value that contains the matched string
        String str = stmt.getString();
        if (str.contains(val)) {
            return str;
        }
    }
    return "";
}
/**
 * Method that will parse all distributions from a description file.
 * @return list of distribution objects
 * @throws LODVaderNoDatasetFoundException
 * @throws LODVaderFormatNotAcceptedException
 * @throws LODVaderLODGeneralException
 */
public List<DistributionDB> parseDistributions()
        throws LODVaderNoDatasetFoundException,
        LODVaderFormatNotAcceptedException,
        LODVaderLODGeneralException {
    // select dataset
    StmtIterator datasetsStmt = getFirstStmt();
    if (datasetsStmt.hasNext())
        iterateSubsetsNew(datasetsStmt, 0, 0, null, true);
    else
        throw new LODVaderNoDatasetFoundException("We could not parse any datasets.");
    return distributionsLinks;
}
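An illustrative call site for parseDistributions(); the parser variable and the error handling are assumptions about the surrounding application.

// Illustrative only; 'parser' stands for an instance of the enclosing class.
try {
    List<DistributionDB> distributions = parser.parseDistributions();
    System.out.println("Parsed " + distributions.size() + " distributions.");
} catch (LODVaderNoDatasetFoundException e) {
    System.err.println("No datasets found: " + e.getMessage());
} catch (LODVaderFormatNotAcceptedException e) {
    System.err.println("Format not accepted: " + e.getMessage());
} catch (LODVaderLODGeneralException e) {
    System.err.println("General LOD error: " + e.getMessage());
}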
public static boolean checkPrecedence(CompanyModel c) {
    StmtIterator stmtit = c.getModel().listStatements(
            new SimpleSelector(null, c.DEPTS, (RDFNode) null));
    // Collect all departments from the department bags
    List<Resource> depts = new LinkedList<Resource>();
    while (stmtit.hasNext()) {
        NodeIterator subDeptsIt = stmtit.next().getBag().iterator();
        while (subDeptsIt.hasNext())
            depts.add(subDeptsIt.next().asResource());
    }
    for (Resource dept : depts) {
        // get manager's salary
        double managerSalary = dept.getProperty(c.MANAGER)
                .getProperty(c.SALARY).getDouble();
        // every employee must earn strictly less than the manager
        NodeIterator employeeIt = dept.getProperty(c.EMPLOYEES).getBag().iterator();
        while (employeeIt.hasNext())
            if (!(employeeIt.next().asResource().getProperty(c.SALARY)
                    .getDouble() < managerSalary))
                return false;
    }
    return true;
}
/**
 * Retrieves the DHuS system's supported items for file scanning processing.
 * All classes having a <code>http://www.gael.fr/dhus#metadataExtractor</code>
 * property connection are considered supported.
 * @return the list of supported class names.
 */
public static synchronized String[] getDefaultCortexSupport() {
    DrbCortexModel model;
    try {
        model = DrbCortexModel.getDefaultModel();
    } catch (IOException e) {
        throw new UnsupportedOperationException("Drb cortex not properly initialized.");
    }
    ExtendedIterator it = model.getCortexModel().getOntModel().listClasses();
    List<String> list = new ArrayList<String>();
    while (it.hasNext()) {
        OntClass cl = (OntClass) it.next();
        OntProperty metadata_extractor_p = cl.getOntModel().getOntProperty(
                "http://www.gael.fr/dhus#support");
        StmtIterator properties = cl.listProperties(metadata_extractor_p);
        while (properties.hasNext()) {
            Statement stmt = properties.nextStatement();
            LOGGER.debug("Scanner support added for " + stmt.getSubject().toString());
            list.add(stmt.getSubject().toString());
        }
    }
    return list.toArray(new String[list.size()]);
}
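And a purely illustrative caller for getDefaultCortexSupport(), printing the supported class names the Javadoc above describes.

// Illustrative: log every class the scanner supports.
for (String supportedClass : getDefaultCortexSupport()) {
    System.out.println("Scanner supports: " + supportedClass);
}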