public Set<String> initializeCategories() {
    Model model = ModelFactory.createDefaultModel();
    model.read("/home/zwicklbauer/HDTGeneration/skos_categories_en.nt");
    StmtIterator it = model.listStatements();
    Set<String> set = new HashSet<String>();
    System.out.println("Here we go");
    while (it.hasNext()) {
        Statement s = it.next();
        Resource r = s.getSubject();
        Property p = s.getPredicate();
        RDFNode n = s.getObject();
        if (p.getURI().equalsIgnoreCase("http://www.w3.org/2004/02/skos/core#broader")
                && n.isResource()) {
            Resource target = n.asResource();
            if (!hasSubCategory(target.getURI()))
                set.add(target.getURI());
            if (!hasSubCategory(r.getURI()))
                set.add(r.getURI());
        }
    }
    return set;
}
private List<Statement> expandSubClasses(Model model) {
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <" + RDFS.getURI() + ">"
            + "SELECT DISTINCT ?class ?synonym "
            + "WHERE { "
            + "?class rdfs:subClassOf+ ?subClass . "
            + "?subClass <" + synonym + "> ?synonym"
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    ResultSet resultSet = queryExecution.execSelect();
    resultSet.forEachRemaining(querySolution -> {
        stmts.add(new StatementImpl(querySolution.getResource("class"), synonym,
                querySolution.getLiteral("synonym")));
    });
    // Release the query execution once the results have been consumed.
    queryExecution.close();
    return stmts;
}
private List<Statement> expandSubProperties(Model model) {
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <" + RDFS.getURI() + ">"
            + "SELECT DISTINCT ?property ?synonym "
            + "WHERE { "
            + "?property rdfs:subPropertyOf+ ?subProperty . "
            + "?subProperty <" + synonym + "> ?synonym"
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    ResultSet resultSet = queryExecution.execSelect();
    resultSet.forEachRemaining(querySolution -> {
        stmts.add(new StatementImpl(querySolution.getResource("property"), synonym,
                querySolution.getLiteral("synonym")));
    });
    // Release the query execution once the results have been consumed.
    queryExecution.close();
    return stmts;
}
public Collection<Resource> getUndefinedResources(Model model) {
    Set<Resource> result = new HashSet<Resource>();
    StmtIterator it = model.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        if (stmt.getSubject().isURIResource()
                && stmt.getSubject().getURI().startsWith(namespace)
                && !resources.contains(stmt.getSubject())) {
            result.add(stmt.getSubject());
        }
        if (stmt.getPredicate().equals(RDF.type)) continue;
        if (stmt.getObject().isURIResource()
                && stmt.getResource().getURI().startsWith(namespace)
                && !resources.contains(stmt.getResource())) {
            result.add(stmt.getResource());
        }
    }
    return result;
}
public Object open(Assembler ignore, Resource description, Mode ignore2) {
    if (!description.hasProperty(D2RQ.mappingFile)) {
        throw new D2RQException("Error in assembler specification " + description
                + ": missing property d2rq:mappingFile");
    }
    if (!description.getProperty(D2RQ.mappingFile).getObject().isURIResource()) {
        throw new D2RQException("Error in assembler specification " + description
                + ": value of d2rq:mappingFile must be a URI");
    }
    String mappingFileURI = ((Resource) description.getProperty(D2RQ.mappingFile).getObject()).getURI();
    String resourceBaseURI = null;
    Statement stmt = description.getProperty(D2RQ.resourceBaseURI);
    if (stmt != null) {
        if (!stmt.getObject().isURIResource()) {
            throw new D2RQException("Error in assembler specification " + description
                    + ": value of d2rq:resourceBaseURI must be a URI");
        }
        resourceBaseURI = ((Resource) stmt.getObject()).getURI();
    }
    return new ModelD2RQ(mappingFileURI, resourceBaseURI);
}
public List<RDFNode> getRDFNodes(Resource r, Property p, NodeType acceptableNodes) {
    List<RDFNode> result = new ArrayList<RDFNode>();
    StmtIterator it = r.listProperties(p);
    while (it.hasNext()) {
        Statement stmt = it.next();
        remainingTriples.remove(stmt);
        if (acceptableNodes.isTypeOf(stmt.getObject())) {
            result.add(stmt.getObject());
        } else {
            if (acceptableNodes.coerce(stmt.getObject()) != null) {
                result.add(acceptableNodes.coerce(stmt.getObject()));
            }
            report.report(acceptableNodes.ifNot, r, p, stmt.getObject());
        }
    }
    Collections.sort(result, RDFComparator.getRDFNodeComparator());
    return result;
}
public Object open(Assembler ignore, Resource description, Mode ignore2) {
    if (!description.hasProperty(D2RQ.mappingFile)) {
        throw new D2RQException("Error in assembler specification " + description
                + ": missing property d2rq:mappingFile");
    }
    if (!description.getProperty(D2RQ.mappingFile).getObject().isURIResource()) {
        throw new D2RQException("Error in assembler specification " + description
                + ": value of d2rq:mappingFile must be a URI");
    }
    String mappingFileURI = ((Resource) description.getProperty(D2RQ.mappingFile).getObject()).getURI();
    String resourceBaseURI = null;
    Statement stmt = description.getProperty(D2RQ.resourceBaseURI);
    if (stmt != null) {
        if (!stmt.getObject().isURIResource()) {
            throw new D2RQException("Error in assembler specification " + description
                    + ": value of d2rq:resourceBaseURI must be a URI");
        }
        resourceBaseURI = ((Resource) stmt.getObject()).getURI();
    }
    return new ModelD2RQ(mappingFileURI, null, resourceBaseURI);
}
/**
 * Runs a given Jena Query on a given instance and adds the inferred triples
 * to a given Model.
 * @param queryWrapper the wrapper of the CONSTRUCT query to execute
 * @param queryModel the query Model
 * @param newTriples the Model to write the triples to
 * @param instance the instance to run the inferences on
 * @param checkContains true to only call add if a Triple wasn't there yet
 * @return true if changes were done (only meaningful if checkContains == true)
 */
public static boolean runQueryOnInstance(QueryWrapper queryWrapper, Model queryModel,
        Model newTriples, Resource instance, boolean checkContains) {
    boolean changed = false;
    QueryExecution qexec = ARQFactory.get().createQueryExecution(queryWrapper.getQuery(), queryModel);
    QuerySolutionMap bindings = new QuerySolutionMap();
    bindings.add(SPIN.THIS_VAR_NAME, instance);
    Map<String, RDFNode> initialBindings = queryWrapper.getTemplateBinding();
    if (initialBindings != null) {
        for (String varName : initialBindings.keySet()) {
            RDFNode value = initialBindings.get(varName);
            bindings.add(varName, value);
        }
    }
    qexec.setInitialBinding(bindings);
    Model cm = qexec.execConstruct();
    StmtIterator cit = cm.listStatements();
    while (cit.hasNext()) {
        Statement s = cit.nextStatement();
        if (!checkContains || !queryModel.contains(s)) {
            changed = true;
            newTriples.add(s);
        }
    }
    return changed;
}
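/*
 * Usage sketch (not part of the original code): shows how runQueryOnInstance
 * might be driven for every instance of a class. The method name
 * inferForAllInstances is hypothetical, the QueryWrapper is assumed to have
 * been obtained from the surrounding SPIN setup, and the sketch is assumed to
 * live in the same class as runQueryOnInstance, reusing its Jena/SPIN types.
 */
public static Model inferForAllInstances(QueryWrapper queryWrapper, Model queryModel, Resource clazz) {
    Model newTriples = ModelFactory.createDefaultModel();
    // Visit each instance of the given class and accumulate the inferred triples.
    ResIterator instances = queryModel.listSubjectsWithProperty(RDF.type, clazz);
    while (instances.hasNext()) {
        Resource instance = instances.nextResource();
        // checkContains = true: only count triples not already asserted in queryModel.
        runQueryOnInstance(queryWrapper, queryModel, newTriples, instance, true);
    }
    return newTriples;
}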
public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    m.read(args[0]);
    StmtIterator iter = m.listStatements();
    HashSet<String> hash = new HashSet<String>();
    while (iter.hasNext()) {
        Statement stmt = iter.next();
        RDFNode node = stmt.getObject();
        // Only URI resources carry a URI; skip literals and blank nodes.
        if (node.isURIResource()) {
            hash.add(node.asResource().getURI());
        }
    }
    File output = new File(args[1]);
    try {
        PrintWriter writer = new PrintWriter(output);
        for (String s : hash) {
            writer.println(s);
        }
        writer.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
}
static JSONObject getLabelsJSONObjectFromInstanceStatement(ArrayList<Statement> statements)
        throws JSONException {
    JSONObject jsonClassesObject = new JSONObject();
    ArrayList<String> coveredValues = new ArrayList<String>();
    for (int i = 0; i < statements.size(); i++) {
        Statement statement = statements.get(i);
        String predicate = statement.getPredicate().getURI();
        if (predicate.endsWith("#label")) {
            String object = "";
            if (statement.getObject().isLiteral()) {
                object = statement.getObject().asLiteral().toString();
            } else if (statement.getObject().isURIResource()) {
                object = statement.getObject().asResource().getURI();
            }
            String[] values = object.split(",");
            for (int j = 0; j < values.length; j++) {
                String value = values[j];
                if (!coveredValues.contains(value)) {
                    coveredValues.add(value);
                    jsonClassesObject.append("labels", value);
                }
            }
        }
    }
    return jsonClassesObject;
}
private void populateVersionInfo(final ModuleHelper helper, final Result<Module> report,
        final Module module, final Resource resource) {
    final Function<Statement, Optional<String>> stringFilter = new StringFilter(report);
    final List<String> versionInfos = helper.getPropertyValues(resource, owl("versionInfo"), stringFilter);
    if (!module.isVersion()) {
        if (!versionInfos.isEmpty()) {
            report.warn("Only ontology versions can have associated version info (%s)",
                    Joiner.on(", ").join(versionInfos));
        }
    } else {
        if (versionInfos.isEmpty()) {
            report.warn("No version info defined");
        } else if (versionInfos.size() > 1) {
            report.warn("Multiple version info defined (%s)", Joiner.on(", ").join(versionInfos));
        } else {
            module.withVersionInfo(versionInfos.get(0));
        }
    }
}
private void addRowToMODEL(List<Statement> sa, String key, String puri) {
    for (Statement s : sa) {
        if (MODEL.contains(s)) {
            continue;
        }
        // add to existing resource with same key if exists
        if (s.getPredicate().getLocalName().equals(key)) {
            ResIterator it = MODEL.listResourcesWithProperty(s.getPredicate(), s.getObject());
            if (it.hasNext()) {
                // assume all members are equal
                Resource rsc = it.nextResource();
                // get parent
                Property p = ResourceFactory.createProperty(genOURI(), puri);
                Statement st = ResourceFactory.createStatement(rsc, p, s.getSubject());
                MODEL.add(st);
                continue;
            }
        }
        MODEL.add(s);
    }
}
private void fetchLabels(Model m) {
    Map<String, String> map = new HashMap<String, String>();
    Property p = m.getProperty(SKOS_PREF_LABEL);
    ResIterator rIter = m.listResourcesWithProperty(m.getProperty(RDF_TYPE));
    while (rIter.hasNext()) {
        Resource r = rIter.next();
        fetchAlternatives(map, r);
        StmtIterator sIter = r.listProperties(p);
        while (sIter.hasNext()) {
            Statement stmt = sIter.next();
            put(stmt.getSubject(), getKey(stmt.getString(), map));
        }
        map.clear();
    }
}
private void writeStatement(Statement stmt, PrintStream out) {
    String name = getQName(stmt.getPredicate());
    Map<String, String> attrs = null;
    String value = null;
    RDFNode node = stmt.getObject();
    if (node.isLiteral()) {
        Literal l = node.asLiteral();
        value = l.getString();
        String lang = l.getLanguage();
        if (!lang.isEmpty()) {
            attrs = Collections.singletonMap("xml:lang", lang);
        }
        String datatype = l.getDatatypeURI();
        if (datatype != null) {
            attrs = Collections.singletonMap("rdf:datatype", datatype);
        }
    } else {
        attrs = Collections.singletonMap("rdf:resource", getURI(node.asResource()));
    }
    writeProperty(name, attrs, value, out);
}
protected void copyAllowedProperties(Model readModel, Model model, Set<Resource> classes,
        Set<Property> allowedProperties) {
    StmtIterator stmtIterator = readModel.listStatements();
    Statement s;
    Literal label;
    while (stmtIterator.hasNext()) {
        s = stmtIterator.next();
        if (classes.contains(s.getSubject())
                && allowedProperties.contains(s.getPredicate())
                && (!s.getObject().isAnon())) {
            if (s.getPredicate().equals(RDFS.label)) {
                label = s.getObject().asLiteral();
                if (label.getLanguage().equals("en")) {
                    model.add(s.getSubject(), RDFS.label,
                            model.createLiteral(label.getString().toLowerCase(), "en"));
                }
            } else {
                model.add(s);
            }
        }
    }
}
private void writeResource(Resource rsrc, PrintStream out) {
    Resource type = getType(rsrc);
    if ((type == null) || !_classes.contains(type.getURI())) {
        return;
    }
    String name = getQName(type);
    out.println(SEPARATOR + "<" + name + " rdf:about=\"" + getURI(rsrc) + "\">");
    StmtIterator iter = rsrc.listProperties();
    while (iter.hasNext()) {
        Statement stmt = iter.next();
        if (isType(stmt, type)) {
            continue;
        }
        writeStatement(stmt, out);
    }
    out.println(SEPARATOR + "</" + name + ">");
}
private Content readContent(Model modelTpl, String hostAbout, String about, String inputFormat,
        String outputFormat, boolean isImport)
        throws UnsupportedEncodingException, ConfigurationException, URISyntaxException {
    Iterator<Statement> iterator = modelTpl.getResource(hostAbout).listProperties(RDF.type);
    Content result = null;
    while (iterator.hasNext()) {
        Resource resourceType = iterator.next().getResource();
        if (resourceType.toString().equalsIgnoreCase(LDPVoc.Container.stringValue())) {
            result = new Content(about, BygleSystemUtils.RESOURCE_TYPE_RDF_CONTAINER, resourceType, modelTpl,
                    inputFormat, outputFormat != null ? outputFormat : BygleSystemUtils.defaultOutputFormat);
            break;
        } else if (resourceType.toString().equalsIgnoreCase(LDPVoc.BasicContainer.stringValue())) {
            result = new Content(about, BygleSystemUtils.RESOURCE_TYPE_RDF_BASIC_CONTAINER, resourceType, modelTpl,
                    inputFormat, outputFormat != null ? outputFormat : BygleSystemUtils.defaultOutputFormat);
            break;
        } else if (resourceType.toString().equalsIgnoreCase(LDPVoc.DirectContainer.stringValue())) {
            result = new Content(about, BygleSystemUtils.RESOURCE_TYPE_RDF_DIRECT_CONTAINER, resourceType, modelTpl,
                    inputFormat, outputFormat != null ? outputFormat : BygleSystemUtils.defaultOutputFormat);
            break;
        } else if (resourceType.toString().equalsIgnoreCase(LDPVoc.IndirectContainer.stringValue())) {
            result = new Content(about, BygleSystemUtils.RESOURCE_TYPE_RDF_INDIRECT_CONTAINER, resourceType, modelTpl,
                    inputFormat, outputFormat != null ? outputFormat : BygleSystemUtils.defaultOutputFormat);
            break;
        } else if (resourceType.toString().equalsIgnoreCase(LDPVoc.RDFSource.stringValue())) {
            result = new Content(about, BygleSystemUtils.RESOURCE_TYPE_RDF_RESOURCE, resourceType, modelTpl,
                    inputFormat, outputFormat != null ? outputFormat : BygleSystemUtils.defaultOutputFormat);
        } else if (isImport) {
            result = new Content(about, BygleSystemUtils.RESOURCE_TYPE_RDF_RESOURCE, resourceType, modelTpl,
                    inputFormat, outputFormat != null ? outputFormat : BygleSystemUtils.defaultOutputFormat);
        }
    }
    return result;
}
static ArrayList<String> getAllEntityEvents(Dataset dataset, String entity) {
    ArrayList<String> events = new ArrayList<String>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();
        if (!name.equals(instanceGraph) && (!name.equals(provenanceGraph))) {
            Model namedModel = dataset.getNamedModel(name);
            StmtIterator siter = namedModel.listStatements();
            while (siter.hasNext()) {
                Statement s = siter.nextStatement();
                String object = getObjectValue(s).toLowerCase();
                if (object.indexOf(entity.toLowerCase()) > -1) {
                    String subject = s.getSubject().getURI();
                    if (!events.contains(subject)) {
                        events.add(subject);
                    }
                }
            }
        }
    }
    return events;
}
static boolean isEventInstance(Statement statement) {
    String predicate = statement.getPredicate().getURI();
    if (predicate.endsWith("#type")) {
        String object = "";
        if (statement.getObject().isLiteral()) {
            object = statement.getObject().asLiteral().toString();
        } else if (statement.getObject().isURIResource()) {
            object = statement.getObject().asResource().getURI();
        }
        String[] values = object.split(",");
        for (int j = 0; j < values.length; j++) {
            String value = values[j];
            String property = getNameSpaceString(value);
            // System.out.println("value = " + value);
            // System.out.println("property = " + property);
            if (value.endsWith("Event") && property.equalsIgnoreCase("sem")) {
                return true;
            }
        }
    }
    return false;
}
public static void fixLanguage(StmtIterator iter, String sLang) {
    if ((sLang == null) || sLang.trim().isEmpty()) {
        return;
    }
    List<Statement> list = iter.toList();
    for (Statement stmt : list) {
        RDFNode n = stmt.getObject();
        if (!n.isLiteral()) {
            continue;
        }
        Literal l = n.asLiteral();
        String sL = l.getLanguage();
        if ((sL != null) && !sL.trim().isEmpty()) {
            continue;
        }
        stmt.changeObject(l.getString(), sLang);
    }
}
public static void mergeResources(Resource src, Resource trg) {
    Property sameAs = src.getModel().getProperty(URI_SAMEAS);
    StmtIterator iter = src.listProperties();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();
        Property p = stmt.getPredicate();
        if (p.equals(sameAs)) {
            continue;
        }
        trg.addProperty(p, stmt.getObject());
        iter.remove();
    }
}
/**
 * Retrieves the DHuS system items supported for file scanning processing.
 * A class is considered supported when it is connected to the
 * <code>http://www.gael.fr/dhus#support</code> property queried below.
 * @return the list of supported class names.
 */
public static synchronized String[] getDefaultCortexSupport() {
    DrbCortexModel model;
    try {
        model = DrbCortexModel.getDefaultModel();
    } catch (IOException e) {
        throw new UnsupportedOperationException("Drb cortex not properly initialized.");
    }
    ExtendedIterator it = model.getCortexModel().getOntModel().listClasses();
    List<String> list = new ArrayList<String>();
    while (it.hasNext()) {
        OntClass cl = (OntClass) it.next();
        OntProperty metadata_extractor_p = cl.getOntModel().getOntProperty("http://www.gael.fr/dhus#support");
        StmtIterator properties = cl.listProperties(metadata_extractor_p);
        while (properties.hasNext()) {
            Statement stmt = properties.nextStatement();
            LOGGER.debug("Scanner Support Added for " + stmt.getSubject().toString());
            list.add(stmt.getSubject().toString());
        }
    }
    return list.toArray(new String[list.size()]);
}
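/*
 * Usage sketch (illustrative, not from the original source): a hypothetical
 * caller that logs every class URI reported as supported for file scanning.
 * It is assumed to sit in the same class, so LOGGER and the static method
 * above are directly visible.
 */
public static void logSupportedClasses() {
    for (String classUri : getDefaultCortexSupport()) {
        LOGGER.info("File scanning supported for " + classUri);
    }
}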
private List<Statement> getUsage(Property property, Model model) {
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
            + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
            + "SELECT DISTINCT ?concept "
            + "WHERE{"
            + " {<" + property.getURI() + "> rdfs:domain ?concept} "
            + " UNION "
            + " { "
            + " ?concept rdfs:subClassOf|owl:equivalentClass ?restriction . "
            + " ?restriction a owl:Restriction; "
            + " owl:onProperty <" + property.getURI() + "> "
            + " } "
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    ResultSet resultSet = queryExecution.execSelect();
    while (resultSet.hasNext()) {
        QuerySolution querySolution = resultSet.next();
        Resource concept = querySolution.getResource("concept");
        stmts.add(new StatementImpl(property, usage, concept));
    }
    // Release the query execution once the results have been consumed.
    queryExecution.close();
    return stmts;
}
private List<Statement> getUsage(OntClass ontClass, Model model) {
    List<Statement> stmts = new ArrayList<Statement>();
    try {
        String sparql = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
                + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                + "SELECT DISTINCT ?concept "
                + "WHERE{"
                + " {?prop rdfs:range <" + ontClass.getURI() + ">; "
                + " rdfs:domain ?concept"
                + " }"
                + " UNION "
                + " { "
                + " ?concept rdfs:subClassOf|owl:equivalentClass ?restriction . "
                + " ?restriction a owl:Restriction; "
                + " ?p <" + ontClass.getURI() + "> "
                + " } "
                + "}";
        Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
        QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
        ResultSet resultSet = queryExecution.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution querySolution = resultSet.next();
            Resource concept = querySolution.getResource("concept");
            stmts.add(new StatementImpl(ontClass, usage, concept));
        }
        // Release the query execution once the results have been consumed.
        queryExecution.close();
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
    return stmts;
}
/**
 * Chunks the triples into N-sized batches and posts them to VIVO.
 * This is designed to work around / handle errors when posting sets of
 * more than 10,000 triples to the API.
 *
 * @param namedGraph String with the named graph.
 * @param changeModel Jena model with the set of changes to sync to the store.
 * @param changeType Either add or remove.
 * @return Boolean true if the update was successful.
 * @throws IOException
 */
private Boolean bulkUpdate(String namedGraph, Model changeModel, String changeType) throws IOException {
    // Temporary model to hold the current batch.
    Model tmpModel = ModelFactory.createDefaultModel();
    Integer bSize = Integer.parseInt(batchSize);
    // Use an integer to count triples rather than calling size on the model
    // during each loop.
    Integer size = 0;
    StmtIterator iter = changeModel.listStatements();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement(); // get next statement
        tmpModel.add(stmt);
        size++;
        if (size >= bSize) {
            // Submit
            log.info("Submitting " + size + " triples to " + namedGraph);
            submitBatch(tmpModel, namedGraph, changeType);
            // Reset the tmp model.
            tmpModel.removeAll();
            // Reset the counter.
            size = 0;
        }
    }
    log.info("model size:" + tmpModel.size());
    // Submit the remaining statements, if any.
    if (tmpModel.size() > 0) {
        submitBatch(tmpModel, namedGraph, changeType);
    }
    return true;
}
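/*
 * Usage sketch (illustrative, not from the original source): a hypothetical
 * driver that syncs a freshly harvested model against the previous harvest by
 * sending only the differences. The "add"/"remove" change types follow the
 * Javadoc above; the method is assumed to sit in the same class as bulkUpdate.
 */
private void syncHarvest(String namedGraph, Model previousHarvest, Model newHarvest) throws IOException {
    // Triples present now but not before are additions; the reverse are removals.
    bulkUpdate(namedGraph, newHarvest.difference(previousHarvest), "add");
    bulkUpdate(namedGraph, previousHarvest.difference(newHarvest), "remove");
}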
public boolean nextInMemoryTripleKeyValue() throws IOException, InterruptedException {
    if (statementIter == null) return false;
    if (!statementIter.hasNext()) {
        hasNext = false;
        return false;
    }
    setKey();
    write("<sem:triples xmlns:sem='http://marklogic.com/semantics'>\n");
    write("<sem:origin>" + origFn + "</sem:origin>\n");
    int max = MAXTRIPLESPERDOCUMENT;
    while (max > 0 && statementIter.hasNext()) {
        Statement stmt = statementIter.nextStatement();
        write("<sem:triple>");
        write(subject(stmt.getSubject()));
        write(predicate(stmt.getPredicate()));
        write(object(stmt.getObject()));
        write("</sem:triple>\n");
        notifyUser();
        max--;
    }
    write("</sem:triples>\n");
    if (!statementIter.hasNext()) {
        pos = 1;
    }
    writeValue();
    return true;
}
public boolean nextInMemoryQuadKeyValueWithCollections() throws IOException, InterruptedException {
    if (statementIter == null) return false;
    while (!statementIter.hasNext()) {
        if (graphNameIter.hasNext()) {
            collection = graphNameIter.next();
            statementIter = dataset.getNamedModel(collection).listStatements();
        } else {
            hasNext = false;
            collection = null;
            return false;
        }
    }
    setKey();
    write("<sem:triples xmlns:sem='http://marklogic.com/semantics'>");
    int max = MAXTRIPLESPERDOCUMENT;
    while (max > 0 && statementIter.hasNext()) {
        Statement stmt = statementIter.nextStatement();
        write("<sem:triple>");
        write(subject(stmt.getSubject()));
        write(predicate(stmt.getPredicate()));
        write(object(stmt.getObject()));
        write("</sem:triple>");
        max--;
        notifyUser();
    }
    write("</sem:triples>\n");
    if (!statementIter.hasNext()) {
        pos = 1;
    }
    writeValue(collection);
    return true;
}
public Collection<Resource> getUndefinedClasses(Model model) {
    Set<Resource> result = new HashSet<Resource>();
    StmtIterator it = model.listStatements(null, RDF.type, (RDFNode) null);
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        if (stmt.getObject().isURIResource()
                && stmt.getResource().getURI().startsWith(namespace)
                && !classes.contains(stmt.getObject())) {
            result.add(stmt.getResource());
        }
    }
    return result;
}
public Collection<Property> getUndefinedProperties(Model model) {
    Set<Property> result = new HashSet<Property>();
    StmtIterator it = model.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        if (stmt.getPredicate().getURI().startsWith(namespace)
                && !properties.contains(stmt.getPredicate())) {
            result.add(stmt.getPredicate());
        }
    }
    return result;
}
public boolean usesVocabulary(Model model) {
    StmtIterator it = model.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        if (stmt.getPredicate().getURI().startsWith(namespace)) {
            return true;
        }
        if (stmt.getPredicate().equals(RDF.type)
                && stmt.getResource().getURI().startsWith(namespace)) {
            return true;
        }
    }
    return false;
}
public Model triplesInvolvingVocabulary(Model model) {
    Model result = ModelFactory.createDefaultModel();
    // getNsPrefixMap() returns a copy, so copy the prefixes via setNsPrefixes instead.
    result.setNsPrefixes(model.getNsPrefixMap());
    StmtIterator it = model.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.next();
        if (properties.contains(stmt.getPredicate())
                || (stmt.getPredicate().equals(RDF.type) && classes.contains(stmt.getObject()))) {
            result.add(stmt);
        }
    }
    return result;
}
private void parsePropertyBridges() {
    StmtIterator stmts = this.model.listStatements(null, D2RQ.belongsToClassMap, (RDFNode) null);
    while (stmts.hasNext()) {
        Statement stmt = stmts.nextStatement();
        ClassMap classMap = this.mapping.classMap(stmt.getResource());
        Resource r = stmt.getSubject();
        PropertyBridge bridge = new PropertyBridge(r);
        bridge.setBelongsToClassMap(classMap);
        parseResourceMap(bridge, r);
        parsePropertyBridge(bridge, r);
    }
}
/**
 * Helper method to add definitions from a ResourceMap to its underlying resource.
 * @param map
 * @param targetResource
 */
private void addDefinitions(ResourceMap map, Resource targetResource) {
    /* Infer rdfs:Class or rdf:Property type */
    Statement s = vocabularyModel.createStatement(targetResource, RDF.type,
            map instanceof ClassMap ? RDFS.Class : RDF.Property);
    if (!this.vocabularyModel.contains(s))
        this.vocabularyModel.add(s);
    /* Apply labels */
    for (Literal propertyLabel : map.getDefinitionLabels()) {
        s = vocabularyModel.createStatement(targetResource, RDFS.label, propertyLabel);
        if (!this.vocabularyModel.contains(s))
            this.vocabularyModel.add(s);
    }
    /* Apply comments */
    for (Literal propertyComment : map.getDefinitionComments()) {
        s = vocabularyModel.createStatement(targetResource, RDFS.comment, propertyComment);
        if (!this.vocabularyModel.contains(s))
            this.vocabularyModel.add(s);
    }
    /* Apply additional properties */
    for (Resource additionalProperty : map.getAdditionalDefinitionProperties()) {
        s = vocabularyModel.createStatement(targetResource,
                (Property) (additionalProperty.getProperty(D2RQ.propertyName).getResource().as(Property.class)),
                additionalProperty.getProperty(D2RQ.propertyValue).getObject());
        if (!this.vocabularyModel.contains(s))
            this.vocabularyModel.add(s);
    }
}
private void checkForSpuriousTriples() {
    StmtIterator it = remainingTriples.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.next();
        report.report(Problem.SPURIOUS_TRIPLE, stmt.getSubject(), stmt.getPredicate(), stmt.getObject());
    }
}
@Test
public void testListStatements() {
    StmtIterator iter = model.listStatements();
    int count = 0;
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();
        stmt.toString();
        count++;
    }
    assertEquals(322, count);
}
@Test
public void testAsk() {
    StmtIterator it = expectedTriples.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.next();
        assertTrue("Missing statement: " + stmt, loader.getModelD2RQ().contains(stmt));
    }
}
public int getIndividualsCount() {
    List<Statement> statementList = new ArrayList<Statement>();
    StmtIterator stmtIterator = model.listStatements();
    while (stmtIterator.hasNext()) {
        statementList.add(stmtIterator.next());
    }
    return statementList.size();
}
protected static String printIndividual(Individual individual) {
    StringBuilder stringBuilder = new StringBuilder();
    stringBuilder.append("Individual: " + individual.getLocalName() + "\n");
    StmtIterator properties = individual.listProperties();
    while (properties.hasNext()) {
        Statement s = properties.next();
        stringBuilder.append(" " + s.getPredicate().getLocalName() + " : " + s.getObject().toString() + "\n");
    }
    properties.close();
    stringBuilder.append("\n");
    stringBuilder.append(StringUtils.repeat("-", 70));
    return stringBuilder.toString();
}