/**
 * Reads the SKOS category dump and collects the URIs, from both ends of every
 * skos:broader statement, of all categories that have no sub-category.
 *
 * @return set of category URIs for which {@code hasSubCategory} is false
 */
public Set<String> initializeCategories() {
    Model model = ModelFactory.createDefaultModel();
    model.read("/home/zwicklbauer/HDTGeneration/skos_categories_en.nt");
    Set<String> set = new HashSet<String>();
    System.out.println("Los gehts");
    StmtIterator it = model.listStatements();
    try {
        while (it.hasNext()) {
            Statement s = it.next();
            Resource r = s.getSubject();
            Property p = s.getPredicate();
            RDFNode n = s.getObject();
            // Only skos:broader links between two resources are relevant.
            if (p.getURI().equalsIgnoreCase("http://www.w3.org/2004/02/skos/core#broader")
                    && n.isResource()) {
                Resource target = n.asResource();
                if (!hasSubCategory(target.getURI())) {
                    set.add(target.getURI());
                }
                if (!hasSubCategory(r.getURI())) {
                    set.add(r.getURI());
                }
            }
        }
    } finally {
        // Was leaked before: release the iterator and the file-backed model.
        it.close();
        model.close();
    }
    return set;
}
/**
 * Parses specific RDF nodes in order to create an {@link Event} object.
 * Validates the nodes via {@code checkRDFNodes}, then parses the start/end
 * dates (format {@code yyyy-MM-dd}) and extracts the description literal.
 *
 * @param latitudeNode RDF formatted latitude node
 * @param longitudeNode RDF formatted longitude node
 * @param startDate RDF node holding the event start date literal
 * @param endDate RDF node holding the event end date literal
 * @param descriptionNode RDF node holding the event description literal
 * @param label label/tags attached to the event
 * @return null if checkRDFNodes fails or a ParseException is caught, else the {@link Event}
 * @see LatLong
 * @see Event
 */
private static Event getEventFromRDFNodes(RDFNode latitudeNode, RDFNode longitudeNode,
        RDFNode startDate, RDFNode endDate, RDFNode descriptionNode, String label) {
    // Guard clause: bail out early on invalid nodes.
    if (!checkRDFNodes(latitudeNode, longitudeNode, startDate, endDate, descriptionNode)) {
        return null;
    }
    try {
        LatLong latLong = getLatlongFromRDFNodes(latitudeNode, longitudeNode);
        // SimpleDateFormat is not thread-safe; keep it method-local.
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
        Date start = dateFormat.parse(startDate.asLiteral().getString());
        Date end = dateFormat.parse(endDate.asLiteral().getString());
        String desc = descriptionNode.asLiteral().getString();
        return new Event(latLong, start, end, desc, label);
    } catch (ParseException e) {
        // Preserve the stack trace instead of logging only the message.
        LOGGER.error(e.getMessage(), e);
        return null;
    }
}
/**
 * Parse RDF input as string.
 *
 * @param input RDF values as String (TURTLE syntax)
 * @return an {@link Request} object which contains information about latitude, longitude and date
 * @throws IllegalStateException if an RDF object cannot be read as a literal
 * @throws NullPointerException if input is null
 */
public static Request parse(String input) {
    Objects.requireNonNull(input);
    Model model = ModelFactory.createDefaultModel();
    // Decode with an explicit charset; the bare getBytes() used the
    // platform default, which is environment-dependent.
    model.read(new ByteArrayInputStream(input.getBytes(java.nio.charset.StandardCharsets.UTF_8)), null, "TURTLE");
    Map<String, Object> map = new HashMap<>();
    model.listStatements().forEachRemaining(statement -> {
        RDFNode rdfNode = statement.getObject();
        if (rdfNode.isLiteral()) {
            try {
                map.put(statement.getPredicate().getLocalName(), statement.getObject().asLiteral().getValue());
            } catch (Exception e) {
                LOGGER.error("RDF statement is not literal");
                // Keep the original exception as the cause instead of only its message.
                throw new IllegalStateException(e);
            }
        }
    });
    model.close();
    return getDataFromMap(map);
}
public static long executeQueryForLong(String queryString, String serviceEndpoint, String var) { logger.debug("Executing query: {}", queryString); //Execute the query Map<String, RDFNode> nodeMap = executeQueryForMap(queryString, serviceEndpoint, Sets.newHashSet(var)); RDFNode rdfNode = nodeMap.get(var); if (rdfNode == null) { throw new IllegalStateException(String.format("The result does not have a binding for the variable '%s' ", var)); } if (rdfNode.isLiteral()) { return rdfNode.asLiteral().getLong(); } else { throw new IllegalStateException(String.format("The binding for the variable '%s' is not a literal", var)); } }
public static void main(String[] args) { String query = QueryCollection.TOTAL_TRIPLES; try (QueryExecution qexec = QueryExecutionFactory.sparqlService("http://localhost:3030/ldqm/query", query)) { { ResultSet results = qexec.execSelect(); // We only return the first solutions, may be a map of map for (; results.hasNext(); ) { QuerySolution soln = results.nextSolution(); Map<String, RDFNode> resultsMap = new HashMap<String, RDFNode>(); if (soln.contains("c")) { System.out.println(soln.get("c").asLiteral().getInt()); } } } } }
public static List<RDFNode> executeQueryForList(String queryString, String serviceEndpoint, String var) { List<RDFNode> resultsList = new ArrayList<>(); Query query = QueryFactory.create(queryString); try (QueryExecution qexec = QueryExecutionFactory.sparqlService(serviceEndpoint, query)) { { ResultSet results = qexec.execSelect(); // collect all the values for (; results.hasNext(); ) { QuerySolution soln = results.nextSolution(); if (soln.contains(var)) { resultsList.add(soln.get(var)); } } return resultsList; } } }
/**
 * Classifies an RDF node for display: IRI, blank node, or one of four
 * literal categories (language-tagged, plain, string-typed, other-typed).
 */
private String nodeType(RDFNode node) {
    // Resources first: IRIs, then blank nodes.
    if (node.isURIResource()) {
        return "IRI";
    }
    if (node.isAnon()) {
        return "blank node";
    }
    // From here on the node must be a literal.
    String language = node.asLiteral().getLanguage();
    if (!"".equals(language)) {
        return "language-tagged string";
    }
    String datatype = node.asLiteral().getDatatypeURI();
    if (datatype == null) {
        return "plain literal";
    }
    return XSD.xstring.getURI().equals(datatype) ? "string literal" : "non-string typed literal";
}
/**
 * Creates a message describing a problem for one subject, with optional
 * predicate and object arrays that are normalized to sorted lists.
 *
 * @param problem the problem being reported
 * @param subject May be null
 * @param predicates May be null
 * @param objects May be null
 */
public Message(Problem problem, Resource subject, Property[] predicates, RDFNode[] objects) {
    this.problem = problem;
    this.subject = subject;
    // Normalize null arrays to empty lists; sort for deterministic output.
    this.predicates = predicates == null ? Collections.<Property>emptyList() : Arrays.asList(predicates);
    Collections.sort(this.predicates, RDFComparator.getRDFNodeComparator());
    this.objects = objects == null ? Collections.<RDFNode>emptyList() : Arrays.asList(objects);
    Collections.sort(this.objects, RDFComparator.getRDFNodeComparator());
    // No detail code, details text, or cause for this constructor variant.
    this.detailCode = null;
    this.details = null;
    this.cause = null;
}
/**
 * Creates a message for a problem involving a mapping term, with optional
 * error code, details, and context (the term is stored as a plain literal).
 *
 * @param problem the problem being reported
 * @param term the mapping term involved; may be null
 * @param detailCode Optional error code; indicates a subclass of problems
 * @param details Optional string containing error details
 * @param contextResource May be null
 * @param contextProperty May be null
 */
public Message(Problem problem, MappingTerm term, String detailCode, String details, Resource contextResource, Property contextProperty) {
    this.problem = problem;
    this.subject = contextResource;
    // Wrap the single optional context property / term into (possibly empty) lists.
    this.predicates = contextProperty == null ? Collections.<Property>emptyList() : Collections.singletonList(contextProperty);
    this.objects = term == null ? Collections.<RDFNode>emptyList() : Collections.<RDFNode>singletonList(ResourceFactory.createPlainLiteral(term.toString()));
    this.detailCode = detailCode;
    this.details = details;
    this.cause = null;
}
/**
 * Serializes an RDF node to its Turtle form: qname/URI for IRIs, quoted
 * lexical form with optional language tag or datatype for literals, and a
 * stable generated label ({@code _:bN}) for blank nodes.
 */
private String toTurtle(RDFNode r) {
    if (r.isURIResource()) {
        return PrettyPrinter.qNameOrURI(relativize(r.asResource().getURI()), prefixes);
    } else if (r.isLiteral()) {
        // StringBuilder instead of StringBuffer: method-local, no synchronization needed.
        StringBuilder result = new StringBuilder(quote(r.asLiteral().getLexicalForm()));
        if (!"".equals(r.asLiteral().getLanguage())) {
            result.append("@").append(r.asLiteral().getLanguage());
        } else if (r.asLiteral().getDatatype() != null) {
            result.append("^^").append(toTurtle(ResourceFactory.createResource(r.asLiteral().getDatatypeURI())));
        }
        return result.toString();
    } else {
        // Blank node: assign a fresh label on first sight, then reuse it.
        if (!blankNodeMap.containsKey(r)) {
            blankNodeMap.put(r.asResource(), "_:b" + blankNodeCounter++);
        }
        return blankNodeMap.get(r);
    }
}
/**
 * Prints a D2RQ translation table: its href and java class, followed by
 * one compact blank node per translation (db value / rdf value pair).
 */
private void printTranslationTable(TranslationTable table) {
    printMapObject(table, D2RQ.TranslationTable);
    out.printURIProperty(D2RQ.href, table.getHref());
    out.printProperty(D2RQ.javaClass, table.getJavaClass());
    List<Map<Property, RDFNode>> rows = new ArrayList<Map<Property, RDFNode>>();
    for (Translation translation : table.getTranslations()) {
        // LinkedHashMap keeps databaseValue before rdfValue in the output.
        Map<Property, RDFNode> row = new LinkedHashMap<Property, RDFNode>();
        row.put(D2RQ.databaseValue, ResourceFactory.createPlainLiteral(translation.dbValue()));
        row.put(D2RQ.rdfValue, ResourceFactory.createPlainLiteral(translation.rdfValue()));
        rows.add(row);
    }
    out.printCompactBlankNodeProperties(D2RQ.translation, rows);
}
/**
 * Imposes a total order on RDF nodes: URI resources first (by URI), then
 * blank nodes (by label), then literals (by lexical form, with untyped
 * literals before typed ones; ties broken by language tag or datatype URI).
 */
public int compare(RDFNode n1, RDFNode n2) {
    if (n1.isURIResource()) {
        // URI resources sort before everything else.
        if (!n2.isURIResource()) return -1;
        return n1.asResource().getURI().compareTo(n2.asResource().getURI());
    }
    if (n1.isAnon()) {
        // Blank nodes sort after URI resources but before literals.
        if (n2.isURIResource()) return 1;
        if (n2.isLiteral()) return -1;
        return n1.asResource().getId().getLabelString().compareTo(n2.asResource().getId().getLabelString());
    }
    // n1 is a literal from here on; literals sort last.
    if (!n2.isLiteral()) return 1;
    int cmpLex = n1.asLiteral().getLexicalForm().compareTo(n2.asLiteral().getLexicalForm());
    if (cmpLex != 0) return cmpLex;
    // Equal lexical forms: untyped before typed; untyped ties broken by
    // language tag, typed ties by datatype URI.
    if (n1.asLiteral().getDatatypeURI() == null) {
        if (n2.asLiteral().getDatatypeURI() != null) return -1;
        return n1.asLiteral().getLanguage().compareTo(n2.asLiteral().getLanguage());
    }
    if (n2.asLiteral().getDatatypeURI() == null) return 1;
    return n1.asLiteral().getDatatypeURI().compareTo(n2.asLiteral().getDatatypeURI());
}
/**
 * Builds a PredicateObjectMap from resource {@code r}, reading the
 * map-valued properties (rr:predicateMap / rr:objectMap / rr:graphMap)
 * and their constant shortcuts (rr:predicate / rr:object / rr:graph).
 */
private PredicateObjectMap createPredicateObjectMap(Resource r) {
    PredicateObjectMap pom = new PredicateObjectMap();
    pom.getPredicateMaps().addAll(getResources(r, RR.predicateMap));
    for (RDFNode node : getRDFNodes(r, RR.predicate)) {
        pom.getPredicates().add(ConstantShortcut.create(node));
    }
    pom.getObjectMaps().addAll(getResources(r, RR.objectMap));
    for (RDFNode node : getRDFNodes(r, RR.object)) {
        pom.getObjects().add(ConstantShortcut.create(node));
    }
    pom.getGraphMaps().addAll(getResources(r, RR.graphMap));
    for (RDFNode node : getRDFNodes(r, RR.graph)) {
        pom.getGraphs().add(ConstantShortcut.create(node));
    }
    return pom;
}
/**
 * Returns the objects of all (r, p, *) statements, removing each visited
 * statement from {@code remainingTriples}. Objects of an acceptable node
 * type are kept as-is; others are coerced when possible and reported.
 *
 * @param r the subject resource to read properties from
 * @param p the property to follow
 * @param acceptableNodes the node-type filter/coercion policy
 * @return the accepted/coerced objects, sorted deterministically
 */
public List<RDFNode> getRDFNodes(Resource r, Property p, NodeType acceptableNodes) {
    List<RDFNode> result = new ArrayList<RDFNode>();
    StmtIterator it = r.listProperties(p);
    while (it.hasNext()) {
        Statement stmt = it.next();
        // Mark this triple as consumed by the mapping reader.
        remainingTriples.remove(stmt);
        if (acceptableNodes.isTypeOf(stmt.getObject())) {
            result.add(stmt.getObject());
        } else {
            if (acceptableNodes.coerce(stmt.getObject()) != null) {
                result.add(acceptableNodes.coerce(stmt.getObject()));
            }
            // NOTE(review): the problem is reported even when coercion
            // succeeded above — confirm this warn-and-accept behavior is
            // intended rather than a missing else.
            report.report(acceptableNodes.ifNot, r, p, stmt.getObject());
        }
    }
    // Deterministic ordering for downstream consumers.
    Collections.sort(result, RDFComparator.getRDFNodeComparator());
    return result;
}
/**
 * Copies the bindings of a query solution into a map, keeping only the
 * requested variables. Unusually large values (> 250 chars) are logged.
 *
 * @param solution the solution to read bindings from
 * @param variables the variable names to keep
 * @return variable name to bound value, for the requested variables only
 */
public static Map<String,RDFNode> solutionToMap(QuerySolution solution, List<String> variables) {
    Map<String,RDFNode> bindings = new HashMap<String,RDFNode>();
    for (Iterator<String> names = solution.varNames(); names.hasNext(); ) {
        String name = names.next();
        if (variables.contains(name)) {
            RDFNode value = solution.get(name);
            int length = value.toString().length();
            if (length > 250) {
                bigStringInResultLogger.debug("Big string (" + length + ") in resultBinding:\n" + value);
            }
            bindings.put(name, value);
        }
    }
    return bindings;
}
/** * If class has a label AbraCadabraQualityPerformance --> Abra Cadabra --> abra cadabra : will be returned as a label in the set. * * @param ocls * @return */ public Set<String> getClassLabels(OntClass ocls) { String name = ocls.getLocalName(); ExtendedIterator<RDFNode> iterator = ocls.listLabels(null); List<String> labels = new ArrayList<String>(3); while (iterator.hasNext()) { labels.add(iterator.next().asLiteral().getLexicalForm()); } // get trimmed name,labels then split camel case. name = trimName(name); name = LingUtil.splitCamelCase(name).toLowerCase(); for (int i = 0; i < labels.size(); i++) { labels.set(i, LingUtil.splitCamelCase(trimName(labels.get(i))).toLowerCase());//trim then split camel case . then make lowercase } Set<String> lbls = new HashSet<String>(3); lbls.add(name); lbls.addAll(labels); return lbls; }
/**
 * Runs a given Jena Query on a given instance and adds the inferred triples
 * to a given Model.
 * @param queryWrapper the wrapper of the CONSTRUCT query to execute
 * @param queryModel the query Model
 * @param newTriples the Model to write the triples to
 * @param instance the instance to run the inferences on
 * @param checkContains true to only call add if a Triple wasn't there yet
 * @return true if changes were done (only meaningful if checkContains == true)
 */
public static boolean runQueryOnInstance(QueryWrapper queryWrapper, Model queryModel, Model newTriples, Resource instance, boolean checkContains) {
    boolean changed = false;
    QueryExecution qexec = ARQFactory.get().createQueryExecution(queryWrapper.getQuery(), queryModel);
    try {
        // Bind ?this to the instance, plus any template bindings.
        QuerySolutionMap bindings = new QuerySolutionMap();
        bindings.add(SPIN.THIS_VAR_NAME, instance);
        Map<String, RDFNode> initialBindings = queryWrapper.getTemplateBinding();
        if (initialBindings != null) {
            for (String varName : initialBindings.keySet()) {
                bindings.add(varName, initialBindings.get(varName));
            }
        }
        qexec.setInitialBinding(bindings);
        Model cm = qexec.execConstruct();
        StmtIterator cit = cm.listStatements();
        while (cit.hasNext()) {
            Statement s = cit.nextStatement();
            if (!checkContains || !queryModel.contains(s)) {
                changed = true;
                newTriples.add(s);
            }
        }
        return changed;
    } finally {
        // Was leaked before: always release the query execution.
        qexec.close();
    }
}
/**
 * For the focus resource {@code fres}, collects the set of objects of every
 * property already registered as a key of {@code map}, and stores each set
 * back into {@code map} under its property.
 *
 * @param fres URI of the focus resource
 * @param m the model to read object values from
 */
public void compute(String fres, Model m) {
    // collect objects
    for(Property p : map.keySet()) {
        Resource s = m.getResource(fres);
        Set<RDFNode> objset = m.listObjectsOfProperty(s, p).toSet();
        LOGGER.debug("<"+s+"> <"+p+">");
        LOGGER.debug("size(objset) = "+objset.size());
        // Replacing the value of an existing key is not a structural
        // modification, so iterating keySet() here is safe.
        map.put(p, objset);
        // TODO if(objset.isEmpty()) ;
    }
    LOGGER.debug(map);
}
/**
 * Resolves the topic type of an RDF property via its RTM_TYPE value.
 * Only the first type value is considered; the topic is created in the
 * topic map if no topic with that subject identifier exists yet.
 *
 * @return the resolved or newly created topic, or null if no type is set
 */
private TopicIF getType(RDFNode rdfprop, Model model) throws JenaException, MalformedURLException {
    Property typeProperty = model.getProperty(RTM_TYPE);
    NodeIterator values = model.listObjectsOfProperty((Resource) rdfprop, typeProperty);
    if (values.hasNext()) {
        Resource typeResource = (Resource) values.next();
        LocatorIF locator = new URILocator(typeResource.getURI());
        TopicIF topic = topicmap.getTopicBySubjectIdentifier(locator);
        if (topic == null) {
            // No topic with this identifier yet: create one.
            topic = builder.makeTopic();
            topic.addSubjectIdentifier(locator);
        }
        return topic;
    }
    return null;
}
/**
 * Replaces entity objects by their labels: for every statement whose object
 * is a labelled entity resource, records the enrichment in the CSV output,
 * rewrites the statement's object to the label, and schedules the entity
 * resource for removal.
 */
private void enrichImpl(StmtIterator iter, Collection<Resource> toRemove, CSVWriter printer) {
    for (Statement stmt : iter.toList()) {
        RDFNode object = stmt.getObject();
        if (object.isResource()) {
            Resource resource = object.asResource();
            if (isEntity(resource)) {
                String label = getLabel(resource);
                if (label != null) {
                    printEnrichment(stmt, label, printer);
                    stmt.changeObject(label);
                    toRemove.add(resource);
                }
            }
        }
    }
}
/**
 * Counts the distinct type URIs in the TYPES HDT file by collecting every
 * statement object URI into a hash map and printing progress every 50000
 * statements.
 */
public static void main(String[] args) {
    HDT hdt = null;
    try {
        hdt = HDTManager.mapIndexedHDT(TYPES, null);
    } catch (IOException e) {
        e.printStackTrace();
    }
    HDTGraph graph = new HDTGraph(hdt);
    Model m = ModelFactory.createModelForGraph(graph);
    StmtIterator iter = m.listStatements();
    HashMap<String, Integer> hash = new HashMap<String, Integer>();
    int number = 0;
    while (iter.hasNext()) {
        if (number % 50000 == 0) {
            System.out.println("Processed Entries: " + number);
        }
        Statement stmt = iter.next();
        RDFNode object = stmt.getObject();
        // Guard: calling asResource() on a literal object would throw.
        if (object.isResource()) {
            hash.put(object.asResource().getURI(), 0);
        }
        number++;
    }
    System.out.println("Anzahl an Typen: " + hash.size());
}
/**
 * Prints a result set as a tab-separated table: one header row with the
 * requested field names, then one row per solution with "NULL" for
 * unbound fields.
 */
private static void rendererResultSet(ResultSet rs, String... queryFields) {
    System.out.println("Result:");
    // Header row.
    for (String field : queryFields) {
        System.out.print(field + TAB);
    }
    System.out.println();
    // One row per solution.
    while (rs.hasNext()) {
        QuerySolution solution = rs.nextSolution();
        for (String field : queryFields) {
            RDFNode value = solution.get(field);
            if (value != null) {
                System.out.print(value + TAB);
            } else {
                System.out.print("NULL" + TAB);
            }
        }
        System.out.println();
    }
}
/**
 * Loads an RDF file (args[0]) and, for every statement, stores the
 * (category, entity) pair derived from the last URI path segments of the
 * object and subject into the HBase table DBPEDIA_CatToEnts.
 */
public static final void main(String[] args) throws MalformedURLException, FileNotFoundException {
    Model model = ModelFactory.createDefaultModel();
    model.read(args[0]);
    StmtIterator iter = model.listStatements();
    System.out.println("Start");
    while (iter.hasNext()) {
        Statement stmt = iter.next();
        Resource subject = stmt.getSubject();
        String url[] = subject.getURI().split("/");
        String subjectIdent = url[url.length - 1];
        RDFNode object = stmt.getObject();
        // Guard: literal objects have no URI; asResource() would throw.
        if (!object.isResource()) {
            continue;
        }
        String s[] = object.asResource().getURI().split("/");
        String objIdent = s[s.length - 1];
        try {
            System.out.println(objIdent + " " + subjectIdent);
            HBaseOperations.getInstance().addRecord("DBPEDIA_CatToEnts", objIdent, "data",
                    String.valueOf(subjectIdent.hashCode()), subjectIdent);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Queries the model for the begin/end timestamps associated with an event,
 * using two symmetric UNION branches (begin with optional end, end with
 * optional begin).
 *
 * @param eventId the event resource whose URI is inlined into the query
 * @param m the model to query
 * @return the SELECT result set with ?begin / ?end bindings
 */
private static ResultSet getAssociatedTimes(RDFNode eventId, Model m) {
    String multiPointQuery = "SELECT ?begin ?end WHERE { { <" + eventId + "> <http://semanticweb.cs.vu.nl/2009/11/sem/hasEarliestBeginTimeStamp> ?t1 . ?t1 <http://www.w3.org/TR/owl-time#inDateTime> ?begin . OPTIONAL { <" + eventId + "> <http://semanticweb.cs.vu.nl/2009/11/sem/hasEarliestEndTimeStamp> ?t2 . ?t2 <http://www.w3.org/TR/owl-time#inDateTime> ?end . } } UNION ";
    multiPointQuery += "{ <" + eventId + "> <http://semanticweb.cs.vu.nl/2009/11/sem/hasEarliestEndTimeStamp> ?t2 . ?t2 <http://www.w3.org/TR/owl-time#inDateTime> ?end . OPTIONAL { <" + eventId + "> <http://semanticweb.cs.vu.nl/2009/11/sem/hasEarliestBeginTimeStamp> ?t1 . ?t1 <http://www.w3.org/TR/owl-time#inDateTime> ?begin . } } }";
    Query mpQuery = QueryFactory.create(multiPointQuery);
    // Create a single execution of this query, apply to a model
    // which is wrapped up as a Dataset
    QueryExecution mpQexec = QueryExecutionFactory.create(mpQuery, m);
    try {
        // Assumption: it’s a SELECT query.
        ResultSet mprs = mpQexec.execSelect();
        // NOTE(review): the execution is closed before the ResultSet is
        // consumed; in Jena a ResultSet is not valid after close(), so the
        // caller likely receives an unusable result. Consider materializing
        // it (e.g. ResultSetFactory.copyResults) before closing.
        mpQexec.close();
        return mprs;
    } finally {
    }
}
/**
 * Exercises list creation, statement creation/reification in the base
 * context model, and serializes the dataset as N-Quads to a scratch file.
 */
@Test
public void testStatements() throws Exception {
    String bio_ns = "http://example.com/biology#";
    KBCollection human = new KBCollectionImpl(bio_ns + "human");
    Variable v = new VariableImpl("person");
    KBObjectImpl.getBaseContextModel().createList(
            new RDFNode[] { ((Resource) human.getCore()), ((Resource) v.getCore()) });
    KBPredicate father = new KBPredicateImpl(Constants.CYC_TBOX_NS.concat("father"));
    Statement s = KBObjectImpl.getBaseContextModel().createStatement(
            ((KBCollectionImpl) Constants.owlThing()).getCore(),
            (OntProperty) ((KBPredicateImpl) father).getCore(),
            ((VariableImpl) v).getCore());
    KBObjectImpl.getBaseContextModel().add(s);
    KBObjectImpl.getBaseContextModel().createReifiedStatement(s);
    // try-with-resources: the writer was never closed before (handle leak,
    // possibly unflushed output).
    try (FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/test5.xml")) {
        RDFDataMgr.write(fw, KBObjectImpl.getDataset(), RDFFormat.NQUADS);
    }
}
/** * RDF Navigation using SPARQL Query * * @param model * the RDF model * @param query * SPARQL Query String * @param field * the placeholder of filed in parameter query */ private static void sparql(Model model, String query, String field) { Query q = QueryFactory.create(query); QueryExecution qexec = QueryExecutionFactory.create(q, model); System.out.println("Plan to run SPARQL query: "); System.out.println(BOUNDARY); System.out.println(query); System.out.println(BOUNDARY); ResultSet rs = qexec.execSelect(); while (rs.hasNext()) { QuerySolution qs = rs.nextSolution(); RDFNode name = qs.get(field);// using RDFNode currently if (name != null) { System.out.println("Hello to " + name); } else { System.out.println("No friends found!"); } } qexec.close(); }
/**
 * Create new resource.
 *
 * @param uri
 *            the uri String; null/empty falls back to the shared
 *            anonymous-resource URI
 * @param literal
 *            the literal flag; literals get a generated descriptor id,
 *            resources are looked up via getNodeDescriptor
 */
public void newResource(String uri, boolean literal) {
    // Default to a well-known anonymous-resource URI for null/empty input.
    if (uri == null || "".equals(uri)) {
        uri = MindRaiderConstants.MR_RDF_NS + "anonymousResource";
    }
    RDFNode rdfNode = rdfModel.newResource(uri, literal);
    NodeDescriptor nodeDescriptor;
    if (literal) {
        // TODO get triplet id generator method
        nodeDescriptor = new NodeDescriptor("literal_" + MindRaiderConstants.MR + ++renderedTriplets, uri, uri);
    } else {
        nodeDescriptor = getNodeDescriptor((Resource) rdfNode);
    }
    try {
        // Add the new node to the graph panel and select it.
        Node node = createSubjectNode(nodeDescriptor);
        node.rdfNode = rdfNode;
        tgPanel.addNode(node);
        tgPanel.setSelect(node);
    } catch (TGException e) {
        logger.error("newResource(String, boolean)", e);
    }
}
/**
 * Assigns the language tag {@code sLang} to every literal object in the
 * iterated statements that does not already carry a language tag.
 * Does nothing when {@code sLang} is null or blank.
 */
public static void fixLanguage(StmtIterator iter, String sLang) {
    if (sLang == null || sLang.trim().isEmpty()) {
        return;
    }
    // Materialize first: changeObject mutates the underlying model.
    for (Statement stmt : iter.toList()) {
        RDFNode object = stmt.getObject();
        if (!object.isLiteral()) {
            continue;
        }
        Literal literal = object.asLiteral();
        String existingLang = literal.getLanguage();
        boolean alreadyTagged = existingLang != null && !existingLang.trim().isEmpty();
        if (!alreadyTagged) {
            stmt.changeObject(literal.getString(), sLang);
        }
    }
}
/**
 * Finds all RTM_IN_SCOPE properties for this property and returns a
 * collection containing the RDF URIs of the values as URILocators.
 *
 * @param rdfprop the property resource to read scope values from
 * @param model the model to query
 * @return list of URILocator scope references (raw-typed for legacy API)
 * @throws RDFMappingException if a scope value is not a resource
 */
private Collection getScope(RDFNode rdfprop, Model model)
  throws JenaException, MalformedURLException {
    Resource subject = (Resource) rdfprop;
    Property prop = model.getProperty(RTM_IN_SCOPE);
    NodeIterator it = model.listObjectsOfProperty(subject, prop);
    ArrayList scope = new ArrayList();
    while (it.hasNext()) {
        Object o = it.next();
        // Scope values must be resources; literals are a mapping error.
        if (!(o instanceof Resource))
            throw new RDFMappingException("Scoping topic must be specified by a resource, not by " + o);
        Resource obj = (Resource) o;
        LocatorIF loc = new URILocator(obj.getURI());
        scope.add(loc);
    }
    return scope;
}
/** * @param path * @return */ public static HashMap<String, List<Resource>> loadClassList(String path) { HashMap<String, List<Resource>> res = new HashMap<>(); // load specification file Model model = RDFDataMgr.loadModel(path); // get all graphs Iterator<Statement> statIt = model.listStatements((Resource) null, ResourceFactory.createProperty("http://aksw.org/deduplication/relatedGraph"), (RDFNode) null); while(statIt.hasNext()) { Statement s = statIt.next(); Resource dataset = s.getSubject(); String graph = s.getObject().as(Resource.class).getURI(); // get all classes for each graph ArrayList<Resource> classes = new ArrayList<>(); Iterator<RDFNode> nodeIt = model.listObjectsOfProperty(dataset, ResourceFactory.createProperty("http://aksw.org/deduplication/requiredClasses")); while(nodeIt.hasNext()) { Resource c = nodeIt.next().as(Resource.class); classes.add(c); } res.put(graph, classes); } return res; }
/**
 * Reads the source type of a mapping resource from its KM_SOURCE_TYPE
 * property; falls back to CSV when the property is missing, non-literal,
 * or holds an unrecognized type name.
 *
 * @param mappingResource the mapping resource to inspect
 * @return the parsed source type, or {@code SourceTypes.CSV} as default
 */
private SourceTypes getSourceType(Resource mappingResource) {
    Property sourceNameProp = model.getProperty(Uris.KM_SOURCE_TYPE_URI);
    Statement s = model.getProperty(mappingResource, sourceNameProp);
    String sourceType = null;
    if (s != null) {
        RDFNode node = s.getObject();
        if (node != null && node.isLiteral()) {
            sourceType = node.asLiteral().getString();
            try {
                return SourceTypes.valueOf(sourceType);
            } catch (IllegalArgumentException ignored) {
                // Unknown source type string: previously this propagated an
                // exception and bypassed the CSV default; fall through instead.
            }
        }
    }
    return SourceTypes.CSV;
}
/**
 * Returns the first rdfs:subClassOf ancestor of {@code resourceURI} that
 * carries an rdfs:label in the requested language.
 *
 * @param resourceURI the subject resource URI
 * @param lang language tag to match (was previously ignored and hard-coded to "en")
 * @return record holding the superclass URI and label, or null if none matches
 */
public OntoRecord getSubclass(String resourceURI, String lang) {
    StmtIterator iter1 = model.listStatements(new SimpleSelector(
            ResourceFactory.createResource(resourceURI),
            ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
            (RDFNode) null));
    OntoRecord record = new OntoRecord();
    StmtIterator iter2;
    while (iter1.hasNext()) {
        record.setUri(iter1.next().getObject().toString());
        iter2 = model.listStatements(new SimpleSelector(
                ResourceFactory.createResource(record.getUri()),
                ResourceFactory.createProperty("http://www.w3.org/2000/01/rdf-schema#label"),
                (RDFNode) null));
        while (iter2.hasNext()) {
            Literal res = (Literal) iter2.next().getObject();
            String tmpLang = res.getLanguage();
            // Bug fix: honor the lang parameter instead of hard-coded "en".
            if (tmpLang.equals(lang)) {
                record.setLabel(res.getString());
                return record;
            }
        }
    }
    return null;
}
private List<Statement> convertRowToStatement(Table table, Row row, Resource rcs) { List<Statement> sa = new ArrayList<Statement>(row.size()); int i = 0; Set<String> attrs = row.keySet(); for (String attr : attrs) { RDFNode attrRcs; Object value = row.get(attr); if (value == null || excludedValue(attr.toString(), value.toString())) { // dealing with empty and excluded values continue; } else { attrRcs = createRDFNode(table.getColumn(attr), value); } Property p = ResourceFactory.createProperty(genOURI(), attr.toString()); Statement s = ResourceFactory.createStatement(rcs, p, attrRcs); sa.add(s); } return sa; }
/**
 * Checks that in every department each employee earns strictly less than
 * the department's manager.
 *
 * @param c the company model to check
 * @return true iff no employee salary reaches or exceeds the manager's
 */
public static boolean checkPrecedence(CompanyModel c) {
    // Collect all departments: flatten the members of every DEPTS bag.
    StmtIterator stmtit = c.getModel().listStatements(
        new SimpleSelector(null, c.DEPTS, (RDFNode) null));
    List<Resource> depts = new LinkedList<Resource>();
    while (stmtit.hasNext()) {
        NodeIterator subDeptsIt = stmtit.next().getBag().iterator();
        while (subDeptsIt.hasNext())
            depts.add(subDeptsIt.next().asResource());
    }
    for (Resource dept : depts) {
        // get manager's salary
        double managerSalary = dept.getProperty(c.MANAGER).getProperty(
            c.SALARY).getDouble();
        // Any employee earning >= the manager violates the invariant.
        NodeIterator employeeIt = dept.getProperty(c.EMPLOYEES).getBag()
            .iterator();
        while (employeeIt.hasNext())
            if (!(employeeIt.next().asResource().getProperty(c.SALARY)
                .getDouble() < managerSalary))
                return false;
    }
    return true;
}
public static List<Map<String, RDFNode>> executeQueryForList(String queryString, String serviceEndpoint, Set<String> vars) { logger.debug("Executing query: {}", queryString); List<Map<String, RDFNode>> resultsList = new ArrayList<>(); Query query = QueryFactory.create(queryString); try (QueryExecution qexec = QueryExecutionFactory.sparqlService(serviceEndpoint, query)) { { ResultSet results = qexec.execSelect(); // collect all the values for (; results.hasNext(); ) { QuerySolution soln = results.nextSolution(); Map<String, RDFNode> resultsMap = new HashMap<String, RDFNode>(); for (String var : vars) { if (soln.contains(var)) { resultsMap.put(var, soln.get(var)); } } resultsList.add(resultsMap); } return resultsList; } } }
/*** * A utility method to execute SPARQL query when only single valued * @param queryString The SPARQL query string * @param serviceEndpoint The endpoint to run the query against * @param vars the variables from the result * @return Map of the the variable and the value from the solution */ public static Map<String, RDFNode> executeQueryForMap(String queryString, String serviceEndpoint, Set<String> vars) { logger.debug("Executing query: {}", queryString); Query query = QueryFactory.create(queryString); try (QueryExecution qexec = QueryExecutionFactory.sparqlService(serviceEndpoint, query)) { { ResultSet results = qexec.execSelect(); // We only return the first solutions, may be a map of map for (; results.hasNext(); ) { QuerySolution soln = results.nextSolution(); Map<String, RDFNode> resultsMap = new HashMap<String, RDFNode>(); for (String var : vars) { if (soln.contains(var)) { resultsMap.put(var, soln.get(var)); } } return resultsMap; } } // No solutions found return Collections.EMPTY_MAP; } }
/**
 * Serializes an RDF node as a MarkLogic {@code sem:object} XML element:
 * literals get either an xml:lang attribute or a datatype attribute
 * (defaulting to xsd:string), blank nodes become generated skolem URIs,
 * and anything else is emitted via toString().
 */
private String object(RDFNode node) {
    if (node.isLiteral()) {
        Literal lit = node.asLiteral();
        String text = lit.getString();
        String lang = lit.getLanguage();
        String type = lit.getDatatypeURI();
        // Normalize the language tag into either "" or a ready-made attribute.
        if (lang == null || "".equals(lang)) {
            lang = "";
        } else {
            lang = " xml:lang='" + escapeXml(lang) + "'";
        }
        // Language-tagged literals carry no datatype attribute; untagged
        // literals default to xsd:string.
        if ("".equals(lang)) {
            if (type == null) {
                type = "http://www.w3.org/2001/XMLSchema#string";
            }
            type = " datatype='" + escapeXml(type) + "'";
        } else {
            type = "";
        }
        return "<sem:object" + type + lang + ">" + escapeXml(text) + "</sem:object>";
    } else if (node.isAnon()) {
        // Skolemize blank nodes into a stable-per-run URI derived from the
        // node hash and the run-scoped milliSecs/randomValue fields.
        return "<sem:object>http://marklogic.com/semantics/blank/" + Long.toHexString(
            fuse(scramble((long)node.hashCode()),fuse(scramble(milliSecs),randomValue)))
            + "</sem:object>";
    } else {
        return "<sem:object>" + escapeXml(node.toString()) + "</sem:object>";
    }
}
/**
 * Prints a property with its term serialized as Turtle; no-op when the
 * term is null.
 */
public void printProperty(Property property, RDFNode term) {
    if (term == null) return;
    // term is known non-null past the guard; the original redundantly
    // re-evaluated (term != null) here — it is always true.
    if (term.isResource()) {
        printPropertyTurtle(true, property, toTurtle(term.asResource()));
    } else {
        printPropertyTurtle(true, property, toTurtle(term.asLiteral()));
    }
}
/**
 * Serializes a property→value map as a compact Turtle blank node,
 * e.g. {@code [ :p1 v1; :p2 v2; ]}; an empty map yields {@code []}.
 */
private String toTurtleCompact(Map<Property, RDFNode> resource) {
    if (resource.isEmpty()) return "[]";
    StringBuilder result = new StringBuilder("[ ");
    // Iterate entries directly instead of a get() lookup per key.
    for (Map.Entry<Property, RDFNode> entry : resource.entrySet()) {
        result.append(toTurtle(entry.getKey()));
        result.append(" ");
        result.append(toTurtle(entry.getValue()));
        result.append("; ");
    }
    result.append("]");
    return result.toString();
}
/**
 * Scans all rdf:type statements and returns every type resource that is in
 * this vocabulary's namespace but not among the known classes.
 *
 * @param model the model to scan
 * @return the set of referenced-but-undefined class resources
 */
public Collection<Resource> getUndefinedClasses(Model model) {
    Set<Resource> undefined = new HashSet<Resource>();
    StmtIterator typeStatements = model.listStatements(null, RDF.type, (RDFNode) null);
    while (typeStatements.hasNext()) {
        Statement stmt = typeStatements.nextStatement();
        if (!stmt.getObject().isURIResource()) {
            continue;
        }
        // stmt.getResource() is the statement's object viewed as a Resource.
        boolean inNamespace = stmt.getResource().getURI().startsWith(namespace);
        if (inNamespace && !classes.contains(stmt.getObject())) {
            undefined.add(stmt.getResource());
        }
    }
    return undefined;
}