/**
 * Runs a given Jena Query on a given instance and adds the inferred triples
 * to a given Model.
 * @param queryWrapper  the wrapper of the CONSTRUCT query to execute
 * @param queryModel  the query Model
 * @param newTriples  the Model to write the triples to
 * @param instance  the instance to run the inferences on
 * @param checkContains  true to only call add if a Triple wasn't there yet
 * @return true if changes were done (only meaningful if checkContains == true)
 */
public static boolean runQueryOnInstance(QueryWrapper queryWrapper, Model queryModel, Model newTriples, Resource instance, boolean checkContains) {
    boolean changed = false;
    QueryExecution qexec = ARQFactory.get().createQueryExecution(queryWrapper.getQuery(), queryModel);
    QuerySolutionMap bindings = new QuerySolutionMap();
    bindings.add(SPIN.THIS_VAR_NAME, instance);
    Map<String, RDFNode> initialBindings = queryWrapper.getTemplateBinding();
    if (initialBindings != null) {
        for (String varName : initialBindings.keySet()) {
            RDFNode value = initialBindings.get(varName);
            bindings.add(varName, value);
        }
    }
    qexec.setInitialBinding(bindings);
    Model cm = qexec.execConstruct();
    StmtIterator cit = cm.listStatements();
    while (cit.hasNext()) {
        Statement s = cit.nextStatement();
        if (!checkContains || !queryModel.contains(s)) {
            changed = true;
            newTriples.add(s);
        }
    }
    return changed;
}
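/*
 * A hedged sketch of one possible call site for runQueryOnInstance above: apply an
 * already-obtained SPIN rule wrapper to every instance of one class and merge any new
 * triples back into the query model. The names ruleWrapper and someClass are
 * assumptions for illustration only; obtaining the QueryWrapper (e.g. from spin:rule
 * statements) is out of scope here.
 */
public static void runRuleOnAllInstances(QueryWrapper ruleWrapper, Model queryModel, Resource someClass) {
    Model newTriples = ModelFactory.createDefaultModel();
    boolean changed = false;
    StmtIterator instances = queryModel.listStatements(null, RDF.type, someClass);
    while (instances.hasNext()) {
        Resource instance = instances.nextStatement().getSubject();
        changed |= runQueryOnInstance(ruleWrapper, queryModel, newTriples, instance, true);
    }
    if (changed) {
        // checkContains == true above, so everything in newTriples is genuinely new
        queryModel.add(newTriples);
    }
}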
public boolean askcontainsStatement(Statement st, Model m) {
    // Build an ASK query; blank nodes in the statement are matched via FILTER isBlank
    // instead of being bound directly (a blank node cannot be passed in as a binding).
    String q = "ASK { ?subject ?predicate ?object .";
    if (st.getSubject().isAnon())
        q += " FILTER isBlank(?subject)";
    if (st.getObject().isAnon())
        q += " FILTER isBlank(?object)";
    q += " }";
    QuerySolutionMap args = new QuerySolutionMap();
    if (!st.getSubject().isAnon())
        args.add("subject", st.getSubject());
    // The predicate of a statement can never be a blank node, so it is always bound.
    args.add("predicate", st.getPredicate());
    if (!st.getObject().isAnon())
        args.add("object", st.getObject());
    Query query = QueryFactory.create(q);
    QueryExecution qe = QueryExecutionFactory.create(query, m, args);
    try {
        return qe.execAsk();
    } finally {
        qe.close();
    }
}
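/*
 * A small usage sketch for askcontainsStatement above (the model contents and URIs are
 * made up for illustration, and the method is assumed to be in scope). Because blank
 * nodes are matched with FILTER isBlank rather than bound directly, a statement whose
 * object is some blank node counts as contained as soon as any triple with a matching
 * subject, predicate and a blank object exists.
 */
Model m = ModelFactory.createDefaultModel();
Resource alice = m.createResource("http://example.org/alice");
Property knows = m.createProperty("http://example.org/knows");
m.add(alice, knows, m.createResource());                        // object is a blank node
Statement probe = m.createStatement(alice, knows, m.createResource());
boolean contained = askcontainsStatement(probe, m);             // true: any blank object matches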
@Override
public void loadVocabularies() {
    QuerySolutionMap binding = new QuerySolutionMap();
    binding.add("linkset", this.dataset);
    Query query = QueryFactory.create(linksetVocabularyQuery);
    QueryExecution qexec = QueryExecutionFactory.create(query, voidInstance.getVoidModel(), binding);
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution soln = results.nextSolution();
            OntResource vocabulary = soln.getResource("vocabulary").as(OntResource.class);
            vocabularies.add(vocabulary);
        }
    } catch (Exception e) {
        Log.debug(Linkset.class, "Failed linksetVocabularyQuery");
        // e.getStackTrace().toString() only prints the array reference, so log the exception itself
        Log.debug(Linkset.class, e.toString());
    } finally {
        qexec.close();
    }
}
public void loadVocabularies() {
    QuerySolutionMap binding = new QuerySolutionMap();
    binding.add("dataset", dataset);
    Query query = QueryFactory.create(datasetVocabularyQuery);
    QueryExecution qexec = QueryExecutionFactory.create(query, voidInstance.getVoidModel(), binding);
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution soln = results.nextSolution();
            OntResource vocabulary = soln.getResource("vocabulary").as(OntResource.class);
            vocabularies.add(vocabulary);
        }
    } catch (Exception e) {
        Log.debug(Dataset.class, "Failed datasetVocabularyQuery");
        // e.getStackTrace().toString() only prints the array reference, so log the exception itself
        Log.debug(Dataset.class, e.toString());
    } finally {
        qexec.close();
    }
}
public List<Resource> getURIs(String queryFile, String paramVariable, Resource paramValue, String resultVariable) {
    Query query = getQuery(queryFile);
    QuerySolutionMap args = new QuerySolutionMap();
    if (paramVariable != null && paramValue != null) {
        args.add(paramVariable, paramValue);
    }
    ArrayList<Resource> result = new ArrayList<Resource>();
    ResultSet rs = QueryExecutionFactory.create(query, dataset, args).execSelect();
    while (rs.hasNext()) {
        RDFNode n = rs.next().get(resultVariable);
        if (n == null || !n.isURIResource()) continue;
        result.add(n.asResource());
    }
    Collections.sort(result, new Comparator<Resource>() {
        public int compare(Resource r1, Resource r2) {
            return r1.getURI().compareTo(r2.getURI());
        }
    });
    return result;
}
@Override
protected QuerySolutionMap getQuerySolutionMap(Triple t) {
    QuerySolutionMap map = new QuerySolutionMap();
    map.add(SUBJECT, model.asRDFNode(t.getSubject()));
    map.add(PREDICATE, model.asRDFNode(t.getPredicate()));
    map.add(OBJECT, model.asRDFNode(t.getObject()));
    return map;
}
public String getLangString(String queryFile, Resource term, String resultVariable) {
    Query query = getQuery(queryFile);
    QuerySolutionMap args = new QuerySolutionMap();
    args.add("term", term);
    args.add("prefLang", ResourceFactory.createPlainLiteral("en"));
    ResultSet rs = QueryExecutionFactory.create(query, dataset, args).execSelect();
    if (!rs.hasNext()) return null;
    RDFNode n = rs.next().get(resultVariable);
    if (n == null || !n.isLiteral()) return null;
    return n.asLiteral().getLexicalForm();
}
public QuerySolution getOneSolution(String queryFile, String paramVariable, Resource paramValue) {
    Query query = getQuery(queryFile);
    QuerySolutionMap args = new QuerySolutionMap();
    if (paramVariable != null && paramValue != null) {
        args.add(paramVariable, paramValue);
    }
    QueryExecution qe = QueryExecutionFactory.create(query, dataset, args);
    ResultSet rs = qe.execSelect();
    if (!rs.hasNext()) return null;
    QuerySolution result = rs.next();
    qe.close();
    return result;
}
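/*
 * A hedged sketch of how the three helpers above (getURIs, getLangString, getOneSolution)
 * might be combined. The query file names, variable names and the example URI are
 * assumptions for illustration; each query file is expected to expose the named variables.
 */
Resource concept = ResourceFactory.createResource("http://example.org/id/Concept_42");
List<Resource> broader = getURIs("broader.rq", "concept", concept, "broader");
String label = getLangString("label.rq", concept, "label");
QuerySolution details = getOneSolution("details.rq", "concept", concept);
if (details != null) {
    System.out.println(label + " has " + broader.size() + " broader concepts");
}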
@Override
public QuerySolution next() {
    T t = triples.next();
    QuerySolutionMap map = getQuerySolutionMap(t);
    return map;
}
@Override
protected QuerySolutionMap getQuerySolutionMap(Boolean t) {
    QuerySolutionMap map = new QuerySolutionMap();
    map.add(VALUE, model.createTypedLiteral(t));
    return map;
}
public static String execSparQLQuery(String query) {
    System.out.println("execSPINQuery");
    Model model = getUqModel();

    // Register system functions (such as sp:gt (>))
    SPINModuleRegistry.get().init();

    Query arqQuery = ARQFactory.get().createQuery(model, query);
    ARQ2SPIN arq2SPIN = new ARQ2SPIN(model);
    Select spinQuery = (Select) arq2SPIN.createQuery(arqQuery, null);

    System.out.println("SPIN query in Turtle:");
    model.write(System.out, FileUtils.langTurtle);
    System.out.println("-----");
    String str = spinQuery.toString();
    System.out.println("SPIN query:\n" + str);

    // Now turn it back into a Jena Query
    Query parsedBack = ARQFactory.get().createQuery(spinQuery);
    System.out.println("Jena query:\n" + parsedBack);

    com.hp.hpl.jena.query.Query arq = ARQFactory.get().createQuery(spinQuery);
    QueryExecution qexec = ARQFactory.get().createQueryExecution(arq, model);
    QuerySolutionMap arqBindings = new QuerySolutionMap();
    arqBindings.add("predicate", RDFS.label);
    qexec.setInitialBinding(arqBindings); // Pre-assign the arguments
    ResultSet rs = qexec.execSelect();

    // System.out.println("#####################################################################");
    //
    // if (rs.hasNext()) {
    //     QuerySolution row = rs.next();
    //     System.out.println("Row: " + row.toString());
    //     RDFNode user = row.get("User");
    //     Literal label = row.getLiteral("label");
    //     System.out.println(user.toString());
    // }
    // RDFNode object = rs.next().get("object");
    // System.out.println("Label is " + object);

    Collection<User> users = Sparql.exec(getUqModel(), User.class, query);
    String usersString = "";
    for (User user : users) {
        System.out.println("User: " + user.toString());
        usersString += user.toString() + "<br/>";
    }
    System.out.println("execSPINQuery() done.");
    return usersString;
}
@Override
protected void _run(String[] strings) throws Exception {
    Query q1 = sparqlService.getQuery("com/ontology2/telepathReports/pairedImportanceCount.sparql");
    ResultSet rs2 = sparqlService.select(q1);
    int rowCount = rs2.next().getLiteral("cnt").getInt();
    int j = 0;
    for (int i = 0; i < rowCount; i += BLOCKSIZE) {
        final Model m = ModelFactory.createDefaultModel();
        // Fetch at most BLOCKSIZE rows per query; the last block may be smaller.
        int limit = rowCount - i;
        limit = limit > BLOCKSIZE ? BLOCKSIZE : limit;
        final int ii = i;
        final int llimit = limit;
        Query q = sparqlService.getQuery("com/ontology2/telepathReports/pairedImportance.sparql",
                new QuerySolutionMap() {{
                    add("o", m.asRDFNode(NodeFactoryExtra.intToNode(ii)));
                    add("l", m.asRDFNode(NodeFactoryExtra.intToNode(llimit)));
                }});
        ResultSet results = sparqlService.select(q);
        while (results.hasNext()) {
            QuerySolution that = results.next();
            j++;
            String uri = that.get("s").toString();
            String eye = Float.toString(that.get("eye").asLiteral().getFloat());
            String pr = Float.toString(that.get("pr").asLiteral().getFloat());
            String shortUri = uri.substring(baseUri.length());
            System.out.print(shortUri);
            System.out.print("\t");
            System.out.print(eye);
            System.out.print("\t");
            System.out.print(pr);
            System.out.println();
        }
    }
    if (j != rowCount) {
        throw new Exception("Expected to get " + rowCount + " results but really got " + j + " results");
    }
}
public static void main(String[] args) {
    // load some data that uses RDFS
    Model data = FileManager.get().loadModel("file:data/input/turtle/ex6-data.ttl");

    Reasoner reasoner = ReasonerRegistry.getRDFSReasoner();
    reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT);
    InfModel infmodel = ModelFactory.createInfModel(reasoner, data);

    Resource theFirmNode = infmodel.createResource("http://www.example.org/example#TheFirm");

    String queryString = "SELECT ?s ?z WHERE { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z }";
    QuerySolutionMap initialBinding = new QuerySolutionMap();
    initialBinding.add("s", theFirmNode);

    Query query = QueryFactory.create(queryString);
    QueryExecution qexec = QueryExecutionFactory.create(query, infmodel, initialBinding);
    QueryExecUtils.executeQuery(qexec);

    System.out.println("\n----------\ndone");
}
protected abstract QuerySolutionMap getQuerySolutionMap(T t);
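/*
 * The abstract hook above is the one implemented for Triple and Boolean earlier. As a
 * sketch, a String-valued iterator could bind its value as a plain literal in the same
 * way (the VALUE constant and the model field are taken from the snippets above;
 * everything else is an assumption for illustration).
 */
@Override
protected QuerySolutionMap getQuerySolutionMap(String t) {
    QuerySolutionMap map = new QuerySolutionMap();
    // Mirror the Boolean variant: expose the value under the shared VALUE variable name.
    map.add(VALUE, model.createLiteral(t));
    return map;
}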