/**
 * Propagates synonym labels down the class hierarchy: for every class that has
 * an ancestor (via one or more rdfs:subClassOf links) carrying a synonym, a new
 * statement (class, synonym-property, literal) is produced.
 *
 * @param model the model to query
 * @return the inherited synonym statements (empty list when none match)
 */
private List<Statement> expandSubClasses(Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <" + RDFS.getURI() + ">"
            + "SELECT DISTINCT ?class ?synonym "
            + "WHERE { "
            + "?class rdfs:subClassOf+ ?subClass . "
            + "?subClass <" + synonym + "> ?synonym"
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    try {
        ResultSet resultSet = queryExecution.execSelect();
        resultSet.forEachRemaining(querySolution -> {
            stmts.add(new StatementImpl(querySolution.getResource("class"),
                    synonym, querySolution.getLiteral("synonym")));
        });
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        queryExecution.close();
    }
    return stmts;
}
/**
 * Propagates synonym labels down the property hierarchy: for every property
 * that has an ancestor (via one or more rdfs:subPropertyOf links) carrying a
 * synonym, a new statement (property, synonym-property, literal) is produced.
 *
 * @param model the model to query
 * @return the inherited synonym statements (empty list when none match)
 */
private List<Statement> expandSubProperties(Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <" + RDFS.getURI() + ">"
            + "SELECT DISTINCT ?property ?synonym "
            + "WHERE { "
            + "?property rdfs:subPropertyOf+ ?subProperty . "
            + "?subProperty <" + synonym + "> ?synonym"
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    try {
        ResultSet resultSet = queryExecution.execSelect();
        resultSet.forEachRemaining(querySolution -> {
            stmts.add(new StatementImpl(querySolution.getResource("property"),
                    synonym, querySolution.getLiteral("synonym")));
        });
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        queryExecution.close();
    }
    return stmts;
}
private void runQueryButtonActionPerformed(java.awt.event.ActionEvent evt) {// GEN-FIRST:event_runQueryButtonActionPerformed List<String> lines = null; try { lines = Files.readAllLines(queryPath); } catch (IOException ex) { Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex); } String queryString = ""; for (String line : lines) { queryString += line + System.lineSeparator(); } Query query = QueryFactory.create(queryString, Syntax.syntaxARQ); queryResultArea.setText("Starting query: " + queryPath.toFile().getName() + "\n"); Thread t = new Thread(new QueryProcessor(query, new QueryAreaStream( queryResultArea), dataset, checkbox1.getState())); t.start(); }
private void runSmellAnalysisButtonActionPerformed( java.awt.event.ActionEvent evt) {// GEN-FIRST:event_runSmellAnalysisButtonActionPerformed String filename = smellName; File smellFile = new File(System.getProperty("user.dir") + "/sparql/smells/" + filename.replaceAll(" ", "") + ".sparql"); List<String> lines = null; try { lines = Files.readAllLines(smellFile.toPath()); } catch (IOException ex) { Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex); } String queryString = ""; for (String line : lines) { queryString += line + System.lineSeparator(); } Query query = QueryFactory.create(queryString, Syntax.syntaxARQ); queryResultArea.setText("Starting analysis: " + smellName + "\n"); Thread t = new Thread(new QueryProcessor(query, new QueryAreaStream( queryResultArea), dataset, checkbox1.getState())); t.start(); }
private void runMetricsButtonActionPerformed(java.awt.event.ActionEvent evt) {// GEN-FIRST:event_runMetricsButtonActionPerformed String folder = metricName.split(":")[0].toLowerCase(); String filename = metricName.split(":")[1]; File metricFile = new File(System.getProperty("user.dir") + "/sparql/metrics/" + folder + "/" + filename + ".sparql"); List<String> lines = null; try { lines = Files.readAllLines(metricFile.toPath()); } catch (IOException ex) { Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex); } String queryString = ""; for (String line : lines) { queryString += line + System.lineSeparator(); } Query query = QueryFactory.create(queryString, Syntax.syntaxARQ); queryResultArea.setText("Starting analysis:" + metricName + "\n"); System.err.println(checkbox1.isEnabled()); Thread t = new Thread(new QueryProcessor(query, new QueryAreaStream( queryResultArea), dataset, checkbox1.getState())); t.start(); }
/**
 * Compiles the current query string to an algebra Op and dispatches it to the
 * local or external Jena service depending on {@code this.service}; the result
 * set is stored in {@code this.results}.
 *
 * @return always the literal "success" (callers appear to use it as a JSF-style
 *         navigation outcome), even when the query failed
 */
public String executeQuery() {
    Query query = QueryFactory.create(this.getQuery(), Syntax.syntaxARQ);
    Op op = Algebra.compile(query);
    try {
        // fix: replaced 'new String("internal").equals(...)' with a plain
        // literal — identical behavior, no pointless allocation
        if ("internal".equals(this.service)) {
            this.results = jenaService.runLocalOp(op);
        } else if ("external".equals(this.service)) {
            this.results = jenaService.runExternalOp(op);
        }
    } catch (Exception e) {
        // NOTE(review): failures are only printed and "success" is still
        // returned — preserved from the original; confirm this is intended
        e.printStackTrace();
    }
    return "success";
}
public static void getJob(GraphDatabaseService njgraph) { NeoGraph graph = new NeoGraph(njgraph); Model njmodel = ModelFactory.createModelForGraph(graph); ST descJob = TemplateLoader.getQueriesGroup().getInstanceOf("getGraph"); String queryASString = Constants.QUERY_PREFIX+ descJob.render(); Query query = QueryFactory.create(queryASString, Syntax.syntaxSPARQL_11); QueryExecution qexec = QueryExecutionFactory.create(query, njmodel); ResultSet res = qexec.execSelect(); int count=0; while(res.hasNext()){ //System.out.println("in while"+count); QuerySolution sol = res.next(); System.out.println(sol.get("?Z")); count++; } //log.info("Record fetched:"+ count); System.out.println("Record fetched:"+ count); }
private Model getCBD(String uri, String endpoint, String graph) { String query = "DESCRIBE <" + uri + ">"; System.out.println(query); Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ); QueryExecution qexec = QueryExecutionFactory.sparqlService( endpoint, sparqlQuery, graph); Model m2; try { m2 = qexec.execDescribe(); } catch (Exception e) { // the result vector is too large: create empty model m2 = ModelFactory.createDefaultModel(); } return m2; }
private ArrayList<Dataset> fetchDatasets() { String query = "select * where { ?x a <http://rdfs.org/ns/void#Dataset> . " + "?x <http://rdfs.org/ns/void#uriSpace> ?urispace . " + "?x <http://www.w3.org/2000/01/rdf-schema#label> ?label } " + "ORDER BY ?label"; Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ); QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, sparqlQuery, graph); ResultSet results = qexec.execSelect(); ArrayList<Dataset> arr = new ArrayList<>(); while (results.hasNext()) { QuerySolution n = results.next(); Resource v = n.getResource("x"); Literal label = n.getLiteral("label"); try { // FIXME wrong URIs (as literals) give an exception RDFNode uri = n.get("urispace"); arr.add(new Dataset(v.getURI(), label.getString(), uri.toString())); } catch (ClassCastException e) { } } return arr; }
/**
 * Lists all link-discovery framework versions (framework release, homepage,
 * label, revision) from the endpoint, ordered by label.
 *
 * @return the framework versions found
 */
private ArrayList<FrameworkVersion> fetchFrameworks() {
    String query = "select * where { ?v a <http://www.linklion.org/ontology#LDFrameworkVersion> . ?f a <http://www.linklion.org/ontology#LDFramework> . "
            + "?f <http://usefulinc.com/ns/doap#release> ?v . ?f <http://xmlns.com/foaf/0.1/homepage> ?url . "
            + "?v <http://www.w3.org/2000/01/rdf-schema#label> ?label . ?v <http://usefulinc.com/ns/doap#revision> ?ver } "
            + "ORDER BY ?label";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    ArrayList<FrameworkVersion> arr = new ArrayList<>();
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution n = results.next();
            Resource v = n.getResource("v");
            Resource url = n.getResource("url");
            Literal label = n.getLiteral("label");
            Literal ver = n.getLiteral("ver");
            arr.add(new FrameworkVersion(v.getURI(), label.getString(),
                    url.getURI(), ver.getString()));
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return arr;
}
/**
 * Lists all linking algorithms (URI, label, homepage) from the endpoint,
 * ordered by label.
 *
 * @return the algorithms found
 */
private ArrayList<Algorithm> fetchAlgorithms() {
    String query = "select * where { ?x a <http://www.linklion.org/ontology#Algorithm> . "
            + "?x <http://xmlns.com/foaf/0.1/homepage> ?url . "
            + "?x <http://www.w3.org/2000/01/rdf-schema#label> ?label } "
            + "ORDER BY ?label";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    ArrayList<Algorithm> arr = new ArrayList<>();
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution n = results.next();
            Resource v = n.getResource("x");
            Resource url = n.getResource("url");
            Literal label = n.getLiteral("label");
            arr.add(new Algorithm(v.getURI(), label.getString(), url.getURI()));
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return arr;
}
/**
 * Builds a map from each Mapping URI to its (source URI -&gt; target URI) pair.
 *
 * NOTE(review): if one mapping has several source/target rows, each row creates
 * a fresh inner map that replaces the previous one, so only the last row per
 * mapping survives — preserved from the original; confirm that mappings have
 * exactly one source and one target.
 *
 * @return mapping URI -&gt; {source URI -&gt; target URI}
 */
private HashMap<String, HashMap<String, String>> fetchDistinctMappingDataset() {
    String query = "select distinct * where {"
            + "?m a <http://www.linklion.org/ontology#Mapping> ."
            + "?m <http://www.linklion.org/ontology#hasSource> ?s ."
            + "?m <http://www.linklion.org/ontology#hasTarget> ?t ."
            + " } ORDER BY ?m ?s ?t";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    HashMap<String, HashMap<String, String>> map = new HashMap<String, HashMap<String, String>>();
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            HashMap<String, String> st = new HashMap<String, String>();
            QuerySolution nextLine = results.next();
            Resource mapping = nextLine.getResource("m");
            Resource s = nextLine.getResource("s");
            Resource t = nextLine.getResource("t");
            st.put(s.getURI(), t.getURI());
            map.put(mapping.getURI(), st);
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return map;
}
/**
 * Counts, per Mapping, how many links were derived from it
 * (prov:wasDerivedFrom).
 *
 * @return mapping URI -&gt; number of links
 */
private HashMap<String, Integer> linksPerMapping() {
    HashMap<String, Integer> lpm = new HashMap<>();
    String query = "select ?x (count(?l) as ?links) where { "
            + "?x a <http://www.linklion.org/ontology#Mapping> . "
            + "?l <http://www.w3.org/ns/prov#wasDerivedFrom> ?x "
            + "} GROUP BY ?x";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution n = results.next();
            Resource x = n.getResource("x");
            Literal links = n.getLiteral("links");
            lpm.put(x.getURI(), links.getInt());
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return lpm;
}
/**
 * Finds every class that "uses" the given property — either directly via
 * rdfs:domain, or indirectly through an owl:Restriction on the property
 * reached by rdfs:subClassOf / owl:equivalentClass — and records one
 * (property, usage, concept) statement per match.
 *
 * @param property the property whose usage is sought
 * @param model    the model to query
 * @return the usage statements (empty list when none match)
 */
private List<Statement> getUsage(Property property, Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
            + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
            + "SELECT DISTINCT ?concept "
            + "WHERE{"
            + " {<" + property.getURI() + "> rdfs:domain ?concept} "
            + " UNION "
            + " { "
            + "   ?concept rdfs:subClassOf|owl:equivalentClass ?restriction . "
            + "   ?restriction a owl:Restriction; "
            + "                owl:onProperty <" + property.getURI() + "> "
            + " } "
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    try {
        ResultSet resultSet = queryExecution.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution querySolution = resultSet.next();
            Resource concept = querySolution.getResource("concept");
            stmts.add(new StatementImpl(property, usage, concept));
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        queryExecution.close();
    }
    return stmts;
}
/**
 * Finds every class that "uses" the given class — either as the domain of a
 * property whose range is this class, or through an owl:Restriction
 * referencing it — and records one (class, usage, concept) statement per match.
 * Query failures are logged and yield an empty/partial list (best-effort,
 * preserved from the original).
 *
 * @param ontClass the class whose usage is sought
 * @param model    the model to query
 * @return the usage statements (possibly empty)
 */
private List<Statement> getUsage(OntClass ontClass, Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    try{
        String sparql = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
                + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                + "SELECT DISTINCT ?concept "
                + "WHERE{"
                + " {?prop rdfs:range <" + ontClass.getURI() + ">; "
                + "        rdfs:domain ?concept"
                + " }"
                + " UNION "
                + " { "
                + "   ?concept rdfs:subClassOf|owl:equivalentClass ?restriction . "
                + "   ?restriction a owl:Restriction; "
                + "                ?p <" + ontClass.getURI() + "> "
                + " } "
                + "}";
        Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
        QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
        try {
            ResultSet resultSet = queryExecution.execSelect();
            while (resultSet.hasNext()) {
                QuerySolution querySolution = resultSet.next();
                Resource concept = querySolution.getResource("concept");
                stmts.add(new StatementImpl(ontClass, usage, concept));
            }
        } finally {
            // fix: the QueryExecution was never closed (resource leak)
            queryExecution.close();
        }
    } catch(Exception e){
        log.error(e.getMessage(), e);
    }
    return stmts;
}
/**
 * Runs the SemanticallyDistantEntity smell query against {@code data} and
 * stores each ?entityname binding in the {@code instances} array.
 * On any failure {@code instances} is set to an empty array.
 */
private void getDistantEntities(){
    File metricFile = new File(System.getProperty("user.dir")
            + "/sparql/smells/SemanticallyDistantEntity.sparql");
    List<String> lines;
    try {
        lines = Files.readAllLines(metricFile.toPath());
    } catch (IOException ex) {
        Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex);
        // fix: the original continued with lines == null and threw an NPE
        instances = new String[0];
        return;
    }
    // String.join replaces the original quadratic += concatenation loop
    String queryString = String.join(System.lineSeparator(), lines) + System.lineSeparator();
    data.begin(ReadWrite.READ);
    try {
        List<QuerySolution> rlist = null;
        Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
        try (QueryExecution qe = QueryExecutionFactory.create(query, data)) {
            ResultSet results = qe.execSelect();
            rlist = ResultSetFormatter.toList(results);
        } catch (Exception e) {
            JOptionPane.showMessageDialog(null, "Writting to textarea failed!");
            e.printStackTrace();
        }
        if (rlist == null) {
            // fix: the original dereferenced rlist here and threw an NPE
            instances = new String[0];
            return;
        }
        instances = new String[rlist.size()];
        for (int j = 0; j < rlist.size(); j++) {
            instances[j] = rlist.get(j).getLiteral("entityname").getString();
        }
    } finally {
        // fix: end the read transaction even when the query fails
        data.end();
    }
}
/**
 * Runs the SemanticallyDistantType smell query against {@code data} and
 * stores each ?typename binding in the {@code subtypes} array.
 * On any failure {@code subtypes} is set to an empty array.
 */
private void getDistantTypes(){
    File metricFile = new File(System.getProperty("user.dir")
            + "/sparql/smells/SemanticallyDistantType.sparql");
    List<String> lines;
    try {
        lines = Files.readAllLines(metricFile.toPath());
    } catch (IOException ex) {
        Logger.getLogger(Ontogui.class.getName()).log(Level.SEVERE, null, ex);
        // fix: the original continued with lines == null and threw an NPE
        subtypes = new String[0];
        return;
    }
    // String.join replaces the original quadratic += concatenation loop
    String queryString = String.join(System.lineSeparator(), lines) + System.lineSeparator();
    data.begin(ReadWrite.READ);
    try {
        List<QuerySolution> rlist = null;
        Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
        try (QueryExecution qe = QueryExecutionFactory.create(query, data)) {
            ResultSet results = qe.execSelect();
            rlist = ResultSetFormatter.toList(results);
        } catch (Exception e) {
            JOptionPane.showMessageDialog(null, "Writting to textarea failed!");
            e.printStackTrace();
        }
        if (rlist == null) {
            // fix: the original dereferenced rlist here and threw an NPE
            subtypes = new String[0];
            return;
        }
        subtypes = new String[rlist.size()];
        for (int j = 0; j < rlist.size(); j++) {
            subtypes[j] = rlist.get(j).getLiteral("typename").getString();
        }
    } finally {
        // fix: end the read transaction even when the query fails
        data.end();
    }
}
/**
 * Parses the raw query string into a {@link SparqlQuery}, flagging it as
 * hybrid when the request asks for it.
 *
 * @return the parsed query
 * @throws SyntaxError when the string is not valid SPARQL (ARQ syntax)
 */
@Override
public Query parse() throws SyntaxError {
    try {
        final com.hp.hpl.jena.query.Query parsed = QueryFactory.create(qstr, Syntax.syntaxARQ);
        return new SparqlQuery(parsed, isHybrid(req));
    } catch (final Exception cause) {
        throw new SyntaxError(cause);
    }
}
/**
 * Perform SPARQL query against an endpoint on a given graph.
 *
 * @param query    the SELECT query text (ARQ syntax)
 * @param endpoint the SPARQL service URL
 * @param graph    the named graph to query
 * @return the materialised results (a rewindable ResultSet)
 */
public static ResultSet sparql(String query, String endpoint, String graph) {
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        // fix: the original returned a lazy ResultSet while its
        // QueryExecution was never closed (connection leak). Copying the
        // results lets us close here; ResultSetRewindable is-a ResultSet,
        // so callers are unaffected.
        return ResultSetFactory.copyResults(qexec.execSelect());
    } finally {
        qexec.close();
    }
}
/**
 * Runs a SELECT query inside a READ transaction on {@code dataset}.
 *
 * @param query the SELECT query text (ARQ syntax)
 * @return the materialised results (a rewindable ResultSet)
 */
private ResultSet executeSelect(String query) {
    dataset.begin(ReadWrite.READ);
    try {
        QueryExecution qexec = QueryExecutionFactory.create(
                QueryFactory.create(query, Syntax.syntaxARQ), dataset);
        try {
            // fix: the original returned a lazy ResultSet and then ended the
            // transaction in 'finally', so the caller would iterate outside
            // the transaction; copying materialises the rows while the
            // transaction is still open. It also lets us close the
            // QueryExecution, which the original leaked.
            return ResultSetFactory.copyResults(qexec.execSelect());
        } finally {
            qexec.close();
        }
    } finally {
        dataset.end();
    }
}
/**
 * Counts LDFramework instances at the endpoint, caches the value in
 * {@code numFrameworks}, and returns it formatted for display.
 */
public String getNumFrameworks() {
    String query = "select (count(*) as ?c) where {"
            + "?x a <http://www.linklion.org/ontology#LDFramework> ."
            + "}";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        numFrameworks = qexec.execSelect().next().getLiteral("c").getInt();
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return NumberFormat.getInstance().format(numFrameworks);
}
/**
 * Counts Mapping instances at the endpoint, caches the value in
 * {@code numMappings}, and returns it formatted for display.
 */
public String getNumMappings() {
    String query = "select (count(*) as ?c) where {"
            + "?x a <http://www.linklion.org/ontology#Mapping> ."
            + "}";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        numMappings = qexec.execSelect().next().getLiteral("c").getInt();
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return NumberFormat.getInstance().format(numMappings);
}
/**
 * Counts void:Dataset instances with a well-formed (URI-valued) uriSpace,
 * caches the value in {@code numDatasets}, and returns it formatted for
 * display.
 */
public String getNumDatasets() {
    String query = "select (count(*) as ?c) where {"
            + "?x a <http://rdfs.org/ns/void#Dataset> ."
            + "?x <http://rdfs.org/ns/void#uriSpace> ?uri ."
            + "FILTER isURI(?uri)"
            + "}";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        numDatasets = qexec.execSelect().next().getLiteral("c").getInt();
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return NumberFormat.getInstance().format(numDatasets);
}
/**
 * Counts Link instances at the endpoint, caches the value in
 * {@code numLinks}, and returns it formatted for display.
 */
public String getNumLinks() {
    String query = "select (count(*) as ?c) where {"
            + "?x a <http://www.linklion.org/ontology#Link> ."
            + "}";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        numLinks = qexec.execSelect().next().getLiteral("c").getInt();
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return NumberFormat.getInstance().format(numLinks);
}
/**
 * Counts the distinct predicates used by Link instances, caches the value in
 * {@code numLinkTypes}, and returns it formatted for display.
 */
public String getNumLinkTypes() {
    String query = "select (count(distinct ?o) as ?c) where {"
            + "?x a <http://www.linklion.org/ontology#Link> ."
            + "?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate> ?o ."
            + "}";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        numLinkTypes = qexec.execSelect().next().getLiteral("c").getInt();
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return NumberFormat.getInstance().format(numLinkTypes);
}
/**
 * Counts all triples in the graph, caches the value in {@code numTriples},
 * and returns it formatted for display.
 */
public String getNumTriples() {
    String query = "select (count(*) as ?c) where {"
            + "?s ?p ?o ."
            + "}";
    Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint,
            sparqlQuery, graph);
    try {
        numTriples = qexec.execSelect().next().getLiteral("c").getInt();
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close();
    }
    return NumberFormat.getInstance().format(numTriples);
}
private ArrayList<RsDataset> fetchMappingsPerDataset() { System.out.println("fetchMappingsPerDataset()"); String query = "select ?ds ?label ?uri (count(?m) as ?mcount) where " + " { " + " ?ds <http://www.w3.org/2000/01/rdf-schema#label> ?label . " + " ?ds <http://rdfs.org/ns/void#uriSpace> ?uri . " + " ?ds a <http://rdfs.org/ns/void#Dataset> . " + " { ?m <http://www.linklion.org/ontology#hasSource> ?ds . } " + " UNION " + " { ?m <http://www.linklion.org/ontology#hasTarget> ?ds . } " + " FILTER isURI(?uri) " + " } GROUP BY ?ds ?label ?uri ORDER BY desc(count(?m))"; Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ); QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, sparqlQuery, graph); ResultSet results = qexec.execSelect(); ArrayList<RsDataset> arr = new ArrayList<>(); while (results.hasNext()) { QuerySolution nextLine = results.next(); Literal mCount = nextLine.getLiteral("mcount"); String mString = mCount.toString(); int number = Integer.parseInt(mString.substring(0, mString.indexOf("^"))); Resource llUri = nextLine.getResource("ds"); Literal label = nextLine.getLiteral("label"); RDFNode uri; try { // FIXME wrong URIs (as literals) give an exception uri = nextLine.get("uri"); arr.add(new RsDataset(uri.toString(), label.toString(), number, llUri.getURI())); } catch (ClassCastException e) { } } return arr; }
private ArrayList<Mapping> fetchMappings() { String query = "select * where {" + "?x a <http://www.linklion.org/ontology#Mapping> ." + "?x <http://www.linklion.org/ontology#hasSource> ?s ." + "?x <http://www.linklion.org/ontology#hasTarget> ?t ." + "?x <http://www.linklion.org/ontology#storedAt> ?store ." + "}"; // "?s <http://www.w3.org/2000/01/rdf-schema#label> ?src ." + // "?t <http://www.w3.org/2000/01/rdf-schema#label> ?tgt" + // "} ORDER BY ?src ?tgt"; Query sparqlQuery = QueryFactory.create(query, Syntax.syntaxARQ); QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, sparqlQuery, graph); ResultSet results = qexec.execSelect(); ArrayList<Mapping> arr = new ArrayList<>(); while (results.hasNext()) { QuerySolution n = results.next(); Resource m = n.getResource("x"); // TODO restore after having reinstalled Virtuoso // Literal src = n.getLiteral("src"); // Literal tgt = n.getLiteral("tgt"); Resource store = n.getResource("store"); String s = n.getResource("s").getURI(); String t = n.getResource("t").getURI(); String src1 = s.substring(s.lastIndexOf("/")+1); String tgt1 = t.substring(t.lastIndexOf("/")+1); arr.add(new Mapping(m.getURI(), src1, tgt1, store.getURI())); } return arr; }
public ResultSetRewindable runLocalOp(Op op) { long startTime = System.currentTimeMillis(); Query q = OpAsQuery.asQuery(op); logger.debug("Running query on the local dataset" + ":" // + "\n\nORIGINAL OP:\n" // + op.toString() // + "\n\nOPTIMIZED OP\n" // + Algebra.optimize(op) + "\n\nSPARQL QUERY\n" + q.toString(Syntax.syntaxARQ)); try { Integer key = op.toString().hashCode(); if (cache.containsKey(key)) { logger.debug("The query was cached."); return cache.get(key); } ds.begin(ReadWrite.READ); QueryIterator qIter = Algebra.exec(op, this.ds); List<String> vars = new LinkedList<String>(); for (Var var : OpAsQuery.asQuery(op).getProjectVars()) { vars.add(var.getVarName()); } ResultSetRewindable results = ResultSetFactory .copyResults(ResultSetFactory.create(qIter, vars)); long endTime = System.currentTimeMillis(); String timeString = new SimpleDateFormat("mm:ss:SSS") .format(new Date(endTime - startTime)); // cache disabled // cache.put(op.toString().hashCode(), results); logger.info("The query returned after " + timeString + " with " + results.size() + " results"); return results; } finally { ds.end(); } }
/**
 * Executes an algebra Op against the configured external SPARQL endpoint and
 * returns the materialised (rewindable) results, logging the elapsed time.
 *
 * @param op the algebra expression to execute
 * @return the materialised results
 */
public ResultSetRewindable runExternalOp(Op op) {
    final long started = System.currentTimeMillis();
    final Query sparql = OpAsQuery.asQuery(op);
    logger.info("Running query on the external dataset:\n\n"
            + "SPARQL QUERY\n" + sparql.toString(Syntax.syntaxARQ));
    QueryExecution execution = QueryExecutionFactory.sparqlService(
            Constants.getSparqlService(), sparql);
    // copy the rows so the execution can be closed before returning
    ResultSetRewindable copied = ResultSetFactory.copyResults(execution.execSelect());
    final long finished = System.currentTimeMillis();
    String elapsed = new SimpleDateFormat("mm:ss:SSS")
            .format(new Date(finished - started));
    logger.info("The query returned after " + elapsed + " with "
            + copied.size() + " results");
    execution.close();
    return copied;
}
/**
 * Runs an advanced (ARQ-syntax) SELECT query against {@code model}.
 * The parsed query and its execution are stored in the instance fields
 * {@code query} and {@code qexec}, exactly as the original did; the returned
 * ResultSet is lazy and stays backed by {@code qexec}.
 *
 * @param consulta the SELECT query text
 * @return the (lazy) result set
 */
public com.hp.hpl.jena.query.ResultSet consultarAvanzada(String consulta) {
    this.query = QueryFactory.create(consulta, Syntax.syntaxARQ);
    this.qexec = QueryExecutionFactory.create(this.query, model);
    return this.qexec.execSelect();
}
/**
 * Converts an OWL class expression into a SPARQL query whose single
 * projection variable is <code>rootVariable</code>; optionally produces a
 * SELECT (COUNT(?var) as ?cnt) form instead.
 *
 * @param ce the OWL class expression to convert
 * @param rootVariable the name of the projection variable in the SPARQL query
 * @param countQuery whether to return a SELECT (COUNT(?var) as ?cnt) query
 * @return the SPARQL query
 */
public Query asQuery(String rootVariable, OWLClassExpression ce, boolean countQuery){
    final String sparql = convert(ce, rootVariable, countQuery);
    return QueryFactory.create(sparql, Syntax.syntaxARQ);
}
/**
 * Converts an OWL axiom into a SPARQL query projecting the given subject and
 * object variables.
 *
 * @param axiom the OWL axiom to convert
 * @param targetSubjectVariable the name of the subject projection variable
 * @param targetObjectVariable the name of the object projection variable
 * @return the SPARQL query
 */
public Query asQuery(OWLAxiom axiom, String targetSubjectVariable, String targetObjectVariable){
    final String sparql = convert(axiom, targetSubjectVariable, targetObjectVariable);
    return QueryFactory.create(sparql, Syntax.syntaxARQ);
}