/**
 * Positive test: a {@link PagedResultSet} over a 24-row window starting at
 * offset 120 must expose exactly the same bindings as the raw result set
 * advanced to that offset.
 */
@Test
public void range() {
    final int offset = 120;
    final int size = 24;
    final ResultSetRewindable cut = new PagedResultSet(executeQuery(), size, offset);
    final ResultSet expected = executeQuery();

    // Drain the page once so it materializes its size, then rewind it.
    while (cut.hasNext()) {
        cut.next();
    }
    assertEquals(size, cut.size());
    cut.reset();

    // Skip the reference result set forward to the same offset.
    for (int skipped = 0; skipped < offset; skipped++) {
        expected.next();
    }

    // Every binding in the page must match the reference, in order.
    for (int row = 0; row < size; row++) {
        assertEquals(expected.nextBinding(), cut.nextBinding());
    }
    assertFalse(cut.hasNext());
}
@PostConstruct public void constructor() { // create the dataset for the copy of vivo String vivoDirectory = "/resources/jena/vivo"; String realVivoDirectory = context.getRealPath(vivoDirectory); logger.info("The vivo dataset is stored in " + realVivoDirectory); this.ds = TDBFactory.createDataset(realVivoDirectory); this.m = ds.getDefaultModel(); this.cache = new HashMap<Integer, ResultSetRewindable>(); }
/**
 * If the requested page (offset + size) extends past the end of the
 * underlying result set, only the rows actually available are returned.
 */
@Test
public void sizeGreaterThanEffectiveSize() {
    // Ask for 10 rows starting 5 rows before the end: only 5 can exist.
    final int offset = (int) (model.size() - 5);
    final ResultSetRewindable cut = new PagedResultSet(executeQuery(), 10, offset);
    while (cut.hasNext()) {
        cut.next();
    }
    assertEquals(5, cut.size());
    cut.reset();
}
/**
 * If the requested offset lies entirely beyond the available results,
 * the page is simply empty: no exception is thrown, and the set stays
 * empty even after a reset.
 *
 * <p>NOTE(review): the previous Javadoc claimed an exception would be
 * thrown, which contradicted the assertions below; the asserted behavior
 * (an empty result set) is what the test actually verifies.
 */
@Test
public void outOfAvailableRange() {
    // Start one row past the end of the model: nothing can be returned.
    final int offset = (int) (model.size() + 1);
    final ResultSetRewindable cut = new PagedResultSet(executeQuery(), 10, offset);
    while (cut.hasNext()) {
        cut.next();
    }
    assertEquals(0, cut.size());
    cut.reset();
    assertEquals(0, cut.size());
}
/**
 * With a size of 0 the {@link PagedResultSet} is empty, but the wrapper
 * must still perform one iteration step on the wrapped {@link ResultSet};
 * the mock verifies that single {@code next()} call.
 */
@Test
public void zeroSize() {
    final ResultSet rs = mock(ResultSet.class);
    when(rs.hasNext()).thenReturn(true);

    final ResultSetRewindable cut = new PagedResultSet(rs, 0, 10);

    // The wrapped result set was stepped once...
    verify(rs).next();
    // ...yet the page itself exposes no rows.
    assertFalse(cut.hasNext());
}
private QueryInfo query(String epURL, String operation) { QueryInfo info = new QueryInfo(); info.setURL(epURL); info.setOperation(operation); String queryString = query.replaceAll("%%s", "<"+_ep.getUri()+">"); HashSet<String> voidAset= new HashSet<String>(); ArrayList<CharSequence> voidA = new ArrayList<CharSequence>(); info.setResults(voidA); // initializing queryExecution factory with remote service. QueryExecution qexec = null; try { qexec = QueryManager.getExecution(epURL, queryString); boolean results = false; ResultSet resSet = qexec.execSelect(); ResultSetRewindable reswind = ResultSetFactory.makeRewindable(resSet); while(reswind.hasNext()){ RDFNode dataset = reswind.next().get("ds"); voidAset.add(dataset.toString()); } voidA.addAll(voidAset); log.info("Found {} results",reswind.getRowNumber()); } catch (Exception e1) { info.setException(ExceptionHandler.logAndtoString(e1)); log.debug("[EXEC] SPARQL query to "+epURL+" for "+_epURI, e1); } finally { if(qexec!=null)qexec.close(); } return info; }
/**
 * Executes the given algebra op against the external dataset and collects
 * every (predicate, value) pair whose value is a non-empty literal.
 *
 * @param op the SPARQL algebra expression to run
 * @return the accumulated selection result (possibly empty)
 */
public SelectionResult runSelectionQuery(Op op) {
    SelectionResult result = new SelectionResult();
    ResultSetRewindable results = jenaService.runExternalOp(op);
    results.reset();
    while (results.hasNext()) {
        QuerySolution soln = results.next();
        RDFNode predicate = soln.get("predicate");
        RDFNode object = soln.get("value");
        // Skip unbound or non-literal values: asLiteral() throws on
        // resources, and a null binding would NPE — previously either
        // case aborted the whole loop.
        if (object == null || !object.isLiteral()) {
            continue;
        }
        Literal literal = object.asLiteral();
        if (!literal.getString().isEmpty()) {
            result.addResult(predicate, literal);
        }
    }
    return result;
}
public ResultSetRewindable runLocalOp(Op op) { long startTime = System.currentTimeMillis(); Query q = OpAsQuery.asQuery(op); logger.debug("Running query on the local dataset" + ":" // + "\n\nORIGINAL OP:\n" // + op.toString() // + "\n\nOPTIMIZED OP\n" // + Algebra.optimize(op) + "\n\nSPARQL QUERY\n" + q.toString(Syntax.syntaxARQ)); try { Integer key = op.toString().hashCode(); if (cache.containsKey(key)) { logger.debug("The query was cached."); return cache.get(key); } ds.begin(ReadWrite.READ); QueryIterator qIter = Algebra.exec(op, this.ds); List<String> vars = new LinkedList<String>(); for (Var var : OpAsQuery.asQuery(op).getProjectVars()) { vars.add(var.getVarName()); } ResultSetRewindable results = ResultSetFactory .copyResults(ResultSetFactory.create(qIter, vars)); long endTime = System.currentTimeMillis(); String timeString = new SimpleDateFormat("mm:ss:SSS") .format(new Date(endTime - startTime)); // cache disabled // cache.put(op.toString().hashCode(), results); logger.info("The query returned after " + timeString + " with " + results.size() + " results"); return results; } finally { ds.end(); } }
/**
 * Executes the given algebra op against the configured external SPARQL
 * endpoint and returns a fully materialized, rewindable result set.
 *
 * @param op the SPARQL algebra expression to run
 * @return a rewindable copy of the remote query results
 */
public ResultSetRewindable runExternalOp(Op op) {
    long startTime = System.currentTimeMillis();
    Query q = OpAsQuery.asQuery(op);
    logger.info("Running query on the external dataset:\n\n"
            + "SPARQL QUERY\n" + q.toString(Syntax.syntaxARQ));
    QueryExecution qexec = QueryExecutionFactory.sparqlService(
            Constants.getSparqlService(), q);
    try {
        // Materialize everything so the execution can be closed eagerly.
        ResultSetRewindable results = ResultSetFactory.copyResults(qexec.execSelect());
        long elapsed = System.currentTimeMillis() - startTime;
        // Format elapsed time as mm:ss:SSS directly; SimpleDateFormat over
        // a Date was timezone dependent (and SimpleDateFormat is not
        // thread-safe).
        String timeString = String.format("%02d:%02d:%03d",
                elapsed / 60000, (elapsed / 1000) % 60, elapsed % 1000);
        logger.info("The query returned after " + timeString + " with "
                + results.size() + " results");
        return results;
    } finally {
        // Previously qexec leaked if execSelect() threw.
        qexec.close();
    }
}
/**
 * A null wrapped {@link ResultSet} must yield an empty page rather than
 * a failure.
 */
@Test
public void nullResultSet() {
    final ResultSetRewindable cut = new PagedResultSet(null, 10, 0);
    assertFalse(cut.hasNext());
}