Usage examples for the Java class com.hp.hpl.jena.query.ARQ

Project: aliada-tool    File: AliadaRDFStoreDAO.java
public String crm2AliadaClass(final String crmClass) {
    final Query query = QueryFactory.create(CRM_TO_ALIADA_CLASS_P1 + crmClass + CRM_TO_ALIADA_CLASS_P2);
    ARQ.getContext().setTrue(ARQ.useSAX);

    QueryExecution execution = null;
    try {
        execution = QueryExecutionFactory.sparqlService("http://172.25.5.15:8890/sparql", query);
        execution.setTimeout(2000, 5000);
        final ResultSet results = execution.execSelect();
        // Iterate over the SPARQL query results and return the first ?abstract binding.
        while (results.hasNext()) {
            QuerySolution soln = results.nextSolution();
            System.out.println(soln.get("?abstract"));
            return soln.get("?abstract").asResource().getURI();
        }
        return "NULL";
    } finally {
        try {
            if (execution != null) {
                execution.close();
            }
        } catch (Exception exception) {
            // Ignore errors raised while closing the execution.
        }
    }
}
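The snippet above sets the global ARQ.useSAX flag and a per-execution timeout before querying a remote SPARQL service. A minimal, self-contained sketch of the same pattern (the endpoint URL and query text are placeholders, not taken from the project):

import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;

public class RemoteSelectSketch {
    public static void main(String[] args) {
        // Parse the query text and ask ARQ to use its SAX-based result parser.
        Query query = QueryFactory.create("SELECT ?s WHERE { ?s ?p ?o } LIMIT 10");
        ARQ.getContext().setTrue(ARQ.useSAX);

        QueryExecution exec = QueryExecutionFactory.sparqlService("http://example.org/sparql", query);
        try {
            // First result within 2 s, whole query within 5 s (both in milliseconds).
            exec.setTimeout(2000, 5000);
            ResultSet results = exec.execSelect();
            while (results.hasNext()) {
                QuerySolution solution = results.nextSolution();
                System.out.println(solution.get("s"));
            }
        } finally {
            exec.close();
        }
    }
}

The useSAX flag only affects how ARQ parses remote SELECT results; the timeout pair is first-result/overall.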
Project: com.inova8.remediator    File: EvaluatorDispatch.java
@Override
public void visit(OpService opService) {
    Table input = pop();
    if (!input.isEmpty()) {

        TableFactory tableFactory = new TableFactory();
        ArrayList<Var>  vars= new ArrayList<Var>( OpVars.mentionedVars(opService));
        //Table inputVars = TableFactory.create(input,vars);
        Table inputVars = new TableFiltered(input,vars);

        Op op1 = OpSequence.create(opService.getSubOp(),
                OpTable.create(inputVars));
        opService = new OpService(opService.getService(), op1,
                opService.getSilent());
    }
    QueryIterator qIter = Service.exec(opService, ARQ.getContext());
    Table table = TableFactory.create(qIter);
    push(table);
}
Project: r2rml-kit    File: D2RQOptions.java
public static Context getContext(boolean fastMode) {
    Context result = ARQ.getContext().copy();
    String defaultValue = fastMode ? "true" : "false";
    result.set(D2RQOptions.MULTIPLEX_QUERIES, defaultValue);
    result.set(D2RQOptions.FILTER_TO_SQL, defaultValue);
    return result;
}
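getContext(boolean) above copies ARQ's global context so that component-specific options can be set without touching global state. A sketch of the same idea using a hypothetical symbol (the URI and option name below are illustrative, not part of ARQ or D2RQ):

import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.sparql.util.Context;
import com.hp.hpl.jena.sparql.util.Symbol;

public class ContextCopySketch {
    // Hypothetical per-component option; not an ARQ or D2RQ symbol.
    private static final Symbol MY_FAST_MODE = Symbol.create("http://example.org/arq#fastMode");

    public static Context contextFor(boolean fastMode) {
        // Start from a copy so the global ARQ context is left untouched.
        Context ctx = ARQ.getContext().copy();
        ctx.set(MY_FAST_MODE, fastMode);
        return ctx;
    }

    public static void main(String[] args) {
        Context ctx = contextFor(true);
        System.out.println("fast mode: " + ctx.isTrue(MY_FAST_MODE));
    }
}

The copy inherits whatever the global context held at the moment it was made, and later set(...) calls on it stay local.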
Project: exist-sparql    File: TDBRDFIndex.java
@Override
public void configure(BrokerPool pool, Path dataDir, Element config) throws DatabaseConfigurationException {
    super.configure(pool, dataDir, config);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Configuring SPARQL index");
    }

    /*
     * Some configurables.
     */
    NamedNodeMap attributes = config.getAttributes();
    for (int i = 0; i < attributes.getLength(); i++) {
        Attr attr = (Attr) attributes.item(i);
        if (attr.getName().equals(CFG_FILE_MODE)) {
            if (attr.getValue().equals(CFG_FILE_MODE_MAPPED)) {
                SystemTDB.setFileMode(FileMode.mapped);
            } else if (attr.getValue().equals(CFG_FILE_MODE_DIRECT)) {
                SystemTDB.setFileMode(FileMode.direct);
            }
        } else if (attr.getName().equals(CFG_LOG_EXEC)) {
            if (attr.getValue().equals(CFG_LOG_EXEC_TRUE)) {
                // ARQ.isTrue() only reads a flag; to turn execution logging on, set the symbol.
                ARQ.getContext().setTrue(ARQ.symLogExec);
            }
        }
    }

    // TDB.transactionJournalWriteBlockMode
}
Project: com.inova8.remediator    File: Eval.java
private static QueryIterator executeBGP(BasicPattern pattern, QueryIterator input, ExecutionContext execCxt) {
    if (pattern.isEmpty())
        return input ;

    boolean hideBNodeVars = execCxt.getContext().isTrue(ARQ.hideNonDistiguishedVariables) ;

    StageGenerator gen = StageBuilder.executeInline ;
    QueryIterator qIter = gen.execute(pattern, input, execCxt) ;

    // Remove non-distinguished variables here.
    // Project out only named variables.
    if (hideBNodeVars)
        qIter = new QueryIterDistinguishedVars(qIter, execCxt) ;
    return qIter ;
}
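executeBGP hands basic graph pattern evaluation to whichever StageGenerator is registered; ARQ lets callers install their own through the context. The sketch below is illustrative only (it is not part of the quack project): it wraps the currently registered generator and logs each pattern before delegating, assuming ARQ has already been initialized so a default generator is present.

import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.sparql.core.BasicPattern;
import com.hp.hpl.jena.sparql.engine.ExecutionContext;
import com.hp.hpl.jena.sparql.engine.QueryIterator;
import com.hp.hpl.jena.sparql.engine.main.StageBuilder;
import com.hp.hpl.jena.sparql.engine.main.StageGenerator;

public class LoggingStageGenerator implements StageGenerator {
    private final StageGenerator delegate;

    public LoggingStageGenerator(StageGenerator delegate) {
        this.delegate = delegate;
    }

    @Override
    public QueryIterator execute(BasicPattern pattern, QueryIterator input, ExecutionContext execCxt) {
        // Log the pattern size, then let the wrapped generator do the real work.
        System.out.println("Executing BGP with " + pattern.getList().size() + " triple pattern(s)");
        return delegate.execute(pattern, input, execCxt);
    }

    public static void install() {
        // Wrap whatever generator is currently registered in the global ARQ context.
        StageGenerator current = (StageGenerator) ARQ.getContext().get(ARQ.stageGenerator);
        StageBuilder.setGenerator(ARQ.getContext(), new LoggingStageGenerator(current));
    }
}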
Project: earthcube-EAGER    File: Endpoints.java
protected ResultSet queryEndpoint ( String endpoint, String sparqlQueryString )
{
    Query query = QueryFactory.create(sparqlQueryString);
    ARQ.getContext().setTrue(ARQ.useSAX);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, query);
    try {
        // Copy the results into memory before closing the execution; the original
        // ResultSet would be unusable once its QueryExecution is closed.
        return ResultSetFactory.copyResults(qexec.execSelect());
    } finally {
        qexec.close();
    }
}
Project: quack    File: tdbindexdump.java
protected tdbindexdump(String[] argv) {
    super(argv) ;
    super.modVersion.addClass(ARQ.class) ;
    super.modVersion.addClass(RIOT.class) ;
    super.modVersion.addClass(TDB.class) ;
    super.add(argLocation) ;
}
Project: quack    File: tdbindexcopy.java
protected tdbindexcopy(String[] argv) {
    super(argv) ;
    super.modVersion.addClass(ARQ.class) ;
    super.modVersion.addClass(RIOT.class) ;
    super.modVersion.addClass(TDB.class) ;
    super.add(argLocation) ;
}
Project: quack    File: QueryEngineMain2.java
@Override
protected Op modifyOp(Op op)
{ 
    if ( context.isFalse(ARQ.optimization) )
        return minimalModifyOp(op) ;
    return Algebra.optimize(op, super.context) ;
}
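modifyOp skips algebra optimization whenever ARQ.optimization is false in the engine's context. A minimal sketch of flipping that switch globally (the query and model below are placeholders):

import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class DisableOptimizationSketch {
    public static void main(String[] args) {
        // Turn the ARQ algebra optimizer off for engines that consult the global context.
        ARQ.getContext().set(ARQ.optimization, false);

        Model model = ModelFactory.createDefaultModel();
        QueryExecution exec = QueryExecutionFactory.create("SELECT * WHERE { ?s ?p ?o }", model);
        try {
            System.out.println(exec.execSelect().hasNext());  // false: the model is empty
        } finally {
            exec.close();
        }
    }
}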
Project: quack    File: QueryEngineFactoryQuackTDB.java
@Override
public Plan create(Op op, DatasetGraph dataset, Binding input, Context context)
{
    if ( context == null )
        context = ARQ.getContext().copy() ;
    DatasetGraphTDB dsgtdb = dsgToQuery(dataset) ;
    setup(dsgtdb, context) ;
    // This is the route for op execution, not from a Query.
    QueryEngineQuackTDB engine = new QueryEngineQuackTDB(op, dsgtdb, input, context) ;
    return engine.getPlan() ;
}
Project: quack    File: Explain2.java
public static void explain(ExplainCategory eCat, Object obj) {
    if ( active.contains(eCat) ) {
        Logger log = ARQ.getExecLogger() ;
        if ( log.isInfoEnabled() ) {
            output(log, eCat, String.valueOf(obj)) ;
        }
    }
}
Project: quack    File: Explain2.java
public static void explain(ExplainCategory eCat, String fmt, Object ... args) {
    if ( true ) {
        explain(ARQ.getExecLogger(), eCat, fmt, args) ;
        return ;
    }

    // Without logger.
    if ( active.contains(eCat) ) {
        String msg ;
        if ( args == null || args.length == 0 ) {
            msg = fmt ;
            if ( msg.endsWith("\n") )
                msg = fmt.substring(0, fmt.length()-1) ;
        }
        else
            msg = String.format(fmt, args) ;

        if ( msg.contains("\n") ) {
            String lines[] = msg.split("\n") ;
            output.println(startMarker + eCat.getlabel()) ;
            for (String line : lines)
                output.println(messageMarker + line) ;
            output.println(finishMarker + eCat.getlabel()) ;
        } else {
            output.println(msg) ;
        }
        output.flush() ;
    }
}
Project: quack    File: Quack.java
public static void setVerbose(boolean verbose) { 
    if ( verbose ) {
        // Force statistics output 
        LogCtl.enable(ARQ.logExecName) ;
        JOIN_EXPLAIN = true ;
        ARQ.setExecutionLogging(InfoLevel.ALL) ;
        Explain2.setActive(Quack.quackExec);
        Explain2.setActive(Quack.quackPlan);
    } else {
        ARQ.setExecutionLogging(InfoLevel.NONE) ;
        Explain2.remove(Quack.quackExec); 
        Explain2.remove(Quack.quackPlan);
        JOIN_EXPLAIN = false ;
    }
}
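setVerbose drives ARQ's execution logging. A hedged sketch of enabling and disabling it directly, assuming Jena 2.x where InfoLevel is the nested enum in com.hp.hpl.jena.sparql.mgt.Explain:

import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.sparql.mgt.Explain;

public class ExecutionLoggingSketch {
    public static void main(String[] args) {
        // Ask ARQ to record query execution details. The messages go to the logger
        // named by ARQ.logExecName, which must be enabled at INFO level in the
        // logging backend (log4j, an slf4j binding, etc.) to actually appear.
        ARQ.setExecutionLogging(Explain.InfoLevel.ALL);
        System.out.println("Execution logging sent to logger: " + ARQ.logExecName);

        // Switch it off again when done.
        ARQ.setExecutionLogging(Explain.InfoLevel.NONE);
    }
}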
Project: D2RQ-Update    File: D2RQRWQueryExecutionFactory.java
/**
 * <p>Method that constructs a QueryExecution object from the given query, dataset, and context</p>
 * @param query - the Query object
 * @param dataset - the input dataset for the given model
 * @param context - the context for this query
 * @return a QueryExecution object
 */
private static QueryExecution make( Query query, Dataset dataset, Context context )
{
    query.validate() ;
    if ( context == null ) context = ARQ.getContext().copy();
    DatasetGraph dsg = null ;
    if ( dataset != null ) dsg = dataset.asDatasetGraph() ;
    QueryEngineFactory f = findFactory( query, dsg, context );
    if ( f == null )
    {
        ALog.warn( QueryExecutionFactory.class, "Failed to find a QueryEngineFactory for query: " + query );
        return null ;
    }
    return new QueryExecutionBase( query, dataset, context, f );
}
Project: KBox    File: Server.java
public Server(int port, String pagePath, String subDomain, long timeout, Model model, Listener listener) {
    this(port, pagePath, subDomain, model, listener);
    ARQ.getContext().set(ARQ.queryTimeout, Long.toString(timeout));
}
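The constructor above stores the timeout under ARQ.queryTimeout in the global context. ARQ accepts either a single value (overall timeout in milliseconds) or a "first,overall" pair; a small sketch of both forms (the values are arbitrary):

import com.hp.hpl.jena.query.ARQ;

public class QueryTimeoutSketch {
    public static void main(String[] args) {
        // Overall timeout only: abort any query that runs longer than 10 s.
        ARQ.getContext().set(ARQ.queryTimeout, "10000");

        // Pair form: 2 s to the first result, 10 s for the whole query.
        ARQ.getContext().set(ARQ.queryTimeout, "2000,10000");
    }
}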
Project: c4a_data_repository    File: D2RServer.java
public void start() {
    startupError = true;
    if (config.isLocalMappingFile()) {
        this.dataset = new AutoReloadableDataset(loader,
                config.getLocalMappingFilename(),
                config.getAutoReloadMapping());
    } else {
        this.dataset = new AutoReloadableDataset(loader, null, false);
    }

    if (loader.getMapping().configuration().getUseAllOptimizations()) {
        log.info("Fast mode (all optimizations)");
    } else {
        log.info("Safe mode (launch using --fast to use all optimizations)");
    }

    // Set up a custom DescribeHandler that calls out to
    // {@link ResourceDescriber}
    DescribeHandlerRegistry.get().clear();
    DescribeHandlerRegistry.get().add(new DescribeHandlerFactory() {
        public DescribeHandler create() {
            return new DescribeHandler() {
                private BulkUpdateHandler adder;

                public void start(Model accumulateResultModel,
                        Context qContext) {
                    adder = accumulateResultModel.getGraph()
                            .getBulkUpdateHandler();
                }

                public void describe(Resource resource) {
                    log.info("DESCRIBE <" + resource + ">");
                    boolean outgoingTriplesOnly = isVocabularyResource(resource)
                            && !getConfig().getVocabularyIncludeInstances();
                    adder.add(new ResourceDescriber(getMapping(), resource
                            .asNode(), outgoingTriplesOnly,
                            Relation.NO_LIMIT, 
                            Math.round(config.getSPARQLTimeout())).description());
                }

                public void finish() {
                }
            };
        }
    });

    Registry.add(RDFServer.ServiceRegistryName,
            createJosekiServiceRegistry());

    if (config.getSPARQLTimeout() > 0) {
        ARQ.getContext().set(ARQ.queryTimeout, config.getSPARQLTimeout() * 1000);
    }

    startupError = false;
}
Project: OpenCollegeGraph    File: D2RServer.java
public void start() {
    startupError = true;
    if (config.isLocalMappingFile()) {
        this.dataset = new AutoReloadableDataset(loader,
                config.getLocalMappingFilename(),
                config.getAutoReloadMapping());
    } else {
        this.dataset = new AutoReloadableDataset(loader, null, false);
    }

    if (loader.getMapping().configuration().getUseAllOptimizations()) {
        log.info("Fast mode (all optimizations)");
    } else {
        log.info("Safe mode (launch using --fast to use all optimizations)");
    }

    // Set up a custom DescribeHandler that calls out to
    // {@link ResourceDescriber}
    DescribeHandlerRegistry.get().clear();
    DescribeHandlerRegistry.get().add(new DescribeHandlerFactory() {
        public DescribeHandler create() {
            return new DescribeHandler() {
                private BulkUpdateHandler adder;

                public void start(Model accumulateResultModel,
                        Context qContext) {
                    adder = accumulateResultModel.getGraph()
                            .getBulkUpdateHandler();
                }

                public void describe(Resource resource) {
                    log.info("DESCRIBE <" + resource + ">");
                    boolean outgoingTriplesOnly = isVocabularyResource(resource)
                            && !getConfig().getVocabularyIncludeInstances();
                    adder.add(new ResourceDescriber(getMapping(), resource
                            .asNode(), outgoingTriplesOnly,
                            Relation.NO_LIMIT, 
                            Math.round(config.getSPARQLTimeout())).description());
                }

                public void finish() {
                }
            };
        }
    });

    Registry.add(RDFServer.ServiceRegistryName,
            createJosekiServiceRegistry());

    if (config.getSPARQLTimeout() > 0) {
        ARQ.getContext().set(ARQ.queryTimeout, config.getSPARQLTimeout() * 1000);
    }

    startupError = false;
}
Project: aliada-tool    File: RDFStoreDAO.java
Context context() {
    final Context ctx = new Context(ARQ.getContext());
    ctx.put(Service.queryTimeout, 2000);
    return ctx; 
}
Project: VirtualSPARQLer    File: D2RServer.java
public void start() {
    startupError = true;
    if (config.isLocalMappingFile()) {
        this.dataset = new AutoReloadableDataset(loader,
                config.getLocalMappingFilename(),
                config.getAutoReloadMapping());
    } else {
        this.dataset = new AutoReloadableDataset(loader, null, false);
    }

    if (loader.getMapping().configuration().getUseAllOptimizations()) {
        log.info("Fast mode (all optimizations)");
    } else {
        log.info("Safe mode (launch using --fast to use all optimizations)");
    }

    // Set up a custom DescribeHandler that calls out to
    // {@link ResourceDescriber}
    DescribeHandlerRegistry.get().clear();
    DescribeHandlerRegistry.get().add(new DescribeHandlerFactory() {
        public DescribeHandler create() {
            return new DescribeHandler() {
                private BulkUpdateHandler adder;

                public void start(Model accumulateResultModel,
                        Context qContext) {
                    adder = accumulateResultModel.getGraph()
                            .getBulkUpdateHandler();
                }

                public void describe(Resource resource) {
                    log.info("DESCRIBE <" + resource + ">");
                    boolean outgoingTriplesOnly = isVocabularyResource(resource)
                            && !getConfig().getVocabularyIncludeInstances();
                    adder.add(new ResourceDescriber(getMapping(), resource
                            .asNode(), outgoingTriplesOnly,
                            Relation.NO_LIMIT, 
                            Math.round(config.getSPARQLTimeout())).description());
                }

                public void finish() {
                }
            };
        }
    });

    Registry.add(RDFServer.ServiceRegistryName,
            createJosekiServiceRegistry());

    if (config.getSPARQLTimeout() > 0) {
        ARQ.getContext().set(ARQ.queryTimeout, config.getSPARQLTimeout() * 1000);
    }

    startupError = false;
}
Project: 2014-infotur    File: RDFService.java
/**
 * Looks up extra information (the abstract) for a place on DBpedia via SPARQL.
 *
 * @param nome the place name to look up
 * @return the abstract found on DBpedia, or a default message if none was found
 */
public String buscarInformacoesExtrasDBPedia(String nome) {

    String service = "http://dbpedia.org/sparql";
    StringBuilder sparqlQueryBuilder = new StringBuilder();

    // Strip accents and replace whitespace with underscores to form the resource URI.
    nome = nome.replace(" ", "_");
    nome = Normalizer.normalize(nome, Normalizer.Form.NFD);
    nome = nome.replaceAll("[^\\p{ASCII}]", "");

    sparqlQueryBuilder.append(" SELECT ?abstract ");
    sparqlQueryBuilder.append(" WHERE {{ ");
    sparqlQueryBuilder.append("   <http://dbpedia.org/resource/" + nome + "> ");
    sparqlQueryBuilder.append("   <http://dbpedia.org/ontology/abstract> ");
    sparqlQueryBuilder.append("   ?abstract . ");
    sparqlQueryBuilder.append(" FILTER (");
    sparqlQueryBuilder.append("  langMatches(lang(?abstract), 'pt') || langMatches(lang(?abstract), 'en')");
    sparqlQueryBuilder.append(")}}");

    Query query = QueryFactory.create(sparqlQueryBuilder.toString());
    ARQ.getContext().setTrue(ARQ.useSAX);

    QueryExecution qe = QueryExecutionFactory.sparqlService(service, query);

    String resultado = "Nenhuma informação extra encontrada.";

    try {
        ResultSet results = qe.execSelect();

        while (results.hasNext()) {
            QuerySolution sol = results.nextSolution();
            resultado = sol.get("?abstract").toString();
        }

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        qe.close();
    }

    resultado = "Informações do DBPedia: \n\n" + resultado;

    return resultado;
}
Project: quack    File: OpEvaluator.java
protected OpEvaluator(ExecutionContext execCxt)
{
    this.execCxt = execCxt ;
    this.dispatcher = new EvaluatorDispatch(this) ;
    this.hideBNodeVars = execCxt.getContext().isTrue(ARQ.hideNonDistiguishedVariables) ;
}
Project: quack    File: Quack.java
/** Set the OpExecutorFactory to be used by QueryEngineQuackFactory */
public static void setOpExecutorFactory(OpExecutorFactory factory) {
    setOpExecutorFactory(ARQ.getContext(), factory) ;
}
Project: quack    File: TS_SPARQLTestsSPARQL11.java
@BeforeClass static public void beforeClass() {
    ARQ.setNormalMode();
    ARQ.setStrictMode() ;
    NodeValue.VerboseWarnings = false ;
    E_Function.WarnOnUnknownFunction = false ;
}
Project: quack    File: TS_SPARQLTestsSPARQL11.java
@AfterClass static public void afterClass() {
    ARQ.setNormalMode();
    NodeValue.VerboseWarnings = true ;
    E_Function.WarnOnUnknownFunction = true ;
}
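The test fixtures above toggle ARQ between strict and normal mode around each run. A small sketch of the same toggling outside a test harness:

import com.hp.hpl.jena.query.ARQ;

public class StrictModeSketch {
    public static void main(String[] args) {
        // Strict mode restricts ARQ to SPARQL-standard behaviour, disabling extensions.
        ARQ.setStrictMode();
        try {
            // ... run spec-conformance checks here ...
        } finally {
            // Restore the default behaviour for the rest of the JVM.
            ARQ.setNormalMode();
        }
    }
}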