/**
 * Infers class relations on the given model using the rules loaded from the
 * {@code TYPE_INFERENCE_RULES} classpath resource and adds every inferred
 * statement back into the model.
 *
 * @param classModel the model that is extended in place with inferred statements
 * @return true if the rules could be loaded and applied, false otherwise
 * @deprecated kept for backwards compatibility; see {@code createInferencer}
 */
@Deprecated
public static boolean inferClassRelations_OLD(Model classModel) {
    InputStream is = AbstractNIFParser.class.getClassLoader().getResourceAsStream(TYPE_INFERENCE_RULES);
    List<String> lines;
    try {
        lines = IOUtils.readLines(is);
    } catch (IOException e) {
        LOGGER.error("Couldn't load type inferencer rules from resource \"" + TYPE_INFERENCE_RULES
                + "\". Working on the standard model.", e);
        return false;
    } finally {
        // FIX: close the stream on the error path too (it was leaked on IOException)
        IOUtils.closeQuietly(is);
    }
    StringBuilder sb = new StringBuilder();
    for (String line : lines) {
        sb.append(line);
        // FIX: keep the line separator; concatenating lines without it can merge
        // tokens of adjacent rules and lets a '#' comment swallow the following line
        sb.append('\n');
    }
    Reasoner reasoner = new GenericRuleReasoner(Rule.parseRules(sb.toString()));
    InfModel infModel = ModelFactory.createInfModel(reasoner, classModel);
    classModel.add(infModel);
    return true;
}
/**
 * Creates a {@link SimpleSubClassInferencer} backed by a generic rule reasoner
 * whose rules are loaded from the resource named by the
 * {@code SUB_CLASS_INFERENCER_RULE_FILE_KEY} configuration property.
 *
 * @param classModel the model the inferencer reasons over
 * @return the inferencer, or null if the rules could not be loaded
 */
public static SimpleSubClassInferencer createInferencer(Model classModel) {
    String resourceName = GerbilConfiguration.getInstance().getString(SUB_CLASS_INFERENCER_RULE_FILE_KEY);
    if (resourceName == null) {
        LOGGER.error("Couldn't load subclass inferencer rules resource name from properties. Returning null.");
        return null;
    }
    InputStream is = RootConfig.class.getClassLoader().getResourceAsStream(resourceName);
    List<String> lines;
    try {
        lines = IOUtils.readLines(is);
    } catch (IOException e) {
        LOGGER.error("Couldn't load subclass inferencer rules from resource \"" + resourceName
                + "\". Returning null.", e);
        return null;
    } finally {
        // FIX: close the stream on the error path too (it was leaked on IOException)
        IOUtils.closeQuietly(is);
    }
    StringBuilder sb = new StringBuilder();
    for (String line : lines) {
        sb.append(line);
        // FIX: keep the line separator so rules and '#' comments stay well-formed
        sb.append('\n');
    }
    Reasoner reasoner = new GenericRuleReasoner(Rule.parseRules(sb.toString()));
    InfModel inf = ModelFactory.createInfModel(reasoner, classModel);
    return new SimpleSubClassInferencer(inf);
}
/**
 * Validates the collected sample.
 *
 * @param resource the Job resource representation (i.e. the sample owner).
 * @param exchange the current exchange.
 */
public void validate(final JobResource resource, final Exchange exchange) {
    log.info(MessageCatalog._00055_VALIDATING, resource.getID());
    resource.markAsValidated();
    collectSample(resource.getID(), exchange.getIn().getBody(String.class));

    final InfModel inferred = ModelFactory.createInfModel(reasoner, samples.remove(resource.getID()));
    final ValidityReport validity = inferred.validate();
    if (validity.isClean()) {
        log.info(MessageCatalog._00056_VALIDATION_OK, resource.getID());
        return;
    }

    // Validation failed: persist every report, stop the job and halt the route.
    log.info(MessageCatalog._00057_VALIDATION_KO, resource.getID());
    final Iterator<ValidityReport.Report> reports = validity.getReports();
    while (reports.hasNext()) {
        final ValidityReport.Report report = reports.next();
        validationMessageRepository.save(new ValidationMessage(resource.getID(), report.getType(), report.getDescription()));
        log.info(MessageCatalog._00058_VALIDATION_MSG, resource.getID(), report.getDescription(), report.getType());
    }
    resource.setRunning(false);
    exchange.setProperty(Exchange.ROUTE_STOP, Boolean.TRUE);
}
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex5-data.ttl"); Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://www.example.org/example#worksFor> ?z }" ; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n----------\ndone" ); }
/**
 * Builds an RDFS inference model from the ex1 schema and data, validates it
 * and prints either OK or the list of conflicts.
 */
public static void main(String[] args) {
    Model schema = FileManager.get().loadModel("file:data/input/turtle/ex1-schema.ttl");
    Model data = FileManager.get().loadModel("file:data/input/turtle/ex1-data.ttl");

    InfModel infmodel = ModelFactory.createRDFSModel(schema, data);
    ValidityReport validity = infmodel.validate();
    if (!validity.isValid()) {
        System.out.println("\nConflicts");
        Iterator i = validity.getReports();
        while (i.hasNext()) {
            ValidityReport.Report report = (ValidityReport.Report) i.next();
            System.out.println(" - " + report);
        }
    } else {
        System.out.println("\nOK");
    }
    System.out.println("done");
}
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex13-data.ttl"); Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://www.w3.org/2000/01/rdf-schema#label> ?z }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "done" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex12-data2.ttl"); Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x WHERE { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.example.org/example#Investigator> }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n---------------\n" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex17-data2.ttl"); Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://cal.example.com/cal#nextDeparture> ?z }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "done" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex12-data.ttl"); Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x WHERE { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.example.org/example#Analyst> }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n---------------\n" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex22-data.ttl"); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://www.example.org/example#hasAncestor> ?z }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "done" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex30-data.ttl"); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?facility ?location WHERE { ?p <http://www.example.org/example#manufactureLocation> ?location . ?p <http://mfg.example.org/mfg#facility> ?facility . }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n-------DONE--------\n" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex30-data.ttl"); Model sameAsData = FileManager.get().loadModel( "file:data/input/turtle/ex30-sameas.ttl" ); data.add( sameAsData ); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?facility ?location WHERE { ?p <http://www.example.org/example#manufactureLocation> ?location . ?p <http://mfg.example.org/mfg#facility> ?facility . }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n-------DONE--------\n" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex19-data.ttl"); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://www.example.org/example#hasPossession> ?z }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "done" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex21-data.ttl"); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://www.example.org/example#married> ?z }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n---------------\n" ); }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex20-data.ttl"); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x ?z WHERE { ?x <http://www.w3.org/2000/01/rdf-schema#subclassOf> ?z }"; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel) ; QueryExecUtils.executeQuery(qexec); System.out.println( "\n---------------\n" ); }
/**
 * Converts the given create-request model to TOSCA definitions.
 *
 * @param createModel the request model; lifecycle/reservation statements are
 *                    removed from it in place before conversion
 * @return the TOSCA topology serialization
 * @throws InvalidRequestException if the model cannot be parsed or converted
 */
protected String parseToDefinitions(Model createModel) throws InvalidRequestException {
    try {
        // Strip statements that must not appear in the generated definitions.
        createModel.removeAll(null, Omn.isResourceOf, null);
        createModel.removeAll(null, Omn.hasReservation, null);
        createModel.removeAll(null, Omn_lifecycle.hasState, null);
        createModel.removeAll(null, Omn_lifecycle.implementedBy, null);
        // FIX: removed unused local `pref` (getNsPrefixMap() result was never
        // read) and a leftover "CREATE MODEL" debug println.
        InfModel infModel = createInfModel(createModel);
        infModel.setNsPrefix("osco", "http://opensdncore.org/ontology/");
        return OMN2Tosca.getTopology(infModel);
    } catch (InvalidModelException | JAXBException | MultiplePropertyValuesException
            | RequiredResourceNotFoundException | MultipleNamespacesException e) {
        throw new InvalidRequestException(e);
    }
}
public static void subprojectsSellers() { //services for each table ApplicationContext ctx = new ClassPathXmlApplicationContext("spring.xml"); SubProjectSellersService sub = (SubProjectSellersService) ctx.getBean("subProjectSellersServiceImpl"); List<SubProjectSellers> subProjectSeller = sub.getSubProjectSellers(); //--------------RDF Model--------------// Model model = ModelFactory.createDefaultModel(); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infModel = ModelFactory.createInfModel(reasoner, model); model.setNsPrefix("elod", OntologySpecification.elodPrefix); model.setNsPrefix("gr", OntologySpecification.goodRelationsPrefix); for (SubProjectSellers subProjectSeller1 : subProjectSeller) { Resource instanceSeller = infModel.createResource(OntologySpecification.instancePrefix + "Organization/" + subProjectSeller1.getSellerId()); Resource instanceSubProject = infModel.createResource(OntologySpecification.instancePrefix + "Subproject/" + subProjectSeller1.getOps() + "/" + subProjectSeller1.getSubProjectId()); infModel.add(instanceSeller, RDF.type, OntologySpecification.organizationResource); infModel.add(instanceSeller, RDF.type, OntologySpecification.businessResource); infModel.add(instanceSubProject, RDF.type, OntologySpecification.subProjectResource); instanceSubProject.addProperty(OntologySpecification.seller, instanceSeller); } try { FileOutputStream fout = new FileOutputStream( "/Users/giovaf/Documents/yds_pilot1/espa_tests/22-02-2016_ouput/subProjectSellersEspa.rdf"); model.write(fout); } catch (IOException e) { System.out.println("Exception caught" + e.getMessage()); } }
public static void espaSellers() { ApplicationContext ctx = new ClassPathXmlApplicationContext("spring.xml"); SellersService sellers = (SellersService) ctx.getBean("sellersServiceImpl"); List<Sellers> seller = sellers.getSellers(); //--------------RDF Model--------------// Model model = ModelFactory.createDefaultModel(); Reasoner reasoner = ReasonerRegistry.getOWLReasoner(); InfModel infModel = ModelFactory.createInfModel(reasoner, model); model.setNsPrefix("elod", OntologySpecification.elodPrefix); model.setNsPrefix("gr", OntologySpecification.goodRelationsPrefix); model.setNsPrefix("vcard", OntologySpecification.vcardPrefix); for (Sellers seller1 : seller) { Resource instanceSeller = infModel.createResource(OntologySpecification.instancePrefix + "Organization/" + seller1.getId()); infModel.add(instanceSeller, RDF.type, OntologySpecification.organizationResource); instanceSeller.addProperty(OntologySpecification.name, seller1.getEponimia(), XSDDatatype.XSDstring); } try { FileOutputStream fout = new FileOutputStream( "/Users/giovaf/Documents/yds_pilot1/espa_tests/22-02-2016_ouput/sellersEspa.rdf"); model.write(fout); } catch (IOException e) { System.out.println("Exception caught" + e.getMessage()); } }
/** * Test if an inference model produce any results * */ @Test public void testIsEmpty() throws Exception { boolean res = false; InfModel inf = ModelFactory.createInfModel(this.myReasoner, this.instances); if (!inf.isEmpty()) { /// Inference is OK. Maybe we have new statements res = true; } assertTrue(res); }
/** * Test if the new Inference Model is consistent. * */ @Test public void testIsConsistent() throws Exception { boolean res = false; InfModel inf = ModelFactory.createInfModel(this.myReasoner, this.instances); if (!inf.isEmpty()) { ValidityReport validity = inf.validate(); if (validity.isValid()) { // Our inference has been validated and we can say that is consistent based on new rules. res = true; } } assertTrue(res); }
/** * Test if there are new statements based on loaded rule in the Reasoner * */ @Test public void testNewStatements() throws Exception { boolean res = false; InfModel inf = ModelFactory.createInfModel(this.myReasoner, this.instances); if (!inf.isEmpty()) { // It returns True if empty res = !inf.getDeductionsModel().isEmpty(); } assertTrue(res); }
/**
 * Check reasoning inference (no spin rules inferences, only OWL inferences
 * and restrictions)
 *
 * @param model the ontology model whose own reasoner is used for validation
 */
public void checkOntologyModelInconsistencies(OntModel model) {
    Reasoner reasoner = model.getReasoner();
    InfModel infmodel = ModelFactory.createInfModel(reasoner, model);
    ValidityReport report = infmodel.validate();
    if (!report.isValid()) {
        // FIX: corrected the "Incosistent" typo in the log message
        this.getLogger().severe("Inconsistent ontology -> isValid: " + report.isValid());
    } else {
        // FIX: a consistent ontology is not an error condition — log at info, not severe
        this.getLogger().info("No inconsistent ontology -> isValid: " + report.isValid());
    }
}
/** * Check the correctness for RDFS FULL, i.e. follow the RDFS specification * exactly, even inferring syntactic sugar. * <p> * Ontology must be in NT format * * @param ontologyLocation * where the ontology is on disk */ public static void checkInferrayCorrectnessRDFSAndDumpToFiles( final String ontologyLocation) { // Compute the model processed by Inferray final DefaultConfiguration config = new DefaultConfiguration(); config.setRulesProfile(SupportedProfile.RDFS); final Inferray infere = new Inferray(config); infere.parse(ontologyLocation); infere.process(); final Model actualModel = ExportUtils.exportToJenaModel(infere); // Compute the Jena Model final Model intemerdiateModel = ModelFactory.createDefaultModel(); try { final InputStream is = new BufferedInputStream(new FileInputStream( ontologyLocation)); intemerdiateModel.read(is, null, "N-TRIPLE");// } catch (final FileNotFoundException e) { LOGGER.error("Error reading file", e); return; } final Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_FULL); final InfModel expectedModel = ModelFactory.createInfModel(reasoner, intemerdiateModel); // Compute correctness checking final CorrectnessResult result = CorrectnessChecker .checkForCorrectness(expectedModel, actualModel); result.dumpMissingToFile(new File("missing.nt")); result.dumpOverheadToFile(new File("overhead.nt")); }
/**
 * Prepares {@code infModel} for querying: builds it from the reasoner and the
 * TBox model when absent or when new input has arrived, otherwise reuses it.
 * Resets {@code newInputFlag} to false on exit.
 *
 * @throws ConfigurationException propagated from reasoner/model construction
 */
protected void prepareInfModel() throws ConfigurationException {
    getReasonerOnlyWhenNeeded();
    if (infModel == null || newInputFlag == true) {
        if (schemaModelIsCachedInferredModel) {
            // The schema model already is the cached inferred model; reuse it directly.
            infModel = schemaModel;
        } else {
            long t1 = System.currentTimeMillis();
            generateTboxModelWithSpec();
            logger.debug("In prepareInfModel, modelSpec: "+modelSpec.toString());
            logger.debug("In prepareInfModel, reasoner rule count: "+getReasonerOnlyWhenNeeded().getRules().size());
            infModel = ModelFactory.createInfModel(reasoner, tboxModelWithSpec);
            // InfGraph graph = reasoner.bind(tboxModelWithSpec.getGraph());
            // infModel = new InfModelImpl(graph);
            synchronized(ReasonerFamily) {
                infModel.size(); // this forces instantiation of the inference model
                if (collectTimingInfo) {
                    long t2 = System.currentTimeMillis();
                    timingInfo.add(new ReasonerTiming(TIMING_PREPARE_INFMODEL, "prepare inference model", t2 - t1));
                }
            }
        }
    } else if(newInputFlag == true) {
        // NOTE(review): this branch is unreachable — if newInputFlag were true the
        // first condition above would already have matched. The rebind path below
        // can never run; the intent was probably "if (infModel == null) ... else if
        // (newInputFlag)". TODO confirm before changing behavior.
        logger.debug("In prepareInfModel, reusing infModel with newInputFlag is true");
        if (infModel instanceof InfModel) {
            synchronized(ReasonerFamily) {
                logger.debug("In prepareInfModel, reusing infModel, rebinding existing infModel");
                ((InfModel) infModel).rebind();
                infModel.size(); // force re-instantiation?
            }
        }
    } else {
        logger.debug("In prepareInfModel, reusing infModel without any changes, newInputFlag is false");
    }
    newInputFlag = false;
}
public boolean saveInferredModel(String filename, String modelname, boolean deductionsOnly) throws FileNotFoundException { try { prepareInfModel(); } catch (ConfigurationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (infModel != null) { OntModel m; if (deductionsOnly && infModel instanceof InfModel) { m = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null), ((InfModel) infModel).getDeductionsModel()); } else { m = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null), infModel); } String format = "RDF/XML-ABBREV"; FileOutputStream fps = new FileOutputStream(filename); RDFWriter rdfw = m.getWriter(format); rdfw.write(m, fps, modelname); try { fps.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return false; }
public boolean reset() { // if (infModel != null) { // System.out.println("Before rebind, infModel size is: " + infModel.size()); // } if (!initialized) { return false; } // if (dataModel != null) { // System.out.println("Before removeAll, dataModel size is: " + dataModel.size()); // dataModel.getBaseModel().removeAll(); // System.out.println("Before removeAll, tboxModelWithSpec size is: " + tboxModelWithSpec.size()); // tboxModelWithSpec.removeAll(); // System.out.println("After basemodel removeAll, dataModel size is: " + dataModel.size()); //// dataModel.removeAll(); //// System.out.println("After removeAll, dataModel size is: " + dataModel.size()); // } infModel = null; dataModel = null; tboxModelWithSpec = null; // prepareInfModel(); if (infModel != null && infModel instanceof InfModel) { ((InfModel)infModel).rebind(); // System.out.println("After rebind, infModel size is: " + infModel.size()); } // what else do we need to do? return true; }
/**
 * Builds a plain-text report of the derivations (reasoner deductions) for the
 * current inference model.
 *
 * @return a text/plain DataSource named "Derivations", or null when derivations
 *         are disabled, there are no statements, or preparation failed
 * @throws InvalidDerivationException declared but not thrown in this body
 * @throws ConfigurationException from getReasonerOnlyWhenNeeded()/getDerivationLevel()
 */
public DataSource getDerivations() throws InvalidDerivationException, ConfigurationException {
    getReasonerOnlyWhenNeeded();
    if (getDerivationLevel().equals(DERIVATION_NONE)){
        return null;
    }
    try {
        prepareInfModel();
        StmtIterator sitr;
        // Prefer only the reasoner-derived statements when available.
        if (infModel instanceof InfModel) {
            sitr = ((InfModel) infModel).getDeductionsModel().listStatements();
        } else {
            sitr = infModel.listStatements();
        }
        if (sitr.hasNext()) {
            StringWriter swriter = new StringWriter();
            PrintWriter out = new PrintWriter(swriter);
            out.println("Derivations from instance data combined with model '" + tbox + "', " + now() + "\n");
            writeStatementDerivations(out, null, sitr);
            String derivations = swriter.toString();
            out.close();
            StringDataSource ds = new StringDataSource(derivations, "text/plain");
            ds.setName("Derivations");
            return ds;
        }
        // NOTE(review): sitr is never closed on this path — Jena iterators should be
        // closed when not exhausted; confirm and consider sitr.close().
    } catch (ConfigurationException e) {
        // NOTE(review): this swallows a ConfigurationException even though the method
        // declares it — callers get null instead of the error. Confirm intent.
        e.printStackTrace();
    }
    return null;
}
/**
 * Returns the inference model, optionally restricted to the deduced statements.
 *
 * @param deductionsOnly if true (and an InfModel is available), return only the deductions
 * @return the inferred model or its deductions sub-model
 * @throws ConfigurationException if the inference model cannot be prepared
 */
public Model getInferredModel(boolean deductionsOnly) throws ConfigurationException {
    prepareInfModel();
    boolean restrictToDeductions = deductionsOnly && infModel instanceof InfModel;
    return restrictToDeductions ? ((InfModel) infModel).getDeductionsModel() : infModel;
}
public boolean saveInferredModel(String filename, String modelname, boolean deductionsOnly) throws FileNotFoundException { try { prepareInfModel(); } catch (ConfigurationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (infModel != null) { OntModel m; if (deductionsOnly && infModel instanceof InfModel) { m = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null), ((InfModel) infModel).getDeductionsModel()); } else { m = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null), infModel); } String format = ConfigurationManager.RDF_XML_ABBREV_FORMAT; FileOutputStream fps = new FileOutputStream(filename); RDFWriter rdfw = m.getWriter(format); rdfw.write(m, fps, modelname); try { fps.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return false; }
public static void main( String[] args ) { // load some data that uses RDFS Model data = FileManager.get().loadModel("file:data/input/turtle/ex2-data.ttl"); Reasoner reasoner = ReasonerRegistry.getRDFSReasoner(); reasoner.setParameter(ReasonerVocabulary.PROPsetRDFSLevel, ReasonerVocabulary.RDFS_DEFAULT); InfModel infmodel = ModelFactory.createInfModel(reasoner, data ); /* Do a SPARQL Query over the data in the model */ String queryString = "SELECT ?x WHERE { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.example.com/shop#Shirts>}" ; /* Now create and execute the query using a Query object */ Query query = QueryFactory.create(queryString) ; QueryExecution qexec = QueryExecutionFactory.create(query, infmodel); try { ResultSet results = qexec.execSelect() ; while( results.hasNext() ) { QuerySolution soln = results.nextSolution(); System.out.println( "soln: " + soln.toString()); } } finally { qexec.close(); } System.out.println( "done" ); }
/**
 * Converts the given model to TOSCA definitions via the inference model.
 *
 * @param model the input model
 * @return the TOSCA topology serialization
 * @throws InvalidRequestException if the model cannot be parsed or converted
 */
protected String parseToDefinitions(Model model) throws InvalidRequestException {
    try {
        InfModel inferred = createInfModel(model);
        inferred.setNsPrefix("osco", "http://opensdncore.org/ontology/");
        return OMN2Tosca.getTopology(inferred);
    } catch (InvalidModelException | JAXBException | MultiplePropertyValuesException
            | RequiredResourceNotFoundException | MultipleNamespacesException e) {
        throw new InvalidRequestException(e);
    }
}
/**
 * Builds an inference model from the given model merged with the TOSCA
 * adapter's ABox, parsed together with the osco ontology.
 *
 * @param model the input model; the adapter ABox is added to it in place
 * @return the parser's inference model
 * @throws InvalidModelException if parsing fails
 */
private InfModel createInfModel(Model model) throws InvalidModelException {
    model.add(this.toscaAdapter.getAdapterABox().getModel());
    // FIX: use a parameterized List instead of a raw type
    List<String> additionalOntologies = new ArrayList<>();
    additionalOntologies.add("/ontologies/osco.ttl");
    Parser parser = new Parser(model, additionalOntologies);
    return parser.getInfModel();
}
/**
 * Builds an inference model from the given model merged with this adapter's
 * ABox, parsed together with the osco ontology.
 *
 * @param model the input model; the adapter ABox is added to it in place
 * @return the parser's inference model
 * @throws InvalidModelException if parsing fails
 */
protected InfModel createInfModel(Model model) throws InvalidModelException {
    model.add(this.adapterABox.getModel());
    // FIX: use a parameterized List instead of a raw type
    List<String> additionalOntologies = new ArrayList<>();
    additionalOntologies.add("/ontologies/osco.ttl");
    Parser parser = new Parser(model, additionalOntologies);
    return parser.getInfModel();
}
/**
 * Runs the rules loaded from the given URL over the data model, then rewrites
 * every literal whose paginated form differs from the original (see
 * {@code paginate}) inside the resulting inference model.
 *
 * @param data the input model
 * @param rules URL of the Jena rule file
 * @param lineLength preferred line length passed to paginate
 * @param maxLineLength hard maximum line length passed to paginate
 * @return the inference model with long literals re-paginated
 * @throws IOException declared for rule loading; rulesFromURL may surface I/O errors
 */
private Model runInference(Model data, URL rules, int lineLength, int maxLineLength) throws IOException {
    Reasoner reasoner = new GenericRuleReasoner(Rule.rulesFromURL(rules.toString()));
    InfModel inf = ModelFactory.createInfModel(reasoner, data);
    // Break long literals (more than lineLength chars) using carriage returns
    Model remove = ModelFactory.createDefaultModel();
    Model add = ModelFactory.createDefaultModel();
    Selector sel = new SimpleSelector(null, null, (String)null);
    for(StmtIterator sIt = inf.listStatements(sel); sIt.hasNext();) {
        Statement s = sIt.nextStatement();
        if(!s.getObject().isLiteral()) continue;
        String l = s.getString();
        String lp = paginate(l, lineLength, maxLineLength);
        // Only rewrite when pagination actually changed the text; collect the
        // changes and apply them after iteration to avoid mutating the model
        // while it is being iterated.
        if (lp.length() != l.length()) {
            remove.add(s);
            // NOTE(review): re-adding with getLanguage() keeps the language tag but
            // drops any datatype of a typed literal — confirm inputs are plain
            // (untyped) literals before relying on this.
            add.add(s.getSubject(), s.getPredicate(), lp, s.getLanguage());
        }
    }
    inf.remove(remove);
    inf.add(add);
    return inf;
}
/**
 * Add OWL rules and compute the forward chain.
 * Reads {@code base/model.nt}, validates it under the Pellet reasoner, writes
 * the forward-chained closure to {@code base/model-fwc.nt} and deletes the input.
 *
 * @param base directory containing model.nt
 */
public static void run(String base) {
    Reasoner reasoner = PelletReasonerFactory.theInstance().create();
    OntModel ontModel = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
    InfModel infModel = ModelFactory.createInfModel(reasoner, ontModel);
    String path = System.getProperty("user.dir");
    RDFDataMgr.read(infModel, "file://" + path + "/" + base + "/model.nt");
    logger.info("Model size = " + ontModel.size());
    ValidityReport report = infModel.validate();
    printIterator(report.getReports(), "Validation Results");
    logger.info("Inferred model size = " + infModel.size());
    infModel.enterCriticalSection(Lock.READ);
    // FIX: the FileOutputStream was never closed; try-with-resources closes it,
    // and the catch is widened to IOException because close() may also throw.
    try (FileOutputStream out = new FileOutputStream(new File(base + "/model-fwc.nt"))) {
        RDFDataMgr.write(out, infModel, Lang.NT);
        logger.info("Model generated.");
    } catch (IOException e) {
        logger.fatal(e.getMessage());
        // FIX: preserve the cause instead of discarding it
        throw new RuntimeException("Necessary file model-fwc.nt was not generated.", e);
    } finally {
        infModel.leaveCriticalSection();
    }
    // FIX: do not silently ignore a failed delete of the input file
    if (!new File(base + "/model.nt").delete()) {
        logger.warn("Could not delete " + base + "/model.nt");
    }
}
/**
 * Computes the forward-chained closure of the given input file under the
 * Pellet reasoner and writes it, in N-Triples, to the given output path.
 *
 * @param input path of the input model (relative to the working directory)
 * @param output path of the generated closure file
 */
public static void closure(String input, String output) {
    Reasoner reasoner = PelletReasonerFactory.theInstance().create();
    OntModel ontModel = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
    InfModel infModel = ModelFactory.createInfModel(reasoner, ontModel);
    String path = System.getProperty("user.dir");
    RDFDataMgr.read(infModel, "file://" + path + "/" + input);
    logger.info("Model = " + input + ", size = " + ontModel.size());
    ValidityReport report = infModel.validate();
    printIterator(report.getReports(), "Validation Results");
    logger.info("Inferred model size = " + infModel.size());
    infModel.enterCriticalSection(Lock.READ);
    // FIX: the FileOutputStream was never closed; try-with-resources closes it,
    // and the catch is widened to IOException because close() may also throw.
    try (FileOutputStream out = new FileOutputStream(new File(output))) {
        RDFDataMgr.write(out, infModel, Lang.NT);
        logger.info("Model generated at " + output);
    } catch (IOException e) {
        logger.fatal(e.getMessage());
        // FIX: preserve the cause instead of discarding it
        throw new RuntimeException("Necessary file " + output + " was not generated.", e);
    } finally {
        infModel.leaveCriticalSection();
    }
}