/**
 * Parses an RDF string, serialized as TURTLE, into a {@link Request}.
 *
 * @param input RDF values as a TURTLE String
 * @return a {@link Request} object which contains information about latitude, longitude and date
 * @throws IllegalStateException if a literal object cannot be converted to its value
 * @throws NullPointerException  if {@code input} is null
 */
public static Request parse(String input) {
    Objects.requireNonNull(input, "input");
    Model model = ModelFactory.createDefaultModel();
    // Decode with an explicit charset; the platform default is not guaranteed to be UTF-8.
    model.read(new ByteArrayInputStream(input.getBytes(java.nio.charset.StandardCharsets.UTF_8)),
            null, "TURTLE");
    Map<String, Object> map = new HashMap<>();
    try {
        model.listStatements().forEachRemaining(statement -> {
            RDFNode rdfNode = statement.getObject();
            if (rdfNode.isLiteral()) {
                try {
                    // Keyed by the predicate's local name, e.g. "latitude" -> literal value.
                    map.put(statement.getPredicate().getLocalName(),
                            rdfNode.asLiteral().getValue());
                } catch (Exception e) {
                    LOGGER.error("RDF statement is not literal");
                    // Keep the original exception as the cause instead of dropping it.
                    throw new IllegalStateException(e.getMessage(), e);
                }
            }
        });
    } finally {
        // Release model resources even if literal extraction fails.
        model.close();
    }
    return getDataFromMap(map);
}
@Override
public void writeTo(Model t, Class<?> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, Object> httpHeaders,
        OutputStream entityStream) throws IOException, WebApplicationException {
    // Map the requested media type onto a Jena serialization language;
    // anything unrecognised falls back to TURTLE.
    final Lang lang;
    if (mediaType.equals(KRFormat.N3_TYPE)) {
        lang = Lang.N3;
    } else if (mediaType.equals(KRFormat.N_TRIPLE_TYPE)) {
        lang = Lang.NTRIPLES;
    } else if (mediaType.equals(KRFormat.RDF_JSON_TYPE)) {
        lang = Lang.RDFJSON;
    } else if (mediaType.equals(new MediaType("application", "json-ld"))) {
        lang = Lang.JSONLD;
    } else {
        lang = Lang.TURTLE;
    }
    RDFDataMgr.write(entityStream, t, lang);
}
/**
 * Collects synonym statements inherited through the rdfs:subClassOf closure:
 * every class gains the synonyms declared on any of its (transitive) superclasses.
 *
 * @param model the model to query
 * @return one synonym statement per (class, inherited synonym) pair
 */
private List<Statement> expandSubClasses(Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <" + RDFS.getURI() + ">"
            + "SELECT DISTINCT ?class ?synonym "
            + "WHERE { "
            + "?class rdfs:subClassOf+ ?subClass . "
            + "?subClass <" + synonym + "> ?synonym"
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    try {
        ResultSet resultSet = queryExecution.execSelect();
        resultSet.forEachRemaining(querySolution ->
                stmts.add(new StatementImpl(querySolution.getResource("class"), synonym,
                        querySolution.getLiteral("synonym"))));
    } finally {
        // QueryExecution holds resources; it was previously leaked.
        queryExecution.close();
    }
    return stmts;
}
/**
 * Collects synonym statements inherited through the rdfs:subPropertyOf closure:
 * every property gains the synonyms declared on any of its (transitive) superproperties.
 *
 * @param model the model to query
 * @return one synonym statement per (property, inherited synonym) pair
 */
private List<Statement> expandSubProperties(Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <" + RDFS.getURI() + ">"
            + "SELECT DISTINCT ?property ?synonym "
            + "WHERE { "
            + "?property rdfs:subPropertyOf+ ?subProperty . "
            + "?subProperty <" + synonym + "> ?synonym"
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    try {
        ResultSet resultSet = queryExecution.execSelect();
        resultSet.forEachRemaining(querySolution ->
                stmts.add(new StatementImpl(querySolution.getResource("property"), synonym,
                        querySolution.getLiteral("synonym"))));
    } finally {
        // QueryExecution holds resources; it was previously leaked.
        queryExecution.close();
    }
    return stmts;
}
/**
 * Loads the ontology at {@code iri} and submits it for indexing, then blocks
 * (polling once per second) until the indexing job reports completion.
 *
 * @param ontologyId  existing ontology ID, or {@code null} to index as a new ontology
 * @param name        human-readable ontology name
 * @param description ontology description
 * @param iri         location the ontology is loaded from
 */
private void doIndexing(String ontologyId, String name, String description, String iri){
    Model model = FileManager.get().loadModel(iri);
    IndexingJobInput indexingJobInput;
    if (ontologyId == null)
        indexingJobInput = new IndexingJobInput(name, description, iri, model);
    else
        indexingJobInput = new IndexingJobInput(ontologyId, name, description, iri, model);
    try {
        String jobId = indexOntology(indexingJobInput);
        // Poll until the job manager reports the job done.
        while (!jobManager.ping(jobId).isDone())
            Thread.sleep(1000);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
        log.error(e.getMessage(), e);
    } catch (OntologyAlreadyExistingException e) {
        log.error(e.getMessage(), e);
    }
}
/**
 * Add to the model the supervisor of the current Organization.
 *
 * @param model       the model we are currently working with
 * @param orgResource the current organization
 * @param supervisor  the supervisor of the organization
 */
private void createSupervisorOrganization(Model model, Resource orgResource, Organization supervisor) {
    String supervisorUri = Ontology.instancePrefix + "Organization/" + supervisor.getVatNumber();
    Resource supervisorResource = model.getResource(supervisorUri);
    if (model.containsResource(supervisorResource)) {
        // Supervisor already modeled: just link to it.
        orgResource.addProperty(Ontology.hasSupervisorOrganization, supervisorResource);
    } else {
        // Create the supervisor resource with all of its types.
        supervisorResource = model.createResource(supervisorUri, Ontology.organizationResource);
        model.createResource(supervisorUri, Ontology.businessEntityResource);
        model.createResource(supervisorUri, Ontology.orgOrganizationResource);
        model.createResource(supervisorUri, Ontology.registeredOrganizationResource);
        // Use equals-based emptiness check; the original compared Strings with !=,
        // which tests reference identity and (almost) always took the first branch.
        if (!supervisor.getVatNumber().isEmpty()) {
            supervisorResource.addLiteral(Ontology.vatId, supervisor.getVatNumber());
        } else {
            supervisorResource.addLiteral(Ontology.vatId, "Empty vatID");
        }
        supervisorResource.addLiteral(Ontology.organizationId, supervisor.getUid());
        /** organization - Supervisor **/
        orgResource.addProperty(Ontology.hasSupervisorOrganization, supervisorResource);
    }
}
/**
 * Add to the model the organization statuses ("Active", "Inactive", "Pending"),
 * each typed both as an OrganizationStatus and a concept, with Greek and
 * English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addOrganizationStatusToModel(Model model) {
    for (String status : new String[] {"Active", "Inactive", "Pending"}) {
        String uri = Ontology.instancePrefix + "OrganizationStatus/" + status;
        /** statusResource **/
        Resource statusResource = model.createResource(uri, Ontology.organizationStatusResource);
        model.createResource(uri, Ontology.conceptResource);
        /** configure prefLabel **/
        String[] details = hm.findOrganizationStatusDetails(status);
        statusResource.addProperty(Ontology.prefLabel, details[1], "el");
        statusResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the budget types, each typed both as a budget category
 * and a concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addBudgetTypeToModel(Model model) {
    String[] budgetTypes = {"Τακτικός Προϋπολογισμός", "Πρόγραμμα Δημοσίων Επενδύσεων",
            "Ίδια Έσοδα", "Συγχρηματοδοτούμενο Έργο"};
    for (String budgetType : budgetTypes) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findBudgetTypeIndividual(budgetType);
        /** statusResource **/
        Resource budgetResource = model.createResource(details[0], Ontology.budgetCategoryResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        budgetResource.addProperty(Ontology.prefLabel, details[1], "el");
        budgetResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the decision statuses, each typed both as a decision
 * status and a concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addDecisionStatusToModel(Model model) {
    String[] statuses = {"Published", "Pending_Revocation", "Revoked", "Submitted"};
    for (String status : statuses) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findDecisionStatusIndividual(status);
        /** statusResource **/
        Resource statusResource = model.createResource(details[0], Ontology.decisionStatusResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        statusResource.addProperty(Ontology.prefLabel, details[1], "el");
        statusResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the Regular (regulatory) Acts, each typed both as a
 * regulatory act and a concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addKanonistikiToModel(Model model) {
    String[] regularActs = {"Υπουργική Απόφαση",
            "Πράξη Γενικού Γραμματέα Αποκεντρωμένης Διοίκησης",
            "Πράξη Οργάνου Διοίκησης Ν.Π.Δ.Δ.",
            "Πράξη Οργάνου Διοίκησης ΟΤΑ Α’ και Β’ Βαθμού (και εποπτευόμενων φορέων τους)",
            "Λοιπές Κανονιστικές Πράξεις"};
    for (String regular : regularActs) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findKanonistikiIndividual(regular);
        /** regulatoryResource **/
        Resource regularResource = model.createResource(details[0], Ontology.regulatoryActResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        regularResource.addProperty(Ontology.prefLabel, details[1], "el");
        regularResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the Opinion Types, each typed both as an opinion and a
 * concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addOpinionToModel(Model model) {
    for (String opinion : new String[] {"Ανεξάρτητη Αρχή", "ΝΣΚ"}) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findOpinionOrgIndividual(opinion);
        /** opinionResource **/
        Resource opinionResource = model.createResource(details[0], Ontology.opinionResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        opinionResource.addProperty(Ontology.prefLabel, details[1], "el");
        opinionResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the Account Types, each typed both as an account and a
 * concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addAccountTypeToModel(Model model) {
    String[] accountTypes = {"Ισολογισμός", "Απολογισμός", "Ισολογισμός και Απολογισμός"};
    for (String account : accountTypes) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findAccountTypeIndividual(account);
        /** accountResource **/
        Resource accountResource = model.createResource(details[0], Ontology.accountResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        accountResource.addProperty(Ontology.prefLabel, details[1], "el");
        accountResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Runs a given Jena CONSTRUCT query on a given instance and adds the inferred
 * triples to a given Model.
 *
 * @param queryWrapper  the wrapper of the CONSTRUCT query to execute
 * @param queryModel    the query Model
 * @param newTriples    the Model to write the triples to
 * @param instance      the instance to run the inferences on
 * @param checkContains true to only call add if a Triple wasn't there yet
 * @return true if changes were done (only meaningful if checkContains == true)
 */
public static boolean runQueryOnInstance(QueryWrapper queryWrapper, Model queryModel,
        Model newTriples, Resource instance, boolean checkContains) {
    boolean changed = false;
    QueryExecution qexec =
            ARQFactory.get().createQueryExecution(queryWrapper.getQuery(), queryModel);
    try {
        // Bind ?this to the instance, then layer on any template bindings.
        QuerySolutionMap bindings = new QuerySolutionMap();
        bindings.add(SPIN.THIS_VAR_NAME, instance);
        Map<String, RDFNode> initialBindings = queryWrapper.getTemplateBinding();
        if (initialBindings != null) {
            for (Map.Entry<String, RDFNode> entry : initialBindings.entrySet()) {
                bindings.add(entry.getKey(), entry.getValue());
            }
        }
        qexec.setInitialBinding(bindings);
        Model cm = qexec.execConstruct();
        StmtIterator cit = cm.listStatements();
        while (cit.hasNext()) {
            Statement s = cit.nextStatement();
            if (!checkContains || !queryModel.contains(s)) {
                changed = true;
                newTriples.add(s);
            }
        }
    } finally {
        // QueryExecution holds resources; it was previously leaked.
        qexec.close();
    }
    return changed;
}
/**
 * Add to the model the Vacancy Types, each typed both as a vacancy type and
 * a concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addVacancyTypeToModel(Model model) {
    String[] vacancyTypes = {
            "Προκήρυξη Πλήρωσης Θέσεων με διαγωνισμό ή επιλογή στις οποίες περιλαμβάνονται και οι προκηρύξεις για επιλογή και πλήρωση θέσεων διευθυντικών στελεχών των ΝΠΔΔ, φορέων του ευρύτερου δημόσιου τομέα, και των επιχειρήσεων και φορέων των ΟΤΑ Α’ & Β’ βαθμού",
            "Προκήρυξη Πλήρωσης Θέσεων Εκπαιδευτικού Προσωπικού (ΕΠ) Τεχνολογικού τομέα της Ανώτατης Εκπαίδευσης",
            "Προκήρυξη Πλήρωσης Θέσεων Διδακτικού Ερευνητικού Προσωπικού (ΔΕΠ) Πανεπιστημιακού τομέα"};
    for (String vacancy : vacancyTypes) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findVacancyTypeIndividual(vacancy);
        /** vacancyTypeResource **/
        Resource vacancyTypeResource = model.createResource(details[0], Ontology.vacancyTypeResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        vacancyTypeResource.addProperty(Ontology.prefLabel, details[1], "el");
        vacancyTypeResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the Administrative Changes, each typed both as an
 * administrative change and a concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addAdminChangeToModel(Model model) {
    String[] changes = {"Μετάταξη", "Λύση Υπαλληλικής Σχέσης", "Υποβιβασμός",
            "Αποδοχή Παραίτησης"};
    for (String change : changes) {
        // details[0] = resource URI, details[1] = Greek label, details[2] = English label
        String[] details = hm.findAdministrativeChangeIndividual(change);
        /** adminChangeResource **/
        Resource adminChangeResource = model.createResource(details[0], Ontology.adminChangeResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        adminChangeResource.addProperty(Ontology.prefLabel, details[1], "el");
        adminChangeResource.addProperty(Ontology.prefLabel, details[2], "en");
    }
}
/**
 * Add to the model the selection criteria, each typed both as a selection
 * criterion and a concept, with Greek and English preferred labels.
 *
 * @param model the model we are currently working with
 */
public void addSelectionCriteriaToModel(Model model) {
    String[] criteria = {"Χαμηλότερη Τιμή", "Συμφερότερη από οικονομικής άποψης",
            "Τεχνική ποιότητα"};
    for (String criterion : criteria) {
        // NOTE: unlike the other dictionaries, the labels live at indices 2 (el) and 3 (en).
        String[] details = hm.findCriterionIndividual(criterion);
        /** statusResource **/
        Resource criterionResource = model.createResource(details[0], Ontology.selectionCriterionResource);
        model.createResource(details[0], Ontology.conceptResource);
        /** configure prefLabel **/
        criterionResource.addProperty(Ontology.prefLabel, details[2], "el");
        criterionResource.addProperty(Ontology.prefLabel, details[3], "en");
    }
}
/**
 * Add the necessary namespace prefixes to the model we are currently working with.
 *
 * @param model the model we are currently working with
 */
public void setPrefixes(Model model) {
    // prefix -> namespace URI bindings used throughout the generated RDF
    String[][] bindings = {
            {"elod", Ontology.eLodPrefix},
            {"elodGeo", Ontology.elodGeoPrefix},
            {"pc", Ontology.publicContractsPrefix},
            {"skos", Ontology.skosPrefix},
            {"gr", Ontology.goodRelationsPrefix},
            {"rov", Ontology.regOrgPrefix},
            {"org", Ontology.orgPrefix},
            {"foaf", Ontology.foafPrefix},
            {"xsd", Ontology.xsdPrefix},
            {"dcterms", Ontology.dctermsPrefix},
            {"dc", Ontology.dcPrefix},
            {"pcdt", Ontology.pcdtPrefix},
            {"vcard", Ontology.vcardPrefix}};
    for (String[] binding : bindings) {
        model.setNsPrefix(binding[0], binding[1]);
    }
}
public static List<String> getIriList(Model model, String queryString){ List<String> iriList = new ArrayList<>(); Query query = QueryFactory.create(queryString); // Execute the query and obtain results QueryExecution qe = QueryExecutionFactory.create(query, model); ResultSet results = qe.execSelect(); for (; results.hasNext(); ) { QuerySolution soln = results.nextSolution(); if (soln.contains("iri") && soln.get("iri").isURIResource()) { iriList.add(soln.get("iri").asResource().getURI()); } } // Important - free up resources used running the query qe.close(); return iriList; }
public static Collection<Object[]> getTestListFromManifest(String manifestFileURL) { // We'd like to use FileManager.loadModel() but it doesn't work on jar: URLs // Model m = FileManager.get().loadModel(manifestFileURL); Model m = ModelFactory.createDefaultModel(); m.read(manifestFileURL, "TURTLE"); IRI baseIRI = D2RQTestUtil.createIRI(m.getNsPrefixURI("base")); ResultSet rs = QueryExecutionFactory.create(TEST_CASE_LIST, m).execSelect(); List<Object[]> result = new ArrayList<Object[]>(); while (rs.hasNext()) { QuerySolution qs = rs.next(); Resource mapping = qs.getResource("mapping"); Resource schema = qs.getResource("schema"); // if (!mapping.getLocalName().equals("constant-object.ttl")) continue; QueryExecution qe = QueryExecutionFactory.create(TEST_CASE_TRIPLES, m); qe.setInitialBinding(qs); Model expectedTriples = qe.execConstruct(); result.add(new Object[]{baseIRI.relativize(mapping.getURI()).toString(), mapping.getURI(), schema.getURI(), expectedTriples}); } return result; }
/**
 * Registers the two assessment techniques in the model: SPARQL-based distinct
 * IRI counting and HTTP URL dereferencing. Both are objective and automatic.
 *
 * @param model the model to add the technique descriptions to
 */
private static void addTechniques(Model model) {
    // Technique 1: distinct IRI counting via SPARQL.
    Resource sparqlCounting = model.createResource(LDQM.AT_SPARQL_Query.getURI());
    sparqlCounting.addProperty(RDF.type, LDQ.AssessmentTechnique);
    sparqlCounting.addLiteral(LDQ.isSubjective, false);
    sparqlCounting.addProperty(LDQ.hasAutomationLevel, LDQ.Automatic);
    sparqlCounting.addLiteral(DC.description, model.createLiteral(
            "Distinct IRI counts for a given RDF graph are calculated with a SPARQL query. "
            + "This technique is used to count distinct iris, subjects, predicates, objects, etc.",
            "en"));
    // Technique 2: URL dereferencing via HTTP HEAD, falling back to GET.
    Resource dereferencing = model.createResource(LDQM.AT_Url_Dereferencing.getURI());
    dereferencing.addProperty(RDF.type, LDQ.AssessmentTechnique);
    dereferencing.addLiteral(LDQ.isSubjective, false);
    dereferencing.addProperty(LDQ.hasAutomationLevel, LDQ.Automatic);
    dereferencing.addLiteral(DC.description, model.createLiteral(
            "Each URI is dereferenced using the HTTP HEAD method, and if fails using the HTTP GET method. "
            + "Resources with 2XX responses (after redirection) are considered dereferenceable.",
            "en"));
}
public static OntModel loadDefaultModel(){ InputStream in = BodyGeometryTest.class.getClassLoader() .getResourceAsStream("Duplex_A_20110505.ttl"); Model model=ModelFactory.createDefaultModel(); model.read(in,null,"TTL"); InputStream ins = BodyGeometryTest.class.getClassLoader() .getResourceAsStream("IFC2X3_TC1.ttl"); InputStream input = BodyGeometryTest.class.getClassLoader() .getResourceAsStream("Duplex_A_20110505_geometry.ttl"); Model geometryModel=ModelFactory.createDefaultModel(); geometryModel.read(input,null,"TTL"); Model schema=ModelFactory.createDefaultModel(); schema.read(ins,null,"TTL"); try { BimSPARQL.init(model,geometryModel); } catch (ClassNotFoundException | IOException | ParserConfigurationException | SAXException | URISyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); } OntModel ontology=ModelFactory.createOntologyModel(); ontology.add(schema); ontology.add(model); ontology.add(geometryModel); return ontology; }
/**
 * Fails fast if the model uses any property or class that is not defined,
 * reporting the first offender in each category.
 *
 * @param model                      the model to validate
 * @param undefinedPropertyErrorCode error code used when an unknown property is found
 * @param undefinedClassErrorCode    error code used when an unknown class is found
 * @throws D2RQException on the first undefined property or class encountered
 */
public void assertNoUndefinedTerms(Model model, int undefinedPropertyErrorCode, int undefinedClassErrorCode) {
    Collection<Property> unknownProperties = getUndefinedProperties(model);
    if (!unknownProperties.isEmpty()) {
        Property offender = unknownProperties.iterator().next();
        throw new D2RQException(
                "Unknown property " + PrettyPrinter.toString(offender) + ", maybe a typo?",
                undefinedPropertyErrorCode);
    }
    Collection<Resource> unknownClasses = getUndefinedClasses(model);
    if (!unknownClasses.isEmpty()) {
        Resource offender = unknownClasses.iterator().next();
        throw new D2RQException(
                "Unknown class " + PrettyPrinter.toString(offender) + ", maybe a typo?",
                undefinedClassErrorCode);
    }
}
/**
 * Convert a PhenoPacket to RDF triples using the JSON-LD context.
 *
 * @param packet the packet to convert
 * @param base   URI base for generated RDF; if {@code null} a UUID-based base
 *               will be used
 * @return model containing RDF triples
 * @throws JsonLdError
 * @throws JsonProcessingException
 */
public static Model toRdf(PhenoPacket packet, String base)
        throws JsonLdError, JsonProcessingException {
    // Ensure the packet carries a JSON-LD context before rendering.
    PhenoPacket packetWithContext = (packet.getContext() == null)
            ? PhenoPacket.newBuilder(packet).context(ContextUtil.defaultContextURI).build()
            : packet;
    // Fall back to a unique per-call base when none was supplied.
    String baseToUse = (base != null)
            ? base
            : "http://phenopackets.org/local/" + UUID.randomUUID().toString() + "/";
    Model rdfModel = ModelFactory.createDefaultModel();
    StringReader reader = new StringReader(JsonGenerator.render(packetWithContext));
    RDFDataMgr.read(rdfModel, reader, baseToUse, Lang.JSONLD);
    return rdfModel;
}
@Test public void testReadRdf() throws IOException, JsonLdError { // FIXME this does not really test the output PhenoPacket packet = YamlReader .readFile("src/test/resources/context/phenopacket-with-context.yaml"); Model model = RdfGenerator.toRdf(packet, null); String packetID = packet.getId(); PhenoPacket newPacket = RdfReader.readModel(model, ContextUtil.expandIdentifierAsValue(packetID, packet)); ObjectMapper m = new ObjectMapper(); m.setSerializationInclusion(JsonInclude.Include.NON_NULL); m.setFilterProvider(new SimpleFilterProvider().addFilter( "PhenoPacketClass", SimpleBeanPropertyFilter.serializeAll())); ObjectWriter writer = m.writerWithDefaultPrettyPrinter(); System.out.println(writer.writeValueAsString(newPacket)); }
/**
 * Builds the IBC data set: the local IBC tables, the remote QUDT ontologies
 * and the SP/SPIN vocabulary models, merged into a single union model.
 *
 * @return a fresh model containing the union of all source models
 */
public static Model loadIbcData(){
    Model union = ModelFactory.createDefaultModel();
    // Local TTL resources.
    union.add(loadModel("Table_705.8.ttl", "TTL"));
    union.add(loadModel("Table_705.8_Function.ttl", "TTL"));
    union.add(loadModel("QudtExpr.ttl", "TTL"));
    // Remote QUDT ontologies (format auto-detected).
    union.add(loadModel("http://www.qudt.org/qudt/owl/1.0.0/qudt-spin.owl", null));
    union.add(loadModel("http://www.qudt.org/qudt/owl/1.0.0/unit.owl", null));
    // SPIN vocabulary models.
    union.add(SP.getModel());
    union.add(SPIN.getModel());
    return union;
}
@GET
@Consumes(MediaType.WILDCARD)
@Produces({ KRFormat.RDF_XML, KRFormat.RDF_JSON, KRFormat.TURTLE, KRFormat.N_TRIPLE,
        KRFormat.N3, "application/json-ld" })
@Path("/{id}/source")
public Response getOntologySource(@PathParam("id") String id){
    // Return the ontology model as the entity, or a JSON error with 404.
    try {
        Model model = ontonetHub.getOntologySource(id);
        return Response.ok(model).build();
    } catch (NoSuchOntologyException e1) {
        JSONObject json = new JSONObject();
        try {
            json.put("error", "No ontology exists with the ID provided.");
        } catch (JSONException e) {
            log.error(e.getMessage(), e);
        }
        return Response.status(Status.NOT_FOUND).entity(json).build();
    }
}
@Override
public Model readFrom(Class<Model> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, String> httpHeaders,
        InputStream entityStream) throws IOException, WebApplicationException {
    // Read the request body into a fresh model; base URI is null and the
    // serialization is left to Jena's default for Model.read(InputStream, String).
    return ModelFactory.createDefaultModel().read(entityStream, null);
}
/**
 * Creates an indexing job for an ontology.
 *
 * @param siteManager         site manager used for indexing
 * @param ontologyName        human-readable ontology name
 * @param ontologyDescription ontology description
 * @param baseURI             base URI of the ontology
 * @param data                the ontology data to index
 * @param ctx                 OSGi bundle context (also supplies stanbol.home)
 * @param tcManager           triple-collection manager
 * @param ontologiesFolder    folder the ontologies are stored in
 */
public IndexingJob(OntoNetHubSiteManager siteManager, String ontologyName,
        String ontologyDescription, String baseURI, Model data, BundleContext ctx,
        TcManager tcManager, File ontologiesFolder) {
    this.siteManager = siteManager;
    this.ctx = ctx;
    // Resolve the Stanbol home directory from the bundle context.
    this.stanbolHome = ctx.getProperty("stanbol.home");
    this.ontologyName = ontologyName;
    this.ontologyDescription = ontologyDescription;
    this.baseURI = baseURI;
    this.data = data;
    this.tcManager = tcManager;
    this.ontologiesFolder = ontologiesFolder;
}
/**
 * Finds the classes that use a property, either directly via rdfs:domain or
 * through an owl:Restriction on the property, and records them as usage statements.
 *
 * @param property the property whose usage is sought
 * @param model    the model to query
 * @return one usage statement per concept found
 */
private List<Statement> getUsage(Property property, Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    String sparql = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
            + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
            + "SELECT DISTINCT ?concept "
            + "WHERE{"
            + " {<" + property.getURI() + "> rdfs:domain ?concept} "
            + " UNION "
            + " { "
            + " ?concept rdfs:subClassOf|owl:equivalentClass ?restriction . "
            + " ?restriction a owl:Restriction; "
            + " owl:onProperty <" + property.getURI() + "> "
            + " } "
            + "}";
    Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
    try {
        ResultSet resultSet = queryExecution.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution querySolution = resultSet.next();
            Resource concept = querySolution.getResource("concept");
            stmts.add(new StatementImpl(property, usage, concept));
        }
    } finally {
        // QueryExecution holds resources; it was previously leaked.
        queryExecution.close();
    }
    return stmts;
}
/**
 * Finds the classes that use a class, either as the rdfs:range of a property
 * whose domain they are, or through a restriction referencing the class, and
 * records them as usage statements. Query errors are logged and yield an empty list.
 *
 * @param ontClass the class whose usage is sought
 * @param model    the model to query
 * @return one usage statement per concept found (possibly empty)
 */
private List<Statement> getUsage(OntClass ontClass, Model model){
    List<Statement> stmts = new ArrayList<Statement>();
    try {
        String sparql = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
                + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                + "SELECT DISTINCT ?concept "
                + "WHERE{"
                + " {?prop rdfs:range <" + ontClass.getURI() + ">; "
                + " rdfs:domain ?concept"
                + " }"
                + " UNION "
                + " { "
                + " ?concept rdfs:subClassOf|owl:equivalentClass ?restriction . "
                + " ?restriction a owl:Restriction; "
                + " ?p <" + ontClass.getURI() + "> "
                + " } "
                + "}";
        Query query = QueryFactory.create(sparql, Syntax.syntaxARQ);
        QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
        try {
            ResultSet resultSet = queryExecution.execSelect();
            while (resultSet.hasNext()) {
                QuerySolution querySolution = resultSet.next();
                Resource concept = querySolution.getResource("concept");
                stmts.add(new StatementImpl(ontClass, usage, concept));
            }
        } finally {
            // QueryExecution holds resources; it was previously leaked.
            queryExecution.close();
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
    return stmts;
}
/**
 * Creates an indexing job for raw RDF data.
 *
 * @param ontologyName        human-readable ontology name
 * @param ontologyDescription ontology description
 * @param baseURI             base URI of the data
 * @param data                the RDF data to index
 * @param ctx                 OSGi bundle context (also supplies stanbol.home)
 * @param tcManager           triple-collection manager
 * @param ontologiesFolder    folder the ontologies are stored in
 */
public RDFIndexingJob(String ontologyName, String ontologyDescription, String baseURI,
        Model data, BundleContext ctx, TcManager tcManager, File ontologiesFolder) {
    this.ctx = ctx;
    // Resolve the Stanbol home directory from the bundle context.
    this.stanbolHome = ctx.getProperty("stanbol.home");
    this.ontologyName = ontologyName;
    this.ontologyDescription = ontologyDescription;
    this.baseURI = baseURI;
    this.data = data;
    this.tcManager = tcManager;
    this.ontologiesFolder = ontologiesFolder;
}
@Override
public Model getOntologySource(String id) throws NoSuchOntologyException {
    // Ontologies are stored as <ontologiesFolder>/<id>.rdf on disk.
    File ontologyFile = new File(ontologiesFolder, id + ".rdf");
    try {
        return FileManager.get().loadModel(ontologyFile.getCanonicalPath());
    } catch (IOException e) {
        // An unresolvable path is reported as a missing ontology.
        throw new NoSuchOntologyException(id);
    }
}
/**
 * Submits the given RDF model for indexing, then blocks (polling once per
 * second) until the indexing job reports completion.
 *
 * @param ontologyId  existing ontology ID, or {@code null} to index as new data
 * @param name        human-readable name
 * @param description description of the data
 * @param iri         IRI the data is associated with
 * @param model       the RDF data to index
 */
private void doIndexing(String ontologyId, String name, String description, String iri, Model model){
    IndexingJobInput indexingJobInput;
    if (ontologyId == null)
        indexingJobInput = new IndexingJobInput(name, description, iri, model);
    else
        indexingJobInput = new IndexingJobInput(ontologyId, name, description, iri, model);
    try {
        String jobId = indexRDF(indexingJobInput);
        // Poll until the job manager reports the job done.
        while (!jobManager.ping(jobId).isDone())
            Thread.sleep(1000);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
        log.error(e.getMessage(), e);
    } catch (OntologyAlreadyExistingException e) {
        log.error(e.getMessage(), e);
    }
}
/**
 * Builds a minimal demo model containing a single typed-string triple:
 * (:Joe :hasSentMessage "Hello World!"^^xsd:string).
 *
 * @return the populated model
 */
private Model createModel() {
    final String ns = "http://www.computas.no/zebra/";
    Model model = ModelFactory.createDefaultModel();
    model.setNsPrefix("", ns);
    model.setNsPrefix("xsd", "http://www.w3.org/2001/XMLSchema#");
    model.createResource(ns + "Joe")
            .addProperty(model.createProperty(ns + "hasSentMessage"),
                    "Hello World!", XSDDatatype.XSDstring);
    return model;
}
/**
 * Add to the model the Resources that contain the Countries as retrieved
 * from the respective dictionary, with Greek and English preferred labels.
 *
 * @param model       the model we are currently working with
 * @param countryList the countries as retrieved from the respective dictionary
 */
public void addCountriesToModel(Model model, DictionaryItems countryList) {
    for (DictionaryItem item : countryList.getItems()) {
        String uid = item.getUid();
        Resource countryResource = model.createResource(
                Ontology.instancePrefix + "Country/" + uid, Ontology.countryResource);
        // Labels are resolved from the country abbreviation: [0] = Greek, [1] = English.
        String[] labels = countries.findCountryFromAbbreviation(uid);
        countryResource.addLiteral(Ontology.countryId, uid);
        countryResource.addProperty(Ontology.prefLabel, labels[0], "el");
        countryResource.addProperty(Ontology.prefLabel, labels[1], "en");
    }
}
/**
 * Reads a TURTLE model from the stream, runs the given SELECT query against
 * it, and returns each solution's string rendering.
 *
 * @param in    TURTLE-serialized RDF input
 * @param query SPARQL SELECT query text
 * @return one string per query solution
 */
public List<String> getResult(InputStream in, String query){
    Model model = ModelFactory.createDefaultModel();
    model.read(in, null, "TTL");
    Query q = QueryFactory.create(query);
    QueryExecution qe = QueryExecutionFactory.create(q, model);
    List<String> results = new ArrayList<String>();
    try {
        ResultSet qresults = qe.execSelect();
        List<QuerySolution> solutions = ResultSetFormatter.toList(qresults);
        for (QuerySolution qs : solutions) {
            results.add(qs.toString());
        }
    } finally {
        // QueryExecution holds resources; it was previously leaked.
        qe.close();
    }
    return results;
}
/** * Add to the model the the Fek that is related to the Organization * * @param Model the model we are currently working with * @param Resource the resource of the related organization * @param String the issue type of the Fek * @param String the year that Fek was published * @param String the number of the Fek */ private void createFekResource(Model model, Resource orgResource, String fekIssue, String fekYear, String fekNumber, boolean newOrganizationFlag) { String fekUriName = ""; if ( (fekIssue != null) && (fekIssue != "") ) { fekUriName = fekIssue + "/" + fekYear + "/" + fekNumber; } else { fekUriName = fekYear + "/" + fekNumber; } Resource fekResource = model.getResource(Ontology.instancePrefix + "Fek/" + fekUriName); if (model.containsResource(fekResource)) { //if Fek resource exists use it orgResource.addProperty(Ontology.relatedFek, fekResource); } else { //...else create it fekResource = model.createResource(Ontology.instancePrefix + "Fek/" + fekUriName, Ontology.fekResource); fekResource.addProperty(Ontology.fekNumber, fekNumber); fekResource.addProperty(Ontology.fekYear, fekYear); if ( (fekIssue != null) && (fekIssue != "") ) { fekResource.addProperty(Ontology.fekIssue, model.getResource(Ontology.instancePrefix + "FekType/" + fekIssue)); } } /** Organization - FEK **/ if (newOrganizationFlag) { orgResource.addProperty(Ontology.relatedFek, fekResource); } else { orgResource.removeAll(Ontology.relatedFek); //delete the old relationships orgResource.addProperty(Ontology.relatedFek, fekResource); } }
/** * Add to the model the the Organization Unit that is related to the Organization * * @param Model the model we are currently working with * @param Resource the resource of the related organization * @param Unit the list of the organization units */ private void addOrganizationUnitToModel(Model model, Resource orgResource, Units unitsList) { for (Unit unit : unitsList.getUnits()) { Resource orgUnitResource = model.createResource(Ontology.instancePrefix + "OrganizationalUnit/" + unit.getUid(), Ontology.organizationalUnitResource); orgUnitResource.addLiteral(Ontology.organizationUnitId, unit.getUid()); //orgUnitResource.addLiteral(Ontology.organizationUnitActive, unit.isActive()); orgUnitResource.addProperty(RDFS.label, model.createLiteral(hm.cleanInputData(unit.getLabel()), "el")); if (unit.getAbbreviation() != null) { orgUnitResource.addLiteral(Ontology.organizationUnitAbbreviation, unit.getAbbreviation()); } if ((unit.getCategory() != null) && (unit.getCategory() != "")) { orgUnitResource.addProperty(Ontology.hasOrgUnitCategory, model.getResource(Ontology.instancePrefix + "OrganizationalUnitCategory/" + unit.getCategory())); } if (unit.getUnitDomains() != null) { for (String unitDomain : unit.getUnitDomains()) { orgUnitResource.addProperty(Ontology.orgActivity, model.getResource(Ontology.instancePrefix + "OrganizationDomain/" + unitDomain)); } } /** Organization - OrganizationalUnit **/ orgResource.addProperty(Ontology.hasUnit, orgUnitResource); /** OrganizationalUnit - Organization **/ orgUnitResource.addProperty(Ontology.unitOf, orgResource); } }
/**
 * Builds a Pellet-backed ontology model and reads the classpath resource
 * named by {@code modelBase} into it as TTL.
 *
 * @return the populated inference-enabled ontology model
 */
protected OntModel readModel() {
    // Wrap an empty base model in a Pellet inference model, then expose it as an OntModel.
    Reasoner reasoner = PelletReasonerFactory.theInstance().create();
    Model inferred = ModelFactory.createInfModel(reasoner, ModelFactory.createDefaultModel());
    OntModel ontModel = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC, inferred);
    ontModel.read(getClass().getResourceAsStream("/" + modelBase), null, TTL);
    return ontModel;
}
/** * Fetch all the decisions related to the provided date and * add them to the existing model. * * @param String the date to search for decisions * @param Model the model we are working with */ private static void dailyDecisions(String aDate, Model model) { RdfActions rdfActions = new RdfActions(); HandleApiRequests handleRequests = new HandleApiRequests(); HelperMethods hm = new HelperMethods(); //fetch the Decisions for that date System.out.println("Searching date: " + aDate); List<Decision> descionsList = handleRequests.searchDecisions(aDate); int decisionCounter = 1; int totalDecisions = descionsList.size(); //For each Decision add it to the RDF graph for (Decision decision : descionsList) { System.out.print("\nDecision number " + decisionCounter + " out of " + totalDecisions); //decision = handleRequests.searchSingleDecision(""); if (decision.getAda() != null) { rdfActions.createRdfFromDecision(decision, model); } decisionCounter += 1; //break; } /* store the model */ rdfActions.writeModel(model); hm.writeUnknownMetadata("datesOK", aDate); }