/**
 * Loads the SKOS category dump and collects the URIs of categories that
 * have no sub-category (as reported by {@code hasSubCategory}), looking at
 * both ends of every skos:broader statement.
 *
 * @return set of category URIs without sub-categories
 */
public Set<String> initializeCategories() {
    Model model = ModelFactory.createDefaultModel();
    // NOTE(review): hard-coded local path — assumes this host layout; confirm.
    model.read("/home/zwicklbauer/HDTGeneration/skos_categories_en.nt");
    StmtIterator it = model.listStatements();
    Set<String> set = new HashSet<>();
    System.out.println("Los gehts");
    while (it.hasNext()) {
        Statement s = it.next();
        Property p = s.getPredicate();
        RDFNode n = s.getObject();
        if (p.getURI().equalsIgnoreCase(
                "http://www.w3.org/2004/02/skos/core#broader") && n.isResource()) {
            // Both the broader target and the subject are candidates.
            addIfLeafCategory(set, n.asResource().getURI());
            addIfLeafCategory(set, s.getSubject().getURI());
        }
    }
    return set;
}

/** Adds the URI to the set when it has no sub-category. */
private void addIfLeafCategory(Set<String> set, String uri) {
    if (!hasSubCategory(uri)) {
        set.add(uri);
    }
}
/**
 * Parse RDF input as string
 *
 * @param input RDF values as String (TURTLE syntax)
 * @return an {@link Request} object which contains information about latitude, longitude and date
 * @throws IllegalStateException if RDF is not literal
 * @throws NullPointerException if input is null
 */
public static Request parse(String input) {
    Objects.requireNonNull(input);
    Model model = ModelFactory.createDefaultModel();
    // Use an explicit charset: the no-arg getBytes() depends on the platform default
    // and can corrupt non-ASCII Turtle input.
    model.read(new ByteArrayInputStream(
            input.getBytes(java.nio.charset.StandardCharsets.UTF_8)), null, "TURTLE");
    Map<String, Object> map = new HashMap<>();
    model.listStatements().forEachRemaining(statement -> {
        RDFNode rdfNode = statement.getObject();
        if (rdfNode.isLiteral()) {
            try {
                map.put(statement.getPredicate().getLocalName(),
                        statement.getObject().asLiteral().getValue());
            } catch (Exception e) {
                LOGGER.error("RDF statement is not literal");
                // Preserve the original cause instead of discarding it.
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
    });
    model.close();
    return getDataFromMap(map);
}
@Test
public void run() {
    // Wire the loader up against the test database and the mapping under test.
    SystemLoader loader = new SystemLoader();
    loader.setJdbcURL(db.getJdbcURL());
    loader.setUsername(db.getUser());
    loader.setMappingFile(mappingFile);
    loader.setStartupSQLScript(sqlFile);
    loader.setSystemBaseURI(BASE_URI);
    if (resultFile == null) {
        // Negative case: this mapping is expected to produce a validation error.
        R2RMLReader r2rmlReader = loader.getR2RMLReader();
        MappingValidator mappingValidator = new MappingValidator(
                r2rmlReader.getMapping(), loader.getSQLConnection());
        mappingValidator.setReport(r2rmlReader.getReport());
        mappingValidator.run();
        if (!r2rmlReader.getReport().hasError()) {
            fail("Expected validation error");
        }
        return;
    }
    // Positive case: generated triples must be isomorphic to the expected file.
    Model generated = ModelFactory.createDefaultModel();
    generated.add(loader.getModelD2RQ());
    Model expected = FileManager.get().loadModel(resultFile, "N-TRIPLES");
    ModelAssert.assertIsomorphic(expected, generated);
}
public static Collection<Object[]> getTestListFromManifest(String manifestFileURL) { // We'd like to use FileManager.loadModel() but it doesn't work on jar: URLs // Model m = FileManager.get().loadModel(manifestFileURL); Model m = ModelFactory.createDefaultModel(); m.read(manifestFileURL, "TURTLE"); IRI baseIRI = D2RQTestUtil.createIRI(m.getNsPrefixURI("base")); ResultSet rs = QueryExecutionFactory.create(TEST_CASE_LIST, m).execSelect(); List<Object[]> result = new ArrayList<Object[]>(); while (rs.hasNext()) { QuerySolution qs = rs.next(); Resource mapping = qs.getResource("mapping"); Resource schema = qs.getResource("schema"); // if (!mapping.getLocalName().equals("constant-object.ttl")) continue; QueryExecution qe = QueryExecutionFactory.create(TEST_CASE_TRIPLES, m); qe.setInitialBinding(qs); Model expectedTriples = qe.execConstruct(); result.add(new Object[]{baseIRI.relativize(mapping.getURI()).toString(), mapping.getURI(), schema.getURI(), expectedTriples}); } return result; }
/**
 * Rebuilds the dataset graph and default model from the loader, recomputes
 * whether any database enforces a result size limit, and refreshes the
 * auto-reload bookkeeping timestamps.
 */
private void reload() {
    loader.getMapping().connect();
    GraphD2RQ d2rqGraph = loader.getGraphD2RQ();
    datasetGraph = DatasetGraphFactory.createOneGraph(d2rqGraph);
    defaultModel = ModelFactory.createModelForGraph(datasetGraph.getDefaultGraph());
    // A configured result size limit on any database means answers may be truncated.
    hasTruncatedResults = false;
    for (Database database : loader.getMapping().databases()) {
        hasTruncatedResults |= (database.getResultSizeLimit() != Database.NO_LIMIT);
    }
    if (autoReload) {
        lastModified = watchedFile.lastModified();
        lastReload = System.currentTimeMillis();
    }
}
/**
 * Convert a PhenoPacket to RDF triples using the JSON-LD context
 *
 * @param packet
 * @param base
 *            URI base for generated RDF; if `null` a UUID-based base will
 *            be used
 * @return model containing RDF triples
 * @throws JsonLdError
 * @throws JsonProcessingException
 */
public static Model toRdf(PhenoPacket packet, String base)
        throws JsonLdError, JsonProcessingException {
    // Ensure the packet carries a JSON-LD context before rendering.
    PhenoPacket contextualized = (packet.getContext() == null)
            ? PhenoPacket.newBuilder(packet)
                    .context(ContextUtil.defaultContextURI)
                    .build()
            : packet;
    // Fall back to a UUID-based base URI when the caller supplied none.
    String effectiveBase = (base != null)
            ? base
            : "http://phenopackets.org/local/" + UUID.randomUUID().toString() + "/";
    Model rdfModel = ModelFactory.createDefaultModel();
    StringReader jsonReader = new StringReader(JsonGenerator.render(contextualized));
    RDFDataMgr.read(rdfModel, jsonReader, effectiveBase, Lang.JSONLD);
    return rdfModel;
}
private static Model loadDefaultRules(){ Model rules=ModelFactory.createDefaultModel(); rules.add(SPIN.getModel()); rules.add(SP.getModel()); InputStream in=BimSPARQL.class.getClassLoader().getResourceAsStream("bimsparql/schm.ttl"); //new FileInputStream("IFC2X3_Schema.rdf"); rules.read(in, null,"TTL"); InputStream in2=BimSPARQL.class.getClassLoader().getResourceAsStream("bimsparql/pset.ttl"); //new FileInputStream("IFC2X3_Schema.rdf"); rules.read(in2, null,"TTL"); InputStream in3=BimSPARQL.class.getClassLoader().getResourceAsStream("bimsparql/pdt.ttl"); //new FileInputStream("IFC2X3_Schema.rdf"); rules.read(in3, null,"TTL"); InputStream in4=BimSPARQL.class.getClassLoader().getResourceAsStream("bimsparql/qto.ttl"); //new FileInputStream("IFC2X3_Schema.rdf"); rules.read(in4, null,"TTL"); InputStream in5=BimSPARQL.class.getClassLoader().getResourceAsStream("bimsparql/geom.ttl"); //new FileInputStream("IFC2X3_Schema.rdf"); rules.read(in5, null,"TTL"); InputStream in6=BimSPARQL.class.getClassLoader().getResourceAsStream("bimsparql/spt.ttl"); //new FileInputStream("IFC2X3_Schema.rdf"); rules.read(in6, null,"TTL"); return rules; }
public static OntModel loadDefaultModel(){ InputStream in = BodyGeometryTest.class.getClassLoader() .getResourceAsStream("Duplex_A_20110505.ttl"); Model model=ModelFactory.createDefaultModel(); model.read(in,null,"TTL"); InputStream ins = BodyGeometryTest.class.getClassLoader() .getResourceAsStream("IFC2X3_TC1.ttl"); InputStream input = BodyGeometryTest.class.getClassLoader() .getResourceAsStream("Duplex_A_20110505_geometry.ttl"); Model geometryModel=ModelFactory.createDefaultModel(); geometryModel.read(input,null,"TTL"); Model schema=ModelFactory.createDefaultModel(); schema.read(ins,null,"TTL"); try { BimSPARQL.init(model,geometryModel); } catch (ClassNotFoundException | IOException | ParserConfigurationException | SAXException | URISyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); } OntModel ontology=ModelFactory.createOntologyModel(); ontology.add(schema); ontology.add(model); ontology.add(geometryModel); return ontology; }
/**
 * Loads the IBC table data, QUDT ontologies and the SPIN/SP vocabularies
 * and returns their union as one model.
 *
 * @return union model of all IBC-related data
 */
public static Model loadIbcData() {
    Model merged = ModelFactory.createDefaultModel();
    // Same sources, same order as before; the array just removes the
    // one-variable-per-model boilerplate.
    Model[] parts = {
            loadModel("Table_705.8.ttl", "TTL"),
            loadModel("Table_705.8_Function.ttl", "TTL"),
            loadModel("QudtExpr.ttl", "TTL"),
            loadModel("http://www.qudt.org/qudt/owl/1.0.0/qudt-spin.owl", null),
            loadModel("http://www.qudt.org/qudt/owl/1.0.0/unit.owl", null),
            SP.getModel(),
            SPIN.getModel()
    };
    for (Model part : parts) {
        merged.add(part);
    }
    return merged;
}
/***************************************************************************
 * Private Methods - Handle Translation
 **************************************************************************/

/**
 * Fetches the given URL (gzip-accepted, negotiated to the configured
 * format) and parses the response body into a Jena model.
 *
 * @param req the originating servlet request (unused here, kept for the interface)
 * @param url the URL to GET
 * @return parsed model
 * @throws ResponseThrowable on a non-200 response (original status) or on
 *         parse/I-O failure (mapped to 502)
 */
private Model sendRequest(HttpServletRequest req, String url) throws ResponseThrowable {
    HttpClient httpClient = new HttpClient();
    GetMethod get = new GetMethod(url);
    get.setRequestHeader("Accept-Encoding", "gzip");
    get.setRequestHeader("Accept" , _format.getMimetype());
    InputStream body = null;
    try {
        int status = httpClient.executeMethod(get);
        if (status != 200) {
            throw new ResponseThrowable(status);
        }
        body = getInputStream(get);
        Model parsed = ModelFactory.createDefaultModel();
        parsed.read(body, null, _format.getJenaFormat().getLang().getLabel());
        return parsed;
    } catch (RiotException | IOException e) {
        // Upstream failure or unparsable payload: surface as Bad Gateway.
        throw new ResponseThrowable(e, 502);
    } finally {
        IOUtils.closeQuietly(body);
        get.releaseConnection();
    }
}
/**
 * Writes the user entities to rdf.
 *
 * Looks up the UserService via JNDI, serializes every user into a fresh
 * OWL model with Bean2RDF, and returns that model. Failures are logged to
 * stderr and an (possibly partially filled) model is still returned.
 */
private static OntModel writeUserEntries() {
    OntModel rdfModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    try {
        thewebsemantic.Bean2RDF beanWriter = new Bean2RDF(rdfModel);
        InitialContext ctx = new InitialContext();
        UserService userService = (UserService) ctx.lookup("java:module/UserService");
        for (User user : userService.getAll()) {
            beanWriter.save(user);
        }
    } catch (Exception e) {
        // Best effort: log and return whatever was written so far.
        e.printStackTrace();
    }
    return rdfModel;
}
/**
 * Loads the XML sources into one model, snapshots it to {@code all},
 * runs the enrichment pass (writing a CSV report to {@code enrich}),
 * stores the enriched model to {@code dst} and analyses the result.
 *
 * @param src    directory of ".xml" input files
 * @param all    output file for the pre-enrichment EDM dump
 * @param dst    output file for the post-enrichment EDM dump
 * @param enrich output file for the enrichment CSV report
 * @throws IOException on any I/O failure
 */
public void process(File src, File all, File dst, File enrich) throws IOException {
    Model model = ModelFactory.createDefaultModel();
    loadFiles(src, ".xml", model);
    storeAsEDM(model, all);
    // Cache the properties/resource used by the enrichment pass.
    _type = model.getProperty(RDF_TYPE);
    _prefLabel = model.getProperty(SKOS_PREF_LABEL);
    _foafName = model.getProperty(FOAF_NAME);
    Resource providedCHO = model.getResource(EDM_PROVIDEDCHO);
    CSVWriter csv = new CSVWriter(enrich, CSVFormat.EXCEL);
    csv.start();
    try {
        enrichImpl(model.listSubjectsWithProperty(_type, providedCHO), csv);
    } finally {
        csv.end();
    }
    storeAsEDM(model, dst);
    new CHOAnalysis().analyse(dst);
}
public void merge(File src, File dst) throws IOException { Model m = ModelFactory.createDefaultModel(); m.setNsPrefix("cc", "http://creativecommons.org/ns#"); m.setNsPrefix("dc", "http://purl.org/dc/elements/1.1/"); m.setNsPrefix("dcterms", "http://purl.org/dc/terms/"); m.setNsPrefix("edm", "http://www.europeana.eu/schemas/edm/"); m.setNsPrefix("foaf", "http://xmlns.com/foaf/0.1/"); m.setNsPrefix("odrl", "http://www.w3.org/ns/odrl/2/"); m.setNsPrefix("ore", "http://www.openarchives.org/ore/terms/"); m.setNsPrefix("owl", "http://www.w3.org/2002/07/owl#"); m.setNsPrefix("rdaGr2", "http://rdvocab.info/ElementsGr2/"); m.setNsPrefix("skos", "http://www.w3.org/2004/02/skos/core#"); m.setNsPrefix("wgs84_pos", "http://www.w3.org/2003/01/geo/wgs84_pos#"); m.setNsPrefix("xsi", "http://www.w3.org/2001/XMLSchema-instance"); m.setNsPrefix("xml", "http://www.w3.org/XML/1998/namespace"); merge(src, m); //store(m, dst, "RDF/XML-ABBREV"); store(m, dst, "RDF/XML"); }
/**
 * Fetches RDF/XML from the URL and, if it parses, adds it to the given model.
 *
 * @param m   model that receives the parsed statements
 * @param url source URL (also used as the parse base)
 * @return true when content was fetched and parsed; false otherwise
 */
public static boolean loadModel(Model m, String url) {
    String content = getRDF(url);
    if (content == null) {
        return false;
    }
    // Parse into a scratch model first so a parse failure leaves m untouched.
    Model scratch = ModelFactory.createDefaultModel();
    try {
        scratch.read(new StringReader(content), url, "RDF/XML");
    } catch (Exception e) {
        System.err.println("error parsing: " + url + ", error: " + e.getMessage());
        return false;
    }
    m.add(scratch);
    return true;
}
/**
 * Collects BabelNet object statistics for the subjects listed in
 * {@code srcList}, optionally printing the result to {@code dst}.
 *
 * @param srcList file with the data URLs to keep (filtered by PATTERN_BABELNET)
 * @param src     RDF source file to analyse
 * @param dst     optional output file for the report; may be null
 * @return the populated statistics object
 * @throws IOException on I/O failure
 */
public ObjectStat analyse(File srcList, File src, File dst) throws IOException {
    Collection<String> c = loadDataURLs(srcList, PATTERN_BABELNET);
    Model m = ModelFactory.createDefaultModel();
    loadModel(m, src, null);
    ObjectStat stat = new ObjectStat("BabelNet", true, false, true);
    stat.addPropertyValue(m.getProperty("http://babelnet.org/model/babelnet#gloss"));
    ResIterator iter = m.listSubjects();
    while (iter.hasNext()) {
        Resource r = iter.next();
        // Only count subjects that appear in the URL list.
        if (!c.contains(r.getURI())) {
            continue;
        }
        stat.newObject(r);
    }
    if (dst != null) {
        // Close the stream; the original leaked the PrintStream.
        try (PrintStream out = new PrintStream(dst, "UTF-8")) {
            stat.print(out);
        }
    }
    return stat;
}
/**
 * Reads a Turtle meta data file into a fresh model.
 *
 * @param rdfMetaDataFile the file to read
 * @return the parsed model, or null when the file could not be opened
 */
public static Model readRDFMetaDataFile(File rdfMetaDataFile) {
    RDFReader turtleReader = new TurtleReader();
    Model metaData = ModelFactory.createDefaultModel();
    LOGGER.info("Loading meta data file from " + rdfMetaDataFile.getAbsolutePath());
    FileInputStream input = null;
    try {
        input = new FileInputStream(rdfMetaDataFile);
        turtleReader.read(metaData, input, "");
    } catch (FileNotFoundException e) {
        LOGGER.error("Couldn't read meta data from file. Returning null.", e);
        return null;
    } finally {
        IOUtils.closeQuietly(input);
    }
    return metaData;
}
/**
 * Reads a local RDF/XML agent file and writes it back out as JSON-LD using
 * the local context document.
 *
 * @param args unused
 * @throws IOException on I/O failure
 */
public static void main(String[] args) throws IOException {
    // NOTE(review): hard-coded developer paths; only runs on this machine.
    File file = new File("C:\\Users\\Hugo\\Google Drive\\Europeana\\EDM\\jsonld\\agent.xml");
    Model m = ModelFactory.createDefaultModel();
    // Close the stream (the original FileInputStream was never closed).
    try (FileInputStream in = new FileInputStream(file)) {
        m.read(in, null, "RDF/XML");
    } catch (Exception e) {
        e.printStackTrace(System.err);
        return;
    }
    URL url = new URL("file:///C:/Users/Hugo/Google%20Drive/Europeana/EDM/jsonld/context.jsonld");
    new JsonLdWriter(url).write(m, new OutputStreamWriter(System.out));
}
/**
 * Counts how many statements in the NIF document use the identRef property.
 *
 * @param nif NIF document as a Turtle string
 * @return number of identRef statements
 */
private Integer getNumberOfLinks(String nif) {
    Model model = ModelFactory.createDefaultModel();
    // Use an explicit charset: the no-arg getBytes() depends on the platform
    // default and can corrupt non-ASCII NIF content.
    model.read(new ByteArrayInputStream(
            nif.getBytes(java.nio.charset.StandardCharsets.UTF_8)), null, "TTL");
    StmtIterator iter = model.listStatements();
    Integer result = 0;
    while (iter.hasNext()) {
        Statement stm = iter.nextStatement();
        if (NIF21Format.RDF_PROPERTY_IDENTREF.equals(stm.getPredicate().toString())) {
            result += 1;
        }
    }
    return result;
}
/**
 * Creates two named ontology models inside a TDB dataset, adds one
 * individual to each, and dumps the dataset as N-Quads.
 *
 * @throws Exception on any I/O or TDB failure
 */
@Test
public void testSomeMethod2() throws Exception {
    Dataset ds = TDBFactory.createDataset("/scratch/WORK2/jena/dataset2/");
    OntModel model1 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym1"));
    OntModel model2 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym2"));
    OntClass thing = model1.createClass("http://www.w3.org/2002/07/owl#Thing");
    model1.createIndividual("http://example.com/onto1#VijayRaj", thing);
    model2.createIndividual("http://example.;cegilovcom/onto2#VijayRaj", thing);
    Model m = model1.union(model2);
    // Close the writer so the output is flushed; the original leaked it.
    try (FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/mergetestds.xml")) {
        RDFDataMgr.write(fw, ds, RDFFormat.NQUADS_UTF8);
    }
}
/**
 * Creates a converter for turning CSV sources into RDF.
 *
 * Derives the TBox and ABox namespaces from {@code baseNameSpace}
 * ("...model#" and "...data#") and prepares two empty OWL ontology models
 * to receive schema and instance triples.
 *
 * @param sourceFilesDir     directory containing the CSV source files
 * @param className          name of the OWL class to generate instances of
 * @param baseNameSpace      base namespace; "model#"/"data#" are appended
 * @param separator          CSV column separator
 * @param textFieldSeparator quote/separator used for text fields
 * @param keyColIndex        index of the column used as the instance key
 */
public CSVSToRDFConverter(String sourceFilesDir, String className, String baseNameSpace,
        String separator, String textFieldSeparator, int keyColIndex) {
    // Schema triples go to nsTBox, instance triples to nsABox.
    nsTBox = baseNameSpace + "model#";
    nsABox = baseNameSpace + "data#";
    this.sourceFilesDir = sourceFilesDir;
    this.className = className;
    this.separator = separator;
    this.textFieldSeparator = textFieldSeparator;
    tboxModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null);
    aboxModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null);
    // Platform-specific separator for building file paths later.
    pathSeparator = System.getProperty("file.separator");
    this.keyColIndex = keyColIndex;
}
/**
 * Command-line entry point: reads a Turtle model file and writes the
 * generated JSON-LD context to the output file (default: modelpath + ".json").
 *
 * @param args command line arguments (-modelpath, -outputfile)
 * @throws JSONException  on JSON construction failure
 * @throws KarmaException on context generation failure
 * @throws IOException    on I/O failure
 */
public static void main(String[] args) throws JSONException, KarmaException, IOException {
    Options options = createCommandLineOptions();
    CommandLine cl = CommandLineArgumentParser.parse(args, options,
            GenerateContextFromModel.class.getSimpleName());
    if (cl == null) {
        return;
    }
    String fileName = (String) cl.getOptionValue("modelpath");
    String output = (String) cl.getOptionValue("outputfile");
    if (fileName == null) {
        System.out.println("No model file specified!");
        return;
    }
    if (output == null) {
        output = fileName + ".json";
    }
    File file = new File(fileName);
    Model model = ModelFactory.createDefaultModel();
    // try-with-resources: the original never closed the input stream and
    // leaked the writer when printing failed.
    try (InputStream s = new FileInputStream(file)) {
        model.read(s, null, "TURTLE");
    }
    JSONObject top = new ContextGenerator(model, true).generateContext();
    try (PrintWriter pw = new PrintWriter(output)) {
        pw.println(top.toString(4));
    }
}
/**
 * Loads the test dataset from the classpath and parses it as NIF (Turtle).
 *
 * @return parsed documents, or null when the resource is missing or parsing fails
 */
public static List<Document> loadDataset() {
    Model nifModel = ModelFactory.createDefaultModel();
    InputStream datasetStream = ExtendedCetusSurfaceFormExtractorTest.class
            .getClassLoader().getResourceAsStream(DATASET_NAME);
    if (datasetStream == null) {
        LOGGER.error("Couldn't get InputStream for the dataset. Returning null");
        return null;
    }
    try {
        return new LocalNIFParser(Lang.TTL).parseNIF(datasetStream, nifModel);
    } catch (Exception e) {
        LOGGER.error("Exception while parsing dataset. Returning null", e);
        return null;
    } finally {
        IOUtils.closeQuietly(datasetStream);
    }
}
// Collects the superclass chain of the given URI into a new model.
// Walks upward via getSuperclass() until no further superclass exists; on each
// step it copies the rdfs:subClassOf statements of `uri` into the result.
// NOTE(review): the selector always uses the original `uri`, not
// initRecord.getUri(), so the same statements are re-added on every iteration
// (adds are idempotent in a model, but this looks like it was meant to use the
// current record's URI) — confirm intent before changing.
public Model getHierarchyModel(String uri){ // returns all subclasses for given URI
    Model m = ModelFactory.createDefaultModel();
    OntoRecord initRecord = new OntoRecord();
    initRecord.setUri(uri);
    while(initRecord !=null){
        // Step one level up the hierarchy; null terminates the loop.
        initRecord = getSuperclass(initRecord.getUri());
        if(initRecord != null){
            StmtIterator iter1 = model.listStatements(
                new SimpleSelector(ResourceFactory.createResource(uri), RDFS.subClassOf, (RDFNode)null));
            m.add(iter1);
        }
    }
    return m;
}
/** * Test method for LabelExtractionHandler#addExtractedLabels(), 5.nt */ @Test public final void test03AddExtractedLabels() { // set input file String inFile = "src/test/data/expected_5.nt"; // set expected result Model expected = null; try { expected = ModelFactory.createDefaultModel(); expected.read( inFile, "N-TRIPLES" ); } catch( Exception e ) { fail( e.toString() ); } // run test handler.addExtractedLabels(); assertTrue( expected.isIsomorphicWith( handler.getVoidModel() ) ); }
/**
 * Performs the actual transformation mapping the data extracted from OSM XML data to a Jena model.
 * @return
 */
public Model jenaTransform() {
    Model model = ModelFactory.createDefaultModel();
    processXmlBinary();
    for (Map.Entry<String, OsmWay> entry : osmWayNodeMap.entrySet()) {
        String wayId = entry.getKey();
        OsmWay way = entry.getValue();
        // One schema.org PostalAddress resource per OSM way.
        Resource wayResource = model.createResource("http://fusepoolp3.eu/osm/way/" + wayId);
        wayResource.addProperty(
                model.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
                model.createResource("http://schema.org/PostalAddress"));
        wayResource.addProperty(
                model.createProperty("http://schema.org/streetAddress"),
                way.getTagName());
        // Attach the way's geometry as a GeoSPARQL WKT line string.
        Resource geometryResource = model.createResource("http://fusepoolp3.eu/osm/geometry/" + wayId);
        wayResource.addProperty(
                model.createProperty("http://www.opengis.net/ont/geosparql#geometry"),
                geometryResource);
        geometryResource.addProperty(
                model.createProperty("http://www.opengis.net/ont/geosparql#asWKT"),
                getWktLineString(way.getNodeReferenceList()));
    }
    log.info("Number of triples transformed: " + model.size());
    return model;
}
/**
 * Encoding Treating
 *
 * Date: 2013. 5. 22.
 * Author: ytaek.kim
 *
 * Method Brief : Opens the given file/URL with the supplied charset, parses
 * it into a model and wraps it as an SPDX document.
 * @param fileNameOrUrl file name or URL to read
 * @param charsetName   charset used to decode the input
 * @return the parsed SPDX document
 * @throws IOException on I/O failure (including a missing file)
 * @throws InvalidSPDXAnalysisException on invalid SPDX content
 *
 * #Logs:
 */
public static SPDXDocument creatSpdxDocument(String fileNameOrUrl, String charsetName)
        throws IOException, InvalidSPDXAnalysisException {
    try {
        // RDFa support is optional; parsing still works without it.
        Class.forName("net.rootdev.javardfa.jena.RDFaReader");
    } catch (java.lang.ClassNotFoundException e) {
        logger.warn("Unable to load the RDFaReader Class");
    }
    InputStream spdxRdfInput = FileManager.get().open(fileNameOrUrl);
    if (spdxRdfInput == null)
        throw new FileNotFoundException("Unable to open \"" + fileNameOrUrl + "\" for reading");
    // Close the reader (and the underlying stream); the original leaked both.
    InputStreamReader inputReader = new InputStreamReader(spdxRdfInput, charsetName);
    try {
        Model model = ModelFactory.createDefaultModel();
        model.read(inputReader, figureBaseUri(fileNameOrUrl), fileType(fileNameOrUrl));
        return new SPDXDocument(model);
    } finally {
        try {
            inputReader.close();
        } catch (IOException ignored) {
            // best effort: do not mask the primary exception
        }
    }
}
/** * Read the RDF model from files. */ public static void readSemanticModelFiles() { logger.debug("Reading the model from a file"); // Read the model to an existing model String dataDir = UQasarUtil.getDataDirPath(); String modelPath = "file:///" + dataDir + ONTOLOGYFILE; // String modelPath = "file:///C:/nyrhinen/Programme/jboss-as-7.1.1.Final/standalone/data/uq-ontology-model.rdf"; OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); RDFDataMgr.read(model, modelPath); // Test output to standard output // RDFDataMgr.write(System.out, uqModel, RDFFormat.RDFXML_PRETTY); logger.debug("Model read from file " +modelPath); UQasarUtil.setUqModel(model); System.out.println("Reading done."); }
/**
 * Reads an RDF/XML file into a fresh model.
 *
 * @param f file to read; may be null or missing
 * @return the parsed model; empty when the file is absent or unreadable
 */
public static Model getModel(File f) {
    Model result = ModelFactory.createDefaultModel();
    if (f == null || !f.exists()) {
        return result;
    }
    // try-with-resources replaces the nested try/finally; best-effort on failure.
    try (FileInputStream in = new FileInputStream(f)) {
        result.read(in, "RDF/XML");
    } catch (Exception e) {
        e.printStackTrace();
    }
    return result;
}
/**
 * Round-trips two equal datatype literals through Jena nodes and back,
 * checking node equality and that value and datatype survive the conversion.
 */
@Test
public void testNewToJenaNode() throws ModelRuntimeException {
    com.hp.hpl.jena.rdf.model.Model jenaModel = ModelFactory.createDefaultModel();
    DatatypeLiteralImpl first = new DatatypeLiteralImpl("test", new URIImpl("test:funky", false));
    DatatypeLiteralImpl second = new DatatypeLiteralImpl("test", new URIImpl("test:funky", false));
    // Equal literals must map to equal Jena nodes.
    Node firstNode = TypeConversion.toJenaNode(first, jenaModel);
    Node secondNode = TypeConversion.toJenaNode(second, jenaModel);
    assertTrue(firstNode.equals(secondNode));
    // Converting back must yield datatype literals again.
    Object firstBack = TypeConversion.toRDF2Go(firstNode);
    Object secondBack = TypeConversion.toRDF2Go(secondNode);
    assertTrue(firstBack instanceof DatatypeLiteral);
    assertTrue(secondBack instanceof DatatypeLiteral);
    DatatypeLiteralImpl roundTrip1 = (DatatypeLiteralImpl) firstBack;
    DatatypeLiteralImpl roundTrip2 = (DatatypeLiteralImpl) secondBack;
    // Value and datatype must be preserved by the round trip.
    assertTrue(roundTrip1.getValue().equals("test"));
    assertTrue(roundTrip2.getValue().equals("test"));
    assertTrue(roundTrip1.getDatatype().equals(new URIImpl("test:funky", false)));
    assertTrue(roundTrip2.getDatatype().equals(new URIImpl("test:funky", false)));
}
public static void parseJSONLD(File file, PrintStream ps) { System.out.println("Parsing file: " + file.getName()); Model m = ModelFactory.createDefaultModel(); try { m.read(new FileInputStream(file), null, "JSONLD"); } catch (Exception e) { e.printStackTrace(ps); System.out.println(); return; } //new Test().test(new OutputStreamWriter(ps), m.getGraph()); //WriterGraphRIOT r = RDFDataMgr.createGraphWriter(Lang.JSONLD); //.write(ps, m.getGraph(), pm, null, null); //RDFDataMgr.write(ps, m, Lang.JSONLD); m.write(ps, "RDF/XML"); System.out.println(); }
/**
 * Loads an RDF file (args[0]) and, for every statement, records the
 * subject under its object's identifier in the DBPEDIA_CatToEnts HBase
 * table. Identifiers are the last path segment of each URI.
 */
public static final void main(String[] args) throws MalformedURLException, FileNotFoundException {
    Model model = ModelFactory.createDefaultModel();
    model.read(args[0]);
    StmtIterator statements = model.listStatements();
    System.out.println("Start");
    while (statements.hasNext()) {
        Statement statement = statements.next();
        // Identifier = last segment of the URI path.
        String[] subjectParts = statement.getSubject().getURI().split("/");
        String subjectIdent = subjectParts[subjectParts.length - 1];
        String[] objectParts = statement.getObject().asResource().getURI().split("/");
        String objIdent = objectParts[objectParts.length - 1];
        try {
            System.out.println(objIdent + " " + subjectIdent);
            HBaseOperations.getInstance().addRecord("DBPEDIA_CatToEnts", objIdent,
                    "data", String.valueOf(subjectIdent.hashCode()), subjectIdent);
        } catch (IOException e) {
            // Keep going: one failed row should not abort the whole import.
            e.printStackTrace();
        }
    }
}
/**
 * Exports this source as a Jena model: registers the standard prefixes
 * (default prefix = the source URI) and adds the source info triples.
 *
 * @return the populated model
 */
@Override
public Model exportToJenaModel() {
    Model exported = ModelFactory.createDefaultModel();
    // The source's own URI serves as the default namespace.
    exported.setNsPrefix("", source.getUri());
    exported.setNsPrefix(Prefixes.KARMA, Namespaces.KARMA);
    exported.setNsPrefix(Prefixes.RDF, Namespaces.RDF);
    exported.setNsPrefix(Prefixes.RDFS, Namespaces.RDFS);
    exported.setNsPrefix(Prefixes.SWRL, Namespaces.SWRL);
    addSourceInfoPart(exported);
    return exported;
}
/**
 * Reads an RDF file (args[0]), collects the distinct object URIs of all
 * statements, and writes them one per line to the output file (args[1]).
 *
 * @param args args[0] = input RDF file/URL, args[1] = output text file
 */
public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    m.read(args[0]);
    StmtIterator iter = m.listStatements();
    HashSet<String> hash = new HashSet<String>();
    while (iter.hasNext()) {
        Statement stmt = iter.next();
        RDFNode node = stmt.getObject();
        // NOTE(review): asResource() throws for literal objects — assumes all
        // objects are resources; confirm the input guarantees this.
        String uri = node.asResource().getURI();
        hash.add(uri);
    }
    File output = new File(args[1]);
    // try-with-resources: the original leaked the writer when println threw.
    try (PrintWriter writer = new PrintWriter(output)) {
        for (String s : hash) {
            writer.println(s);
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
}
public static void main(String[] args) { Model model = ModelFactory.createDefaultModel(); ModelUtils.fillModel(model, FOAF_BASE_URI, FOAF_SCHEMA_FilePath); // renderer all namespaces System.out.println(model.getNsPrefixMap()); // insert foaf:me rdf:type foaf:Person Resource me = model.createResource(FOAF_BASE_URI + "me"); Property rdfType = model.getProperty(Constants.RDF_TYPE_URL); Resource FOAFPersonClass = model.getResource(FOAF_BASE_URI + "Person"); model.add(me, rdfType, FOAFPersonClass); // query the inserted facts StringBuilder query = SPARQLUtils.getRegualrSPARQLPREFIX(); query.append("PREFIX foaf: <http://xmlns.com/foaf/0.1/>").append(Constants.NEWLINE); query.append("SELECT DISTINCT ?person WHERE {?person rdf:type foaf:Person}"); SPARQLUtils.query(model, query.toString(), "?person"); }
/**
 * JAX-RS message body reader: parses the entity stream into a Jena model.
 * The format is auto-detected by Jena (base URI and syntax left unset).
 */
@Override
public Model readFrom(Class<Model> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, String> httpHeaders,
        InputStream entityStream) throws IOException, WebApplicationException {
    Model parsed = ModelFactory.createDefaultModel();
    parsed.read(entityStream, null);
    return parsed;
}