private void copyToTdb() throws RepositoryException {
    if ( !needsSave || null == tdbdir ) {
        return;
    }

    final Dataset dataset = TDBFactory.createDataset( tdbdir.getAbsolutePath() );
    try {
        rc.export( new TdbExporter( dataset ) );
    } catch ( RepositoryException | RDFHandlerException e ) {
        log.error( "Problem exporting data to TDB", e );
        dataset.abort();
    } finally {
        dataset.close();
    }
}
static ArrayList<String> getAllEntityEvents(Dataset dataset, String entity) {
    ArrayList<String> events = new ArrayList<String>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();
        if (!name.equals(instanceGraph) && (!name.equals(provenanceGraph))) {
            Model namedModel = dataset.getNamedModel(name);
            StmtIterator siter = namedModel.listStatements();
            while (siter.hasNext()) {
                Statement s = siter.nextStatement();
                String object = getObjectValue(s).toLowerCase();
                if (object.indexOf(entity.toLowerCase()) > -1) {
                    String subject = s.getSubject().getURI();
                    if (!events.contains(subject)) {
                        events.add(subject);
                    }
                }
            }
        }
    }
    return events;
}
@Test
public void testSomeMethod2() throws Exception {
    Dataset ds = TDBFactory.createDataset("/scratch/WORK2/jena/dataset2/");
    OntModel model1 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym1"));
    OntModel model2 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym2"));
    OntClass thing = model1.createClass("http://www.w3.org/2002/07/owl#Thing");
    model1.createIndividual("http://example.com/onto1#VijayRaj", thing);
    model2.createIndividual("http://example.com/onto2#VijayRaj", thing);
    Model m = model1.union(model2);
    FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/mergetestds.xml");
    RDFDataMgr.write(fw, ds, RDFFormat.NQUADS_UTF8);
}
/**
 * Creates an in-memory Jena data set and an in-memory Lucene index from code.
 * @return the text-indexed dataset
 */
public Dataset createMemDatasetFromCode() {
    log.info("Construct an in-memory dataset with in-memory lucene index using code");
    TextQuery.init();

    // Build a text dataset by code.
    // Here, in-memory base data and in-memory Lucene index.

    // Base data
    Dataset jenads = DatasetFactory.createMem();
    Property streetAddress = jenads.getDefaultModel().createProperty("http://schema.org/streetAddress");

    // Define the index mapping
    //EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode());
    EntityDefinition entDef = new EntityDefinition("uri", "text", streetAddress.asNode());

    // Lucene, in memory.
    Directory dir = new RAMDirectory();

    // Join together into a dataset
    Dataset ds = TextDatasetFactory.createLucene(jenads, dir, entDef);
    return ds;
}
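// Usage sketch (not part of the original source): one way the text-indexed dataset built by
// createMemDatasetFromCode() could be populated and then searched through the jena-text
// "text:query" property function. The method name, the example resource, the address literal
// and the search term are illustrative assumptions.
public void textQueryExample() {
    Dataset ds = createMemDatasetFromCode();

    // Add one triple on the indexed property inside a write transaction.
    ds.begin(ReadWrite.WRITE);
    try {
        Model m = ds.getDefaultModel();
        m.add(m.createResource("http://example.org/place/1"),
                m.createProperty("http://schema.org/streetAddress"),
                "1600 Amphitheatre Parkway");
        ds.commit();
    } finally {
        ds.end();
    }

    // Search the Lucene index via the text:query property function.
    String q = "PREFIX text: <http://jena.apache.org/text#> "
            + "SELECT ?s WHERE { ?s text:query (<http://schema.org/streetAddress> 'Amphitheatre') }";
    ds.begin(ReadWrite.READ);
    QueryExecution qe = QueryExecutionFactory.create(q, ds);
    try {
        ResultSetFormatter.out(qe.execSelect());
    } finally {
        qe.close();
        ds.end();
    }
}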
/**
 * Creates a persistent Jena TDB data set and Lucene index.
 * @return the text-indexed dataset
 * @throws IOException
 */
public Dataset createPersistentDatasetFromCode() throws IOException {
    log.info("Construct a persistent Jena data set with lucene index using code");

    // Build a text dataset by code.
    TextQuery.init();

    // Remove old files and folders
    deleteFiles(JENA_TDB_TEMP_FOLDER);
    deleteFiles(LUCENE_INDEX_TEMP_FOLDER);

    // Create new folders
    JENA_TDB_TEMP_FOLDER.mkdirs();
    LUCENE_INDEX_TEMP_FOLDER.mkdirs();

    // Create the persisted Jena data set and Lucene index
    Dataset jenaDataset = TDBFactory.createDataset(JENA_TDB_TEMP_FOLDER.getAbsolutePath());

    // Lucene, persisted.
    Directory luceneIndex = FSDirectory.open(LUCENE_INDEX_TEMP_FOLDER);

    // Define the index mapping
    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode());

    // Join together into a dataset
    return TextDatasetFactory.createLucene(jenaDataset, luceneIndex, entDef);
}
/**
 * Import the data into the data set. When a new data set is imported, the old data is deleted.
 * @param dataset the dataset to load into
 * @param file the path or URL of the RDF file to read
 */
public void loadData(Dataset dataset, String file) {
    log.info("Start loading");
    long startTime = System.nanoTime();
    dataset.begin(ReadWrite.WRITE);
    try {
        Model m = dataset.getDefaultModel();
        log.info("Number of triples before loading: " + m.size());
        RDFDataMgr.read(m, file);
        log.info("Number of triples after loading: " + m.size());
        dataset.commit();
    } finally {
        dataset.end();
    }
    long finishTime = System.nanoTime();
    double time = (finishTime - startTime) / 1.0e6;
    log.info(String.format("Finish loading - %.2fms", time));
}
/**
 * Load jena TDB
 */
private void TDBloading() {
    logger.info("TDB loading");

    // create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);

    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ);
    model = dataset.getDefaultModel();
    dataset.end();

    // if model is null load local dataset into jena TDB
    if (model == null)
        TDBloading(datasetFile);
}
static void demoOfUsingADirectory() {
    // Make a TDB-backed dataset
    String directory = TDB_DIR;

    // read something
    Dataset dataset = TDBFactory.createDataset(directory);
    logger.debug("read tx start!!!");
    demoOfReadTransaction(dataset);
    logger.debug("read tx end!!!");
    dataset.close();

    // write something
    dataset = TDBFactory.createDataset(directory);
    logger.debug("write tx start!!!");
    demoOfWriteTransaction(dataset);
    logger.debug("write tx end!!!");
    dataset.close();

    // read again
    dataset = TDBFactory.createDataset(directory);
    logger.debug("read tx start!!!");
    demoOfReadTransaction(dataset);
    logger.debug("read tx end!!!");
    dataset.close();
}
private static void demoOfReadTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.READ);

    // Get model inside the transaction
    Model model = dataset.getDefaultModel();

    // query the inserted facts
    StringBuilder query = SPARQLUtils.getRegualrSPARQLPREFIX();
    query.append("PREFIX foaf: <http://xmlns.com/foaf/0.1/>").append(Constants.NEWLINE);
    query.append("SELECT DISTINCT ?person WHERE {?person rdf:type foaf:Person}");
    SPARQLUtils.query(model, query.toString(), "?person");

    model.close(); // closing the model to flush
    dataset.end();
}
private static void demoOfWriteTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.WRITE);
    Model model = dataset.getDefaultModel();
    ModelUtils.fillModel(model, FOAF_BASE_URI, FOAF_SCHEMA_FilePath);

    // insert foaf:me rdf:type foaf:Person
    Resource me = model.createResource(FOAF_BASE_URI + "me");
    Property rdfType = model.getProperty(Constants.RDF_TYPE_URL);
    Resource FOAFPersonClass = model.getResource(FOAF_BASE_URI + "Person");
    model.add(me, rdfType, FOAFPersonClass);
    // model.write(System.out); // for debug

    model.close(); // closing the model to flush
    dataset.commit();
    dataset.end();
}
/**
 * Retrieve the life cycle state for a plan stored in Fedora
 *
 * @param planId
 *            the id of the plan
 * @param uriInfo
 *            the {@link javax.ws.rs.core.UriInfo} injected by JAX-RS for having the context
 *            path available
 * @return the plan's current life cycle state
 * @throws javax.jcr.RepositoryException
 *             if an error occurred while fetching the life cycle state of
 *             the plan
 */
@GET
@Path("{id}")
public Response retrievePlanLifecycleState(@PathParam("id") final String planId, @Context UriInfo uriInfo)
        throws RepositoryException {

    /* fetch the plan RDF from fedora */
    final String planUri = "/" + Plans.PLAN_FOLDER + planId;
    final FedoraObject plan = this.objectService.findOrCreateObject(this.session, planUri);

    /* get the relevant information from the RDF dataset */
    final IdentifierTranslator subjects = new DefaultIdentifierTranslator();
    final Dataset data = plan.getPropertiesDataset(subjects);
    final Model rdfModel = SerializationUtils.unifyDatasetModel(data);
    final String lifecycle = rdfModel
            .listStatements(subjects.getSubject(plan.getNode().getPath()),
                    rdfModel.getProperty("http://scapeproject.eu/model#hasLifecycleState"),
                    (RDFNode) null).next().getObject().asLiteral().getString();

    return Response.ok(lifecycle, MediaType.TEXT_PLAIN).build();
}
@Test
public void addModel() throws Exception {
    final Dataset ds = TDBFactory.createDataset();
    final DatasetPopulator dsp = new DatasetPopulator(ds);

    final Model model = ModelFactory.createDefaultModel();
    final Resource s = model.createResource();
    final Property p = model.createProperty("urn:example:prop", "foo");
    final Resource o = model.createResource();
    model.add(s, p, o);

    dsp.addModel(model);

    ds.begin(ReadWrite.READ);
    try {
        assertTrue(ds.getDefaultModel().containsAll(model));
    } finally {
        ds.end();
    }
}
@Test
public void inferMissingPropertyNames() throws Exception {
    final Dataset ds = TDBFactory.createDataset();
    final DatasetPopulator dsp = new DatasetPopulator(ds);
    dsp.addModel(loadModel("infer-property-names/data.ttl"));

    final Model x = loadModel("infer-property-names/expected.ttl");

    ds.begin(ReadWrite.READ);
    try {
        final Model m = ds.getDefaultModel();
        assertTrue(m.containsAll(x));
    } finally {
        ds.end();
    }
}
@Test
public void findCompatiblePublicProperties() {
    final Dataset ds = loadDataset("compatibility/properties.ttl");
    final CompatibleResourceFinder finder = new CompatibleResourceFinder(ds);

    final Type y0 = type(0);
    final Type y1 = type(1);
    final Type y2 = type(2);
    final Type y3 = type(3);
    final Property p0 = property(0, y0);
    final Property p1 = property(1, y1);
    final Property p2 = property(2, y2);
    final Property p3 = property(3, y3);
    final Action a1 = action(widget(1, p1), p1);
    final Action a2 = action(widget(2, p2), p2, p3);
    final PublishedProperty pp1 = new PublishedProperty(p1, a1);
    final PublishedProperty pp2 = new PublishedProperty(p2, a2);
    final Set<PublishedProperty> xpps = setOf(pp1, pp2);

    final Set<PublishedProperty> pps = finder.findCompatibleOffers(p0);
    assertEquals(pps, xpps);
}
@Test
public void findCompatibleFunctionalities() {
    final Dataset ds = loadDataset("compatibility/functionalities.ttl");
    final CompatibleResourceFinder finder = new CompatibleResourceFinder(ds);

    final Functionality f0 = functionality(0);
    final Functionality f1 = functionality(1);
    final Functionality f2 = functionality(2);
    final Action a1 = action(widget(1), f1);
    final Action a2 = action(widget(2), f2);
    final RealizedFunctionality rt1 = new RealizedFunctionality(f1, a1);
    final RealizedFunctionality rt2 = new RealizedFunctionality(f2, a2);
    final Set<RealizedFunctionality> xrts = setOf(rt1, rt2);

    final Set<RealizedFunctionality> rts = finder.findCompatibleOffers(f0);
    assertEquals(rts, xrts);
}
@Test
public void test_rdfcreation_msft() throws SAXException, IOException, ParserConfigurationException, Exception {
    Document dataDoc = parser.parse(RdfFactoryTest.class.getResourceAsStream("/data/msft-20130630.xml"), -1);
    RdfFactory factory = new RdfFactory(new RunConfig(domain));
    factory.createRdfs(dataDoc, testTdbDir);

    Dataset dataset = TDBFactory.createDataset(testTdbDir);
    dataset.begin(ReadWrite.READ);
    Model model = dataset.getDefaultModel();
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());
    dataset.end();
}
public static void main(String[] args) throws Exception {
    Dataset memoryDataset = DatasetFactory.createMem();

    Model memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_1.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://grapha.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_2.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphb.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_3.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphc.com", memoryModel);

    memoryModel = ModelFactory.createDefaultModel();
    memoryModel.read(new FileReader("/work/workspaces/rdf/SolRDF/solrdf/src/test/resources/sample_data/one_triple_4.ttl"), "http://e.org", "TTL");
    memoryDataset.addNamedModel("http://graphd.com", memoryModel);

    final Query query = QueryFactory.create(q2()); // "SELECT ?s FROM <http://grapha.com> WHERE { ?s <http://example.org/title> ?o }"
    System.out.println(ResultSetFormatter.asText(QueryExecutionFactory.create(query, memoryDataset).execSelect()));
}
private static void loadFile(File fileOrFolder) {
    if (fileOrFolder.isHidden())
        return;

    if (fileOrFolder.getPath().endsWith(".trig")) {
        Dataset _ds = RDFDataMgr.loadDataset(fileOrFolder.getPath());
        Iterator<String> iter = _ds.listNames();
        while (iter.hasNext()) {
            String name = iter.next();
            d.addNamedModel(name, _ds.getNamedModel(name));
        }
        d.getDefaultModel().add(_ds.getDefaultModel());
    }

    if (fileOrFolder.isDirectory()) {
        File[] listOfFiles = fileOrFolder.listFiles();
        for (File file : listOfFiles) {
            loadFile(file);
        }
    }
}
public void doit() {
    Dataset dataset = DatasetFactory.createMem();
    Model model = dataset.getDefaultModel();
    model.read("category_labels_en.nq");
    if (!model.isEmpty()) { // sanity check that the file was actually loaded
        System.out.println("right!!");
    }

    Query q = QueryFactory.create(query);
    QueryExecution qe = QueryExecutionFactory.create(q, model);
    ResultSet rs = qe.execSelect();
    ResultSetFormatter.out(rs);
}
private Dataset joinDataset(Dataset baseDataset, File indexDir) throws IOException {
    EntityDefinition entDef = new EntityDefinition("entityField", "geoField");

    // you need the JTS lib in the classpath to run the examples
    //entDef.setSpatialContextFactory(SpatialQuery.JTS_SPATIAL_CONTEXT_FACTORY_CLASS);

    // set custom geo predicates
    entDef.addSpatialPredicatePair(ResourceFactory.createResource("http://schema.org/latitude"),
            ResourceFactory.createResource("http://schema.org/longitude"));
    /*
    entDef.addSpatialPredicatePair(ResourceFactory.createResource("http://localhost/jena_example/#latitude_2"),
            ResourceFactory.createResource("http://localhost/jena_example/#longitude_2"));
    entDef.addWKTPredicate(ResourceFactory.createResource("http://localhost/jena_example/#wkt_1"));
    entDef.addWKTPredicate(ResourceFactory.createResource("http://localhost/jena_example/#wkt_2"));
    */

    // Lucene, index in the file system.
    Directory dir = FSDirectory.open(indexDir);

    // Join together into a dataset
    Dataset ds = SpatialDatasetFactory.createLucene(baseDataset, dir, entDef);
    return ds;
}
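// Usage sketch (not part of the original source): querying the spatially indexed dataset
// returned by joinDataset() with the jena-spatial "spatial:nearby" property function.
// The method name, coordinates, radius and unit are illustrative assumptions; the base data
// is expected to carry schema:latitude / schema:longitude values as mapped above.
private void spatialQueryExample(Dataset baseDataset, File indexDir) throws IOException {
    Dataset spatialDataset = joinDataset(baseDataset, indexDir);

    String q = "PREFIX spatial: <http://jena.apache.org/spatial#> "
            + "SELECT ?s WHERE { ?s spatial:nearby (51.46 -2.60 10.0 'km') }";
    spatialDataset.begin(ReadWrite.READ);
    QueryExecution qe = QueryExecutionFactory.create(q, spatialDataset);
    try {
        ResultSetFormatter.out(qe.execSelect());
    } finally {
        qe.close();
        spatialDataset.end();
    }
}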
public boolean isCachedGraph(Dataset dataset, String graphName) {
    boolean isCached = false;
    dataset.begin(ReadWrite.READ);
    try {
        Iterator<String> inames = getDataset().listNames();
        while (inames.hasNext()) {
            if (graphName.equals(inames.next())) {
                isCached = true;
            }
        }
    } finally {
        dataset.end();
    }
    return isCached;
}
static public HashMap<String, String> readSemTrig(ArrayList<String> eventIdentifierArray, String trigFolder) {
    HashMap<String, String> tokenIdMap = new HashMap<String, String>();
    ArrayList<File> trigFiles = Util.makeRecursiveFileList(new File(trigFolder), ".trig");
    // System.out.println("trigFiles.size() = " + trigFiles.size());
    for (int i = 0; i < trigFiles.size(); i++) {
        File file = trigFiles.get(i);
        Dataset dataset = RDFDataMgr.loadDataset(file.getAbsolutePath());
        Iterator<String> it = dataset.listNames();
        while (it.hasNext()) {
            String name = it.next();
            if (name.equals(instanceGraph)) {
                Model namedModel = dataset.getNamedModel(name);
                StmtIterator siter = namedModel.listStatements();
                while (siter.hasNext()) {
                    Statement s = siter.nextStatement();
                    updateTokenMap(eventIdentifierArray, tokenIdMap, s);
                }
            }
        }
        dataset.close();
    }
    return tokenIdMap;
}
/**
 * Create an index using your own model, saving the time needed to import the model when using an endpoint.
 * If you only have an endpoint or want to index a subset of the triples,
 * use the static methods {@link #createIndex(String, String, List)}, {@link #createClassIndex(String, String)} or {@link #createPropertyIndex(String, String)}.
 * All triples (uri, rdfs:label, label) will be put into the index.
 * @param model the Jena model containing the rdfs:label statements that you want to index. Changes to the model after the constructor call are probably not indexed.
 * @param minSimilarity between 0 (maximum fuzziness) and 1f (no fuzzy matching).
 */
public SPARQLModelIndex(Model model, float minSimilarity) {
    this.minSimilarity = minSimilarity;
    Dataset ds1 = DatasetFactory.createMem();
    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label);

    // Lucene, in memory.
    Directory dir = new RAMDirectory();

    // Join together into a dataset
    dataset = TextDatasetFactory.createLucene(ds1, dir, entDef);
    // ds.setDefaultModel(model);

    synchronized (model) {
        dataset.begin(ReadWrite.WRITE);
        try {
            dataset.getDefaultModel().add(model);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
    // this.model = model;
}
/**
 * Method to save the nanopub.
 * @param f Receives the file.
 * @throws Exception It can throw an exception.
 */
public void save(String f) throws Exception {
    this.quads = this.getAllQuads();
    if (quads == null) {
        throw new Exception("Quad list is null. Did you call createNanoPub() first?");
    }
    if (quads.size() == 0) {
        throw new Exception("Quad list is empty.");
    }

    Dataset ds = TDBFactory.createDataset();
    DatasetGraph dsg = ds.asDatasetGraph();
    for (int i = 0; i < quads.size(); i++) {
        dsg.add(quads.get(i));
    }
    RDFDataMgr.write(new FileOutputStream(new File(f)), dsg, RDFFormat.NQUADS);
}
public void start(final int port) {
    final Dataset dataset = DatasetFactory.create(model);
    ServerConfig config = FusekiConfig.defaultConfiguration("dataset", dataset.asDatasetGraph(), false, true);
    config.port = config.pagesPort = port;
    config.pages = null;

    final SPARQLServer server = new SPARQLServer(config);
    server.start();
}
public static void main(String[] args) {
    // Parse
    //Query query = QueryFactory.read("file:C:\\SVN\\PigSPARQL_main\\queries\\q8.sparql");
    Query query = QueryFactory.read("file:queries/SP2Bench/q8mod.sparql");
    //System.out.println(query);

    // Generate algebra
    Op op = Algebra.compile(query);
    op = Algebra.optimize(op);
    //System.out.println(op);

    // Print Algebra Using SSE
    //PrintUtils.printOp(query, true);
    //System.out.println();

    String dftGraphURI = "file:datasets/SP2BEnch/dblp25M.n3";
    //String dftGraphURI = "file:D:\\ZerProf\\Uni\\Master\\Masterarbeit\\sp2b\\bin\\dblp50K.n3";
    Dataset dataset = DatasetFactory.create(dftGraphURI);

    // Execute it.
    QueryIterator qIter = Algebra.exec(op, dataset);

    // Results
    int results = 0;
    for ( ; qIter.hasNext() ; ) {
        Binding b = qIter.nextBinding();
        results++;
        System.out.println(b);
    }
    qIter.close();
    System.out.println("# solution mappings: " + results);
}
public Server(int port, String pagePath, String subDomain, Model model, Listener listener) {
    this.port = port;
    this.subDomain = subDomain;
    this.pagePath = pagePath;
    Dataset dataset = DatasetFactory.create(model);
    this.dsg = dataset.asDatasetGraph();
    this.listener = listener;
}
public static Model createModel(String... dbDirs) {
    Model mainModel = null;
    Dataset dataset = null;
    for (String dbDir : dbDirs) {
        dataset = TDBFactory.createDataset(dbDir);
        if (mainModel == null) {
            mainModel = dataset.getDefaultModel();
        } else {
            Model secondaryModel = dataset.getDefaultModel();
            mainModel = ModelFactory.createUnion(mainModel, secondaryModel);
        }
    }
    mainModel = ModelFactory.createRDFSModel(mainModel);
    return mainModel;
}
public JenaEngine( Dataset dataset ) throws RepositoryException {
    openDB( new Properties() );
    tdbdir = null;

    try {
        copyFromTdb( dataset );
    } catch ( Exception e ) {
        log.fatal( e, e );
    }
}
private void copyFromTdb( String file ) throws RepositoryException {
    tdbdir = new File( file );
    Dataset dataset = TDBFactory.createDataset( file );
    try {
        copyFromTdb( dataset );
    } finally {
        dataset.close();
    }
}
@Override
public void publishRecord(byte[] rdf, String rdfAbout, String host) throws Exception {
    super.publishRecord(rdf, rdfAbout, host);
    SDBConnection conn = new SDBConnection(jenaDataSource);
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, BygleSystemUtils.getDBType(databaseType));
    Store store = SDBFactory.connectStore(conn, storeDesc);
    if (!StoreUtils.isFormatted(store))
        store.getTableFormatter().create();
    Dataset dataset = SDBFactory.connectDataset(store);
    Model modelTpl = ModelFactory.createDefaultModel();
    modelTpl.read(new ByteArrayInputStream(rdf), "");
    dataset.getDefaultModel().add(modelTpl);
    store.getConnection().close();
    store.close();
}
@Override
public void dePublishRecord(byte[] rdf, String rdfAbout, String host) throws Exception {
    super.dePublishRecord(rdf, rdfAbout, host);
    SDBConnection conn = new SDBConnection(jenaDataSource);
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, BygleSystemUtils.getDBType(databaseType));
    Store store = SDBFactory.connectStore(conn, storeDesc);
    Dataset dataset = SDBFactory.connectDataset(store);
    Model modelTpl = ModelFactory.createDefaultModel();
    modelTpl.read(new ByteArrayInputStream(rdf), "");

    // Delete statements whose subjects are blank nodes reachable from the record
    // (up to three hops deep) before removing the record's own statements.
    StringBuilder query = new StringBuilder();
    query.append("DELETE {?bn ?a ?b} WHERE {");
    query.append("{<" + host + "/" + rdfAbout + "> ?p ?o");
    query.append(". FILTER(isBlank(?o))");
    query.append(". ?o ?c ?s");
    query.append(". FILTER(isBlank(?s))");
    query.append(". ?s ?d ?bn");
    query.append(". FILTER(isBlank(?bn))}");
    query.append("UNION{");
    query.append("<" + host + "/" + rdfAbout + "> ?p ?o");
    query.append(". FILTER(isBlank(?o))");
    query.append(". ?o ?c ?bn");
    query.append(". FILTER(isBlank(?bn))}");
    query.append("UNION{");
    query.append(" <" + host + "/" + rdfAbout + "> ?p ?bn");
    query.append(". FILTER(isBlank(?bn))");
    query.append("} ?bn ?a ?b}");
    UpdateAction.parseExecute(query.toString(), modelTpl);
    modelTpl.removeAll(modelTpl.createResource(host + "/" + rdfAbout), null, null);
    dataset.getDefaultModel().remove(modelTpl);
    store.getConnection().close();
    store.close();
}
@Override
public void executePublishing() throws Exception {
    super.executeImport();
    SDBConnection conn = new SDBConnection(jenaDataSource);
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, BygleSystemUtils.getDBType(databaseType));
    Store store = SDBFactory.connectStore(conn, storeDesc);
    if (!StoreUtils.isFormatted(store))
        store.getTableFormatter().create();
    File importDir = new File(importDirectory);
    FileFilter fileFilter = new WildcardFileFilter("*.nt");
    File[] importFiles = importDir.listFiles(fileFilter);
    if (importFiles.length > 0) {
        OntModel ontModel = ModelFactory.createOntologyModel();
        FileFilter ontologyFileFilter = new WildcardFileFilter("*.owl");
        File[] ontologyfiles = importDir.listFiles(ontologyFileFilter);
        for (int x = 0; x < ontologyfiles.length; x++) {
            FileManager.get().readModel(ontModel, ontologyfiles[x].getAbsolutePath());
        }
        System.out.println("##############################STARTING PUBLISHING#############################");
        for (int i = 0; i < importFiles.length; i++) {
            Model modelTpl = ModelFactory.createDefaultModel();
            FileManager.get().readModel(modelTpl, importFiles[i].getAbsolutePath());
            System.out.println("PUBLISHING FILE " + importFiles[i].getName());
            System.out.println("##############################START SAVING DATA###############################");
            ontModel.add(modelTpl);
        }
        Dataset dataset = SDBFactory.connectDataset(store);
        dataset.getDefaultModel().add(ontModel);
        store.getConnection().close();
        store.close();
        System.out.println("##############################END PUBLISHING##################################");
        FileUtils.cleanDirectory(importDir);
        System.out.println("##############################PUBLISHING SUCCESS##############################");
    } else {
        System.out.println("##############################NO FILES TO PUBLISH##############################");
    }
}
public DissolveGUI(Dataset data, String typename) {
    this.data = data;
    this.typename = typename;
    getInstances();
    getSubtypes();
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    getContentPane();
    displayMembers();
}
public SemDistGUI(Dataset data) {
    this.data = data;
    getDistantEntities();
    getDistantTypes();
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    displayMembers();
}
public static void main(String[] args0) {
    // load dataset
    Dataset dataset;
    JFileChooser fc = new JFileChooser();
    fc.setCurrentDirectory(new File(System.getProperty("user.dir")));
    fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
    int returnVal = fc.showOpenDialog(null);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        dataset = TDBFactory.createDataset(fc.getSelectedFile().toString());
        TransformationProcessor tp = new TransformationProcessor(dataset);
        Map<String, String> pmap = new HashMap<>();
        tp.transform("deletex.sparql", pmap);
    }
}
public static void perspectiveRelationsToTrig(String pathToTrigFile, ArrayList<PerspectiveObject> perspectiveObjects) {
    try {
        OutputStream fos = new FileOutputStream(pathToTrigFile);
        Dataset ds = TDBFactory.createDataset();
        Model defaultModel = ds.getDefaultModel();
        ResourcesUri.prefixModel(defaultModel);
        // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
        ResourcesUri.prefixModelGaf(defaultModel);
        JenaSerialization.addJenaPerspectiveObjects(ds, perspectiveObjects);
        RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
        fos.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
public static void perspectiveRelationsToTrigStream(OutputStream fos, ArrayList<PerspectiveObject> perspectiveObjects) {
    Dataset ds = TDBFactory.createDataset();
    Model defaultModel = ds.getDefaultModel();
    ResourcesUri.prefixModel(defaultModel);
    // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
    ResourcesUri.prefixModelGaf(defaultModel);
    JenaSerialization.addJenaPerspectiveObjects(ds, perspectiveObjects);
    RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
}
static public void addJenaPerspectiveObjects(Dataset ds, ArrayList<PerspectiveObject> perspectiveObjects) {
    for (int i = 0; i < perspectiveObjects.size(); i++) {
        PerspectiveObject perspectiveObject = perspectiveObjects.get(i);
        // System.out.println("perspectiveObject.toString() = " + perspectiveObject.toString());
        perspectiveObject.addToJenaDataSet(ds);
    }
}