Java class com.hp.hpl.jena.rdf.model.RDFReader example source code

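All of the snippets on this page follow the same pattern from the legacy Jena API: create a Model with ModelFactory.createDefaultModel(), obtain an RDFReader for a syntax name via Model.getReader(...) (or instantiate a concrete reader such as TurtleReader directly, as the Tapioca examples do), optionally configure it with setProperty(...), and then call read(...) with an InputStream, Reader, or URL plus a base URI. The following minimal sketch illustrates that pattern; the file name and base URI are placeholders and are not taken from any of the projects below.

import java.io.FileInputStream;
import java.io.InputStream;

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFReader;

public class RDFReaderSketch {

    public static void main(String[] args) throws Exception {
        // Empty in-memory model that will receive the parsed triples.
        Model model = ModelFactory.createDefaultModel();

        // Ask the model for a reader by syntax name ("RDF/XML", "TURTLE", "N3", "TTL", ...).
        RDFReader reader = model.getReader("TURTLE");

        // Parse from a stream; relative URIs in the data are resolved against the base URI.
        // "data.ttl" and the base URI are placeholder values for this sketch.
        try (InputStream in = new FileInputStream("data.ttl")) {
            reader.read(model, in, "http://example.org/base/");
        }

        System.out.println("Parsed " + model.size() + " triples.");
    }
}

Several of the examples additionally tune parser behaviour with reader.setProperty(...) (for instance "error-mode" or "WARN_REDEFINITION_OF_ID") before calling read.
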
Project: sdh-vocabulary    File: ModuleHelper.java
public ModuleHelper load(final URI base,final Format format) throws IOException {
    final String data =
        new String(
            Files.readAllBytes(this.file),
            StandardCharsets.UTF_8);
    if (!format.equals(Format.RDF_XML) || RDFXMLUtil.isStandaloneDocument(data)) {
        try {
            this.model = ModelFactory.createDefaultModel();
            this.format = format;
            final RDFReader reader = this.model.getReader(format.lang.getLabel());
            reader.setProperty("error-mode", "strict-fatal");
            reader.read(this.model, new StringReader(data), base.toString());
        } catch (final RiotException e) {
            this.model = null;
            this.format = format;
            throw new IOException("Parsing failed", e);
        }
    }
    return this;
}
Project: Tapioca    File: AbstractEngine.java
public static Model readRDFMetaDataFile(File rdfMetaDataFile){
    RDFReader reader = new TurtleReader();
    Model rdfMetaDataModel = ModelFactory.createDefaultModel();

    LOGGER.info("Loading meta data file from " + rdfMetaDataFile.getAbsolutePath());
    FileInputStream fin = null;
    try {
        fin = new FileInputStream(rdfMetaDataFile);
        reader.read(rdfMetaDataModel, fin, "");
    } catch (FileNotFoundException e) {
        LOGGER.error("Couldn't read meta data from file. Returning null.", e);
        return null;
    } finally {
        IOUtils.closeQuietly(fin);
    }  
    return rdfMetaDataModel;
}
Project: swows-web    File: WebInput.java
public void handleEvent(String eventString) {
    buildGraph();
    RDFReaderF readerFactory = new RDFReaderFImpl();
    RDFReader reader = readerFactory.getReader("N3");
    Model model = ModelFactory.createModelForGraph(eventGraph);
    reader.read(
            model,
            new StringReader(eventString),
            DEFAULT_BASE);
    logger.debug("Launching update thread... ");
//  LocalTimer.get().schedule(
//      new TimerTask() {
//          public void run() {
    RunnableContextFactory.getDefaultRunnableContext().run(
            new Runnable() {
                public void run() {
                    logger.debug("Sending update events ... ");
                    eventGraph.sendUpdateEvents();
                    logger.debug("Update events sent!");
                }
            });
//          }
//      }, 0);
//  logger.debug("Update thread launched!");
}
Project: quality    File: ModelParser.java
public static boolean timeoutModel(final String uri) {
    final Model m = ModelFactory.createDefaultModel();
    try {
        TimeLimitedCodeBlock.runWithTimeout(new Runnable() {
            @Override
            public void run() {
                RDFReader arp = m.getReader("RDF/XML");
                arp.setProperty("WARN_REDEFINITION_OF_ID", "EM_IGNORE");
                arp.read(m, uri);
//              m.read(uri);
            }
        }, 3, TimeUnit.SECONDS);
    } catch (Exception e) {
        logger.debug("Timeout Reading Model: " + uri);
    }

    return m.size() > 0;
}
Project: quality    File: ModelParser.java
public static boolean timeoutModel(final String uri, final Lang tryLang) {
    final Model m = ModelFactory.createDefaultModel();
    try {
        final RDFReader arp = m.getReader(tryLang.getName());
        TimeLimitedCodeBlock.runWithTimeout(new Runnable() {
            @Override
            public void run() {
                arp.setProperty("WARN_REDEFINITION_OF_ID", "EM_IGNORE");
                arp.read(m, uri);
//              m.read(uri, tryLang.getName());
            }
        }, 3, TimeUnit.SECONDS);
    } catch (Exception e) {
        logger.debug("Timeout Reading Model: " + uri);
    }

    return m.size() > 0;
}
Project: sdh-vocabulary    File: TestHelper.java
public static Model load(final Context context, final String relativePath) throws IOException {
    final Path file = moduleLocation(context, relativePath);
    final Model model = ModelFactory.createDefaultModel();
    final RDFReader reader = model.getReader("TURTLE");
    reader.setProperty("error-mode", "strict-fatal");
    reader.read(
        model,
        new FileReader(file.toFile()),
        context.getCanonicalNamespace(file).toString());
    return model;
}
Project: Tapioca    File: FinalCorpusExporter.java
protected Model readModel(String metaDataModelFile) {
    RDFReader reader = new TurtleReader();
    Model model = ModelFactory.createDefaultModel();
    FileInputStream fin = null;
    try {
        fin = new FileInputStream(metaDataModelFile);
        reader.read(model, fin, LOD_STATS_DOC_BASE_URI);
    } catch (FileNotFoundException e) {
        LOGGER.error("Couldn't read model with additional meta data from file. Ignoring this file.", e);
        return null;
    } finally {
        IOUtils.closeQuietly(fin);
    }
    return model;
}
Project: gerbil    File: ClassHierarchyLoader.java
protected void readClassHierarchy(File file, String rdfLang, String baseUri, Model model) throws IOException {
    InputStream is = null;
    RDFReader rdfReader = factory.getReader(rdfLang);
    try {
        is = new BufferedInputStream(new FileInputStream(file));
        rdfReader.read(model, is, baseUri);
    } finally {
        IOUtils.closeQuietly(is);
    }
}
Project: adapters    File: ToscaAdapterTest.java
private Model getModelFromTurtleFile(String path) {
  InputStream input = getClass().getResourceAsStream(path);
  Model model = ModelFactory.createDefaultModel();
  final RDFReader reader = model.getReader("TTL");
  reader.read(model, input, null);
  return model;
}
Project: swows-web    File: WebInput.java
public static void handleEvent(String eventString, EventListener listener) {
    RDFReaderF readerFactory = new RDFReaderFImpl();
    RDFReader reader = readerFactory.getReader("N3");
    Model model = ModelFactory.createDefaultModel();
    reader.read(
            model,
            new StringReader(eventString),
            DEFAULT_BASE);
    ResIterator eventResources = model.listResourcesWithProperty(RDF.type, DOMEvents.Event);
    while(eventResources.hasNext()) {
        listener.handleEvent(new EventWithDescriptor(eventResources.next()));
    }
}
Project: DALI    File: LodStatsReader.java
public static Model getModel(String file) {
    InputStream in;
    Model model = ModelFactory.createDefaultModel();
    VectorDescription d = new VectorDescription(file);
    RDFReader r = model.getReader("TURTLE");
    try {
        in = new FileInputStream(file);
        InputStreamReader reader = new InputStreamReader(in, "UTF8");
        r.read(model, reader, null);
        logger.info("RDF model read from " + file + " is of size " + model.size());
    } catch (Exception e) {
        logger.warn("Error reading model " + file);
    }
    return model;
}
Project: Tapioca    File: TMEngine.java
public static TMEngine createEngine(WorkerBasedLabelRetrievingDocumentSupplierDecorator cachingLabelRetriever,
        File inputFolder, File metaDataFile) {
    LOGGER.info("Loading model from \"" + inputFolder.getAbsolutePath() + "\".");
    // read probabilistic word topic Model from file
    GZipProbTopicModelingAlgorithmStateReader modelReader = new GZipProbTopicModelingAlgorithmStateReader();
    ProbTopicModelingAlgorithmStateSupplier model = (ProbTopicModelingAlgorithmStateSupplier) modelReader
            .readProbTopicModelState(new File(inputFolder.getAbsolutePath() + File.separator + MODEL_FILE_NAME));
    if (model == null) {
        LOGGER.error("Couldn't read model. Returning null.");
        return null;
    }
    ProbabilisticWordTopicModel probModel = (ProbabilisticWordTopicModel) ((ModelingAlgorithm) model).getModel();
    GZipCorpusObjectReader corpusReader = new GZipCorpusObjectReader(new File(inputFolder.getAbsolutePath()
            + File.separator + CORPUS_FILE_NAME));
    Corpus corpus = corpusReader.getCorpus();
    if (corpus == null) {
        LOGGER.error("Couldn't read corpus. Returning null.");
        return null;
    }
    ObjectObjectOpenHashMap<String, SimpleVector> knownDatasets = new ObjectObjectOpenHashMap<String, SimpleVector>(
            corpus.getNumberOfDocuments());
    // translate word topic assignment into topic vectors for each document
    SingleDocumentPreprocessor tempPreProc = new SingleDocumentPreprocessor();
    DocumentWordCountingSupplierDecorator decorator = new DocumentWordCountingSupplierDecorator(tempPreProc);
    tempPreProc.setDocumentSupplier(decorator);
    for (int i = 0; i < corpus.getNumberOfDocuments(); ++i) {
        // knownDatasets.put(createDataset(corpus.getDocument(i)),
        // createVector(model.getWordTopicAssignmentForDocument(i),
        // model.getNumberOfTopics()));
        // let's use smoothing for this
        knownDatasets.put(getUri(corpus.getDocument(i)), new SimpleVector((double[]) probModel
                .getClassificationForDocument(tempPreProc.processDocument(corpus.getDocument(i))).getValue()));
    }
    SingleDocumentPreprocessor preprocessor = createPreprocessing(cachingLabelRetriever, model.getVocabulary());
    if (preprocessor == null) {
        LOGGER.error("Couldn't create preprocessor. Returning null.");
        return null;
    }
    // Read additional meta data
    RDFReader reader = new TurtleReader();
    Model metaDataModel = ModelFactory.createDefaultModel();
    FileInputStream fin = null;
    try {
        fin = new FileInputStream(metaDataFile);
        reader.read(metaDataModel, fin, "");
    } catch (FileNotFoundException e) {
        LOGGER.error("Couldn't read meta data from file. Returning null.", e);
        return null;
    } finally {
        IOUtils.closeQuietly(fin);
    }

    return new TMEngine(probModel, knownDatasets, preprocessor, metaDataModel);
}
Project: Tapioca    File: BLEngine.java
public static BLEngine createEngine(File inputFolder, File metaDataFile) {
    GZipCorpusObjectReader corpusReader = new GZipCorpusObjectReader(new File(inputFolder.getAbsolutePath()
            + File.separator + CORPUS_FILE_NAME));
    Corpus corpus = corpusReader.getCorpus();
    if (corpus == null) {
        LOGGER.error("Couldn't read corpus. Returning null.");
        return null;
    }
    ObjectObjectOpenHashMap<String, ObjectOpenHashSet<String>> knownDatasets = new ObjectObjectOpenHashMap<String, ObjectOpenHashSet<String>>(
            corpus.getNumberOfDocuments());
    // generate a URI set for each document
    DatasetURIs uris;
    for (Document document : corpus) {
        uris = document.getProperty(DatasetURIs.class);
        if (uris == null) {
            LOGGER.warn("Got a document without DatasetURIs property. Ignoring this document.");
        } else {
            knownDatasets.put(getUri(document), uris.get());
        }
    }
    SingleDocumentPreprocessor preprocessor = createPreprocessing();
    if (preprocessor == null) {
        LOGGER.error("Couldn't create preprocessor. Returning null.");
        return null;
    }
    // Read additional meta data
    RDFReader reader = new TurtleReader();
    Model metaDataModel = ModelFactory.createDefaultModel();
    FileInputStream fin = null;
    try {
        fin = new FileInputStream(metaDataFile);
        reader.read(metaDataModel, fin, "");
    } catch (FileNotFoundException e) {
        LOGGER.error("Couldn't read meta data from file. Returning null.", e);
        return null;
    } finally {
        IOUtils.closeQuietly(fin);
    }

    return new BLEngine(knownDatasets, preprocessor, metaDataModel);
}
Project: CoreferenceResolution    File: Fox.java
@Override
public List<Entity> getEntities(String text) {
    List<Entity> list = new ArrayList<>();
    try {
        String foxJSONOutput = doTASK(text);

        JSONParser parser = new JSONParser();
        JSONArray jsonArray = (JSONArray) parser.parse(foxJSONOutput);
        String output = URLDecoder.decode((String) ((JSONObject) jsonArray.get(0)).get("output"), "UTF-8");

        String baseURI = "http://dbpedia.org";
        Model model = ModelFactory.createDefaultModel();
        RDFReader r = model.getReader("N3");
        r.read(model, new StringReader(output), baseURI);

        ResIterator iter = model.listSubjects();
        while (iter.hasNext()) {
            Resource next = iter.next();
            StmtIterator statementIter = next.listProperties();
            Entity ent = new Entity();
            while (statementIter.hasNext()) {
                Statement statement = statementIter.next();
                String predicateURI = statement.getPredicate().getURI();
                if (predicateURI.equals("http://www.w3.org/2000/10/annotation-ns#body")) {
                    ent.label = statement.getObject().asLiteral().getString();
                } else if (predicateURI.equals("http://ns.aksw.org/scms/means")) {
                    String uri = statement.getObject().asResource().getURI();
                    String encode = uri.replaceAll(",", "%2C");
                    ent.URI = encode;
                } else if (predicateURI.equals("http://ns.aksw.org/scms/beginIndex")) {
                    ent.start = statement.getObject().asLiteral().getInt();
                } else if (predicateURI.equals("http://ns.aksw.org/scms/endIndex")) {
                    ent.end = statement.getObject().asLiteral().getInt();
                }
            }
            list.add(ent);
        }

    } catch (IOException | ParseException e) {
        log.error("Could not call FOX for NER/NED", e);
    }
    return list;
}