Java 类org.antlr.runtime.ANTLRInputStream 实例源码

项目:moql    文件:OperandFactoryImpl.java   
@Override
public Operand createOperand(String operand) throws MoqlException {
    Validate.notEmpty(operand, "Parameter 'operand' is empty!");
    // Pseudo operands (resolved by createPseudoOperand) bypass the parser.
    Operand pseudoOperand = createPseudoOperand(operand);
    if (pseudoOperand != null)
      return pseudoOperand;
    try {
        // Encode and decode with the same explicit charset: the no-arg
        // getBytes() plus the default ANTLRInputStream decoder both depend on
        // the platform default and can corrupt non-ASCII operands.
        ANTLRInputStream is = new ANTLRInputStream(
            new ByteArrayInputStream(operand.getBytes("UTF-8")), "UTF-8");
        OperandLexer lexer = new OperandLexer(is);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        OperandParser parser = new OperandParser(tokens);
        parser.setFunctionFactory(functionFactory);
        return parser.operand();
    } catch (Exception e) {
        // Wrap any lexer/parser/I-O failure, preserving the original cause.
        throw new MoqlException(StringFormater.format("Create operand '{}' failed!", operand), e);
    }
}
项目:moql    文件:MoqlParser.java   
/**
 * Parses a MOQL statement into its selector definition.
 *
 * @param moql the MOQL text to parse; must be non-empty
 * @return SelectorMetadata or SetSelectorMetadata (original doc said
 *         "SetlectorMetadata", presumably a typo — confirm exact type name)
 * @throws MoqlException if the statement cannot be parsed
 */
public static SelectorDefinition parseMoql(String moql) throws MoqlException {
  Validate.notEmpty(moql, "Parameter 'moql' is empty!");
  try {
    // Encode and decode with the same explicit charset; the no-arg
    // getBytes() / default ANTLRInputStream decoder pair uses the platform
    // default and can corrupt non-ASCII characters in the statement.
    ANTLRInputStream is = new ANTLRInputStream(new ByteArrayInputStream(
        moql.getBytes("UTF-8")), "UTF-8");
    SelectorLexer lexer = new SelectorLexer(is);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    SelectorParser parser = new SelectorParser(tokens);
    return parser.selector();
  } catch (Exception e) {
    // Wrap any lexer/parser/I-O failure, preserving the original cause.
    throw new MoqlException(StringFormater.format("Parse moql '{}' failed!",
        moql), e);
  }
}
项目:moql    文件:MoqlParser.java   
/**
 * Parses a stand-alone filter condition into its metadata form.
 *
 * @param condition the condition text to parse; must be non-empty
 * @return metadata wrapping the parsed search condition
 * @throws MoqlException if the condition cannot be parsed
 */
public static ConditionMetadata parseCondition(String condition)
    throws MoqlException {
  Validate.notEmpty(condition, "Parameter 'condition' is empty!");
  try {
    // Encode and decode with the same explicit charset instead of the
    // platform default, which can corrupt non-ASCII condition text.
    ANTLRInputStream is = new ANTLRInputStream(new ByteArrayInputStream(
        condition.getBytes("UTF-8")), "UTF-8");
    FilterLexer lexer = new FilterLexer(is);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FilterParser parser = new FilterParser(tokens);
    return new ConditionMetadata(parser.searchCondition());
  } catch (Exception e) {
    // Wrap any lexer/parser/I-O failure, preserving the original cause.
    throw new MoqlException(StringFormater.format(
        "Parse condition '{}' failed!", condition), e);
  }
}
项目:swingbox-javahelp-viewer    文件:CSSInputStream.java   
/**
 * Builds a CSSInputStream over the resource at {@code source}, decoding with
 * {@code encoding} when given (platform default otherwise) and transparently
 * unwrapping gzip-compressed responses.
 */
public static CSSInputStream urlStream(URL source, String encoding) throws IOException {
    CSSInputStream stream = new CSSInputStream();

    stream.base = source;
    stream.encoding = (encoding != null) ? encoding : Charset.defaultCharset().name();

    URLConnection con = source.openConnection();
    // Some servers deliver stylesheets gzip-encoded; decompress before lexing.
    InputStream is = "gzip".equalsIgnoreCase(con.getContentEncoding())
            ? new GZIPInputStream(con.getInputStream())
            : con.getInputStream();

    stream.input = new ANTLRInputStream(is, stream.encoding);
    stream.source = is;
    stream.url = source;

    return stream;
}
项目:swift-t    文件:ParsedModule.java   
/**
 * Parse the specified file and create a ParsedModule object
 * @param moduleName logical name of the module being parsed
 * @param path path of the file to parse
 * @param preprocessed true if the file is C-preprocessor output, in which
 *        case "# line file" markers are used to build the line mapping
 * @return the parsed module holding the AST and line mapping
 * @throws IOException on failure opening or reading the file
 */
public static ParsedModule parse(String moduleName, String path,
                                 boolean preprocessed) throws IOException {
  FileInputStream inputStream = setupInput(path);
  /* Parse the input file and build AST */
  ANTLRInputStream antlrInput;
  try {
    antlrInput = new ANTLRInputStream(inputStream);
  } finally {
    // ANTLR 3's ANTLRInputStream buffers the entire stream in its
    // constructor, so the descriptor can be released immediately; the
    // original leaked it.
    inputStream.close();
  }
  LineMapping lineMapping;
  if (preprocessed) {
    // Remember the position, scan the preprocessor markers, then rewind so
    // the real parse starts from the beginning again.
    int startMark = antlrInput.mark();
    lineMapping = parsePreprocOutput(antlrInput);
    antlrInput.rewind(startMark);
  } else {
    // Treat # lines as comments.  All input from same file
    lineMapping = LineMapping.makeSimple(path);
  }
  SwiftAST tree = runANTLR(antlrInput, lineMapping);

  return new ParsedModule(moduleName, path, tree, lineMapping);
}
项目:tuffylite    文件:InputParser.java   
/**
 * Tokenizes the given MLN file, transparently decompressing *.gz input.
 * Failures are delegated to ExceptionMan and null is returned.
 */
private CommonTokenStream getTokens(String fname){
    InputStream is = null;
    try {
        FileInputStream fis = new FileInputStream(fname);
        if(fname.toLowerCase().endsWith(".gz")){
            is = new GZIPInputStream(fis);
        }else{
            is = fis;
        }
        // ANTLR 3 buffers the whole stream in the constructor, so the
        // stream is safe to close once the token stream is built.
        ANTLRInputStream input = new ANTLRInputStream(is);
        MLNLexer lexer = new MLNLexer(input);
        return new CommonTokenStream(lexer);
    } catch (Exception e) {
        ExceptionMan.handle(e);
    } finally {
        // The original only closed the stream on the success path, leaking
        // the descriptor whenever an earlier step threw.
        if (is != null) {
            try { is.close(); } catch (Exception ignored) { /* best effort */ }
        }
    }
    return null;
}
项目:cordovastudio    文件:CSSInputStream.java   
/**
 * Builds a CSSInputStream over the resource at {@code source}, decoding with
 * {@code encoding} when given (platform default otherwise) and transparently
 * unwrapping gzip-compressed responses.
 */
public static CSSInputStream urlStream(URL source, String encoding) throws IOException {
    CSSInputStream stream = new CSSInputStream();

    stream.base = source;
    if (encoding != null)
           stream.encoding = encoding;
    else
           stream.encoding = Charset.defaultCharset().name();

       URLConnection con = source.openConnection();
       InputStream is;
       // Some servers deliver stylesheets gzip-encoded; decompress before lexing.
       if ("gzip".equalsIgnoreCase(con.getContentEncoding()))
           is = new GZIPInputStream(con.getInputStream());
       else
           is = con.getInputStream();
       stream.input = new ANTLRInputStream(is, stream.encoding);
       stream.source = is;
       stream.url = source;

    return stream;
}
项目:jFuzzyLogic    文件:FIS.java   
/**
 * Load an FCL definition and create a "Fuzzy inference system (FIS)".
 * (The original Javadoc documented a file-name parameter; this overload
 * actually reads from a stream.)
 * @param inputStream : stream containing the FCL source
 * @param verbose : Be verbose?
 * @return A new FIS or null on error
 */
public static FIS load(InputStream inputStream, boolean verbose) {
    // Parse file (lexer first, then parser)
    FclLexer lexer;
    try {
        lexer = new FclLexer(new ANTLRInputStream(inputStream));
    } catch (IOException e1) {
        // Reading the stream failed; report and signal failure with null.
        System.err.println("Error reading inputStream'" + inputStream + "'");
        return null;
    }

    // Parse tree and create FIS
    try {
        return createFromLexer(lexer, verbose);
    } catch (RecognitionException e) {
        // Grammar-level failure is unexpected here; surface it as unchecked.
        throw new RuntimeException(e);
    }
}
项目:ASLanPPConnector    文件:ASLanSpecificationBuilder.java   
/**
 * Parses a plain-text ASLan specification. All problems (including raised
 * exceptions) are reported through {@code err}; returns null on failure.
 */
@Override
public IASLanSpec loadFromPlainText(String plainTextSpec, ErrorGatherer err, ISymbolsProvider... extraDefaults) {
    try {
        // Encode and decode with the same explicit charset; the no-arg
        // getBytes() / default decoder pair uses the platform default and
        // can mangle non-ASCII characters in the specification.
        ByteArrayInputStream bais = new ByteArrayInputStream(plainTextSpec.getBytes("UTF-8"));
        ANTLRInputStream antlrStream = new ANTLRInputStream(bais, "UTF-8");
        aslanLexer lexer = new aslanLexer(antlrStream);
        lexer.setErrorGatherer(err);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        aslanParser parser = new aslanParser(tokens);
        parser.setErrorGatherer(err);
        parser.setExtraSymbolsProviders(extraDefaults);
        IASLanSpec spec = parser.aslanSpecification();
        spec.finish();
        return spec;
    }
    catch (Exception e) {
        // Parsing problems are routed to the gatherer rather than thrown.
        err.addException(ASLanErrorMessages.GENERIC_ERROR, e.getMessage());
        return null;
    }
}
项目:ASLanPPConnector    文件:GroundTermBuilder.java   
/**
 * Parses the textual representation of a ground term.
 * Returns null when lexing/parsing reports any error (errors are
 * deliberately silent for now).
 */
public static IGroundTerm fromString(String term) {
    try {
        // Encode and decode with the same explicit charset; the platform
        // default could corrupt non-ASCII terms.
        ByteArrayInputStream bais = new ByteArrayInputStream(term.getBytes("UTF-8"));
        ANTLRInputStream antlrStream = new ANTLRInputStream(bais, "UTF-8");
        ErrorGatherer err = new ErrorGatherer(OutputFormatErrorMessages.DEFAULT);
        ofLexer lexer = new ofLexer(antlrStream);
        lexer.setErrorGatherer(err);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ofParser parser = new ofParser(tokens);
        parser.setErrorGatherer(err);
        IGroundTerm t = parser.term();
        // Only hand back the term if the whole parse was error-free.
        if (err.getErrors().size() == 0) {
            return t;
        }
        else {
            return null;
        }
    }
    catch (Exception e) {
        // silently ignore any errors for now
        return null;
    }
}
项目:DocIT    文件:Main.java   
/** Parses the MegaL file named by args[0]; exits with status -1 on parse errors. */
public static void main(String[] args) throws Exception {
    FileInputStream file = new FileInputStream(new File(args[0]));
    ANTLRInputStream input;
    try {
        input = new ANTLRInputStream(file);
    } finally {
        // ANTLR 3 buffers the whole file in the constructor, so the
        // descriptor can be released right away (the original leaked it).
        file.close();
    }
    MegaLLexer lexer = new MegaLLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    MegaLParser parser = new MegaLParser(tokens);
    parser.megal();
    if (parser.error)
        System.exit(-1);
}
项目:DocIT    文件:Main.java   
/**
 * Parses the grammar file {@code <path>/<stem>.yapg} and generates code for
 * it into package {@code pkg}. Throws RecognitionException on parse errors.
 */
public static void main(String[] args) throws IOException, RecognitionException {
    String path = args[0];
    String pkg = args[1];
    String stem = args[2];
    FileInputStream stream = new FileInputStream(path + File.separatorChar + stem + ".yapg");
    ANTLRInputStream antlr;
    try {
        antlr = new ANTLRInputStream(stream);
    } finally {
        // ANTLR 3 buffers the whole file in the constructor; close the
        // descriptor here instead of leaking it as the original did.
        stream.close();
    }
    GrammarLexer lexer = new GrammarLexer(antlr);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    GrammarParser parser = new GrammarParser(tokens);
    Grammar g = parser.parseGrammar();
    if (parser.error) throw new RecognitionException();
    Generator.generate(path, pkg, stem, g);
}
项目:alfresco-data-model    文件:FTSTest.java   
/**
 * Runs the FTS gUnit suite bundled as a classpath resource and asserts that
 * it produced no failures or invalid tests.
 */
public void testLexer() throws IOException, RecognitionException
{
    ClassLoader cl = FTSTest.class.getClassLoader();
    InputStream modelStream = cl.getResourceAsStream("org/alfresco/repo/search/impl/parsers/fts_test.gunit");

    CharStream input = new ANTLRInputStream(modelStream);

    gUnitExecutor executer = new gUnitExecutor(parse(input), "FTS");

    // Run the suite once and reuse its report: the original called
    // execTest() a second time just to print, re-executing every test.
    String result = executer.execTest();
    System.out.print(result); // unit test result

    assertEquals("Failures: " + result, 0, executer.failures.size());
    assertEquals("Invalids " + result, 0, executer.invalids.size());
}
项目:alfresco-data-model    文件:CMIS_FTSTest.java   
/**
 * Lexes a small fuzzy-match query ("~woof^2") and prints every token the
 * FTS lexer produces. Diagnostic only — no assertions.
 */
public void testLexerOutput() throws IOException
{
    String str = "~woof^2";
    CharStream input = new ANTLRInputStream(new ByteArrayInputStream(str.getBytes("UTF-8")));
    CommonTokenStream tokenStream = new CommonTokenStream(new FTSLexer(input));
    List<CommonToken> tokens = (List<CommonToken>) tokenStream.getTokens();
    for (CommonToken token : tokens)
    {
        System.out.println(token.toString());
    }
}
项目:SDN-Multicast    文件:TopologyHelper.java   
/**
 * Parses a Graphviz DOT file and stores the resulting graph object in
 * {@code graphObjRet}. Failures are reported on stdout and swallowed,
 * leaving any previously-loaded graph untouched (original behavior).
 */
private void getDotTreeGraphFromFile (String dotFileName) {

        FileInputStream file = null;
        try {
            file = new FileInputStream(dotFileName);
            ANTLRInputStream input = new ANTLRInputStream(file);
            DotLexer lexer = new DotLexer(input);
            CommonTokenStream tokens = new CommonTokenStream(lexer);

            DotParser parser = new DotParser(tokens);
            graph_return ret = parser.graph();
            CommonTree tree = ret.getTree();
            CommonTreeNodeStream ctnsNodes = new CommonTreeNodeStream(tree);
            DotTree dotTree = new DotTree(ctnsNodes);
            graphObjRet = dotTree.graph().graphObj;

            // DOT attribute values may be quoted; strip the quotes so callers
            // see the raw values. (Removed the original's empty debug loops
            // over nodes/node pairs — they contained only commented-out prints.)
            removeQuotesFromPropertyValueOfGrpah(graphObjRet);

        } catch (Exception e) {
            System.out.println("error in reading file named - "+dotFileName);
            //e.printStackTrace();
        } finally {
            // Release the file descriptor on every path; the original never
            // closed it at all.
            if (file != null) {
                try { file.close(); } catch (Exception ignored) { /* best effort */ }
            }
        }
    }
项目:rockIt    文件:SyntaxReader.java   
/**
 * Parses the model file with the ANTLR-generated lexer/parser and returns
 * the in-memory Model.
 */
private Model getModelANTLR(String filename) throws IOException, RecognitionException, ParseException
{
    // Create an input character stream from the model file
    FileInputStream in = new FileInputStream(new File(filename));
    ANTLRInputStream input;
    try {
        input = new ANTLRInputStream(in);
    } finally {
        // ANTLR 3 buffers the entire file in the constructor; close the
        // descriptor here instead of leaking it as the original did.
        in.close();
    }
    // Create a ModelLexer that feeds from that stream
    ModelLexer lexer = new ModelLexer(input);
    // Create a stream of tokens fed by the lexer
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Create a parser that feeds off the token stream
    ModelParser parser = new ModelParser(tokens);
    // Parse and return the model
    return parser.model();
}
项目:rockIt    文件:SyntaxReader.java   
/**
 * Parses the ground-values file with the ANTLR-generated lexer/parser and
 * returns the resulting set of ground predicates.
 */
private TreeSet<PredicateAbstract> getGroundValuesANTLR(String filename) throws IOException, RecognitionException, ParseException
{
    // Create an input character stream from the ground-values file
    FileInputStream in = new FileInputStream(new File(filename));
    ANTLRInputStream input;
    try {
        input = new ANTLRInputStream(in);
    } finally {
        // ANTLR 3 buffers the entire file in the constructor; close the
        // descriptor here instead of leaking it as the original did.
        in.close();
    }
    // Create a GroundValuesLexer that feeds from that stream
    GroundValuesLexer lexer = new GroundValuesLexer(input);
    // Create a stream of tokens fed by the lexer
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Create a parser that feeds off the token stream
    GroundValuesParser parser = new GroundValuesParser(tokens);
    // Parse and return the ground values
    return parser.groundValues();
}
项目:form-follows-function    文件:VSGC1117Test.java   
/**
 * Lexes the master file and collects, for every non-hidden token, the
 * lexer's character index after that token was produced.
 * NOTE(review): despite the name, getCharIndex() is read *after* the token,
 * so these look like end offsets — confirm against callers.
 */
private List<Integer> tokenStartPositions() throws IOException {
    List<Integer> res = new ArrayList<Integer>();
    FileInputStream fileStream = new FileInputStream(masterFile);
    ANTLRReaderStream input;
    try {
        input = new ANTLRInputStream(fileStream);
    } finally {
        // ANTLR 3 buffers the whole file in the constructor; close the
        // descriptor here instead of leaking it as the original did.
        fileStream.close();
    }
    v4Lexer lexer = new v4Lexer(input);
    Token t = lexer.nextToken();
    while (t.getType() != Token.EOF) {
        if (t.getChannel() != Token.HIDDEN_CHANNEL) {
            res.add(lexer.getCharIndex());
        }
        t = lexer.nextToken();
    }
    return res;
}
项目:community-edition-old    文件:FTSTest.java   
/**
 * Runs the FTS gUnit suite bundled as a classpath resource and asserts that
 * it produced no failures or invalid tests.
 */
public void testLexer() throws IOException, RecognitionException
{
    ClassLoader cl = FTSTest.class.getClassLoader();
    InputStream modelStream = cl.getResourceAsStream("org/alfresco/repo/search/impl/parsers/fts_test.gunit");

    CharStream input = new ANTLRInputStream(modelStream);

    gUnitExecutor executer = new gUnitExecutor(parse(input), "FTS");

    // Run the suite once and reuse its report: the original called
    // execTest() a second time just to print, re-executing every test.
    String result = executer.execTest();
    System.out.print(result); // unit test result

    assertEquals("Failures: " + result, 0, executer.failures.size());
    assertEquals("Invalids " + result, 0, executer.invalids.size());
}
项目:community-edition-old    文件:CMIS_FTSTest.java   
/**
 * Lexes a small fuzzy-match query ("~woof^2") and prints every token the
 * FTS lexer produces.
 * NOTE(review): no assertions — this "test" is diagnostic output only.
 */
public void testLexerOutput() throws IOException
{
    String str = "~woof^2";
    CharStream input = new ANTLRInputStream(new ByteArrayInputStream(str.getBytes("UTF-8")));
    FTSLexer lexer = new FTSLexer(input);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    for(CommonToken token : (List<CommonToken>)tokenStream.getTokens())
    {
        System.out.println(token.toString());
    }

}
项目:poly-ql    文件:ASTNodes.java   
/**
 * Builds a QL parser over the given source text. On I/O failure the stack
 * trace is printed and null is returned (original behavior preserved).
 */
public static QLParser getParser(String input) {
    QLParser parser = null;
    try {
        InputStream inputStream = new ByteArrayInputStream(input.getBytes());
        QLLexer lexer = new QLLexer(new ANTLRInputStream(inputStream));
        TokenStream tokenStream = new CommonTokenStream(lexer);
        parser = new QLParser(tokenStream);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return parser;
}
项目:omniproperties    文件:OmniPropertiesReader.java   
/**
 * Lexes and parses the given properties stream and returns a node stream
 * over the resulting AST, ready for tree-grammar walking.
 */
private static CommonTreeNodeStream compileAst(final InputStream input)
        throws IOException, RecognitionException {
    final OmniPropertiesLexer lexer = new OmniPropertiesLexer(new ANTLRInputStream(input));
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final OmniPropertiesParser parser = new OmniPropertiesParser(tokens);
    return new CommonTreeNodeStream(parser.omniproperties().getTree());
}
项目:pm    文件:ANTLRDemo.java   
/**
 * Demo: parses a sample parameterized-operations expression and prints its
 * AST in Graphviz DOT form.
 */
public static void main(String[] args) throws Exception {

        StringInputStream inputStream = new StringInputStream("sum (sma(9, macd (1,2,3, close)) , close)");
        ANTLRInputStream in = new ANTLRInputStream(inputStream);
        CommonTokenStream tokens = new CommonTokenStream(new ParameterizedOperationsLexer(in));
        ParameterizedOperationsParser parser = new ParameterizedOperationsParser(tokens);
        CommonTree tree = (CommonTree) parser.indicatorexpr().getTree();
        DOTTreeGenerator gen = new DOTTreeGenerator();
        System.out.println(gen.toDOT(tree));
    }
项目:protostuff    文件:AbstractParser.java   
/**
 * Parses a .proto definition from the stream into the given descriptor.
 */
public static void load(InputStream in, Proto proto) throws Exception
{
    // Create an input character stream from the given stream
    ANTLRInputStream input = new ANTLRInputStream(in);
    // Create a ProtoLexer that feeds from that stream
    // (original comment said "ExprLexer" — stale copy/paste)
    ProtoLexer lexer = new ProtoLexer(input);
    // Create a stream of tokens fed by the lexer
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Create a parser that feeds off the token stream
    ProtoParser parser = new ProtoParser(tokens);
    // Parse the stream, populating the given Proto descriptor
    parser.parse(proto);
}
项目:ASLanPPConnector    文件:Entity.java   
/**
 * Parses one ASLan++ entity declaration from the stream into a throw-away
 * "dummy" specification, then runs two tree-grammar passes over the parse
 * tree: SymbolsNew registers symbols, ToASLanNew loads expressions and
 * types into the in-memory model. Returns null (recording the error in
 * {@code err}) on any failure.
 */
public static Entity fromStream(EntityManager manager, InputStream is, ChannelModel cm, ErrorGatherer err) {
    try {
        Debug.logger.trace("Loading from stream an entity.");
        ANTLRInputStream input = new ANTLRInputStream(is);
        ASLanPPNewLexer lexer = new ASLanPPNewLexer(input);
        lexer.setErrorGatherer(err);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ASLanPPNewParser parser = new ASLanPPNewParser(tokens);
        parser.setErrorGatherer(err);
        ASLanPPSpecification dummy = new ASLanPPSpecification(manager, "dummy", cm);
        ASLanPPNewParser.entityDeclaration_return r = parser.entityDeclaration(dummy);
        Entity ent = r.e;
        if (r.getTree() != null) {
            // By this time the types are registered, so we can run the
            // tree
            // grammar that will register the symbols.
            CommonTree ct = (CommonTree) r.getTree();
            CommonTreeNodeStream nodes = new CommonTreeNodeStream(ct);
            SymbolsNew symb = new SymbolsNew(nodes);
            symb.entity(dummy);
            // Now we can run the tree grammar that will load the
            // expressions and types into the in-memory model.
            nodes.reset();
            ToASLanNew ta = new ToASLanNew(nodes);
            ta.entity(dummy);
        }
        // NOTE(review): if the parse yielded no entity, ent may be null here
        // and this logging line NPEs — the catch below then reports it as an
        // import error. Confirm whether that is the intended failure path.
        Debug.logger.info("Entity called '" + ent.getName() + "' was successfully loaded from stream.");
        return ent;
    }
    catch (Exception ex) {
        Debug.logger.error("Exception occured while loading entity from stream.", ex);
        err.addException(ErrorMessages.ERROR_AT_IMPORT, ex.getMessage());
        return null;
    }
}
项目:ASLanPPConnector    文件:MetaInfo.java   
/**
 * Parses a metainfo comment line into a {@link MetaInfo} instance carrying
 * its name, flags, and key/value parameters.
 * Returns null on any lexer/parser error (errors are deliberately silent).
 */
public static MetaInfo fromString(String commentLine) {
    try {
        // Encode and decode with the same explicit charset; the no-arg
        // getBytes() uses the platform default and can corrupt non-ASCII text.
        ByteArrayInputStream bais = new ByteArrayInputStream(commentLine.getBytes("UTF-8"));
        ANTLRInputStream antlrStream = new ANTLRInputStream(bais, "UTF-8");
        metainfoLexer lexer = new metainfoLexer(antlrStream);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        metainfoParser parser = new metainfoParser(tokens);
        metainfo_return mr = parser.metainfo();
        if (!lexer.wasAnyError && !parser.wasAnyError) {
            MetaInfo mi = new MetaInfo(mr.name);
            for (String f : mr.flags) {
                mi.addFlag(f);
            }
            for (String k : mr.parameters.keySet()) {
                String v = mr.parameters.get(k);
                mi.addParameter(k, v);
            }
            return mi;
        }
        else {
            return null;
        }
    }
    catch (Exception e) {
        // silently ignore any errors for now
        return null;
    }
}
项目:asup    文件:CLParserWrapper.java   
/**
 * Parses a CL script from the UTF-8 encoded stream, converting I/O failures
 * into the API's checked exception.
 */
@Override
public CLObject parse(InputStream in) throws CLScriptException {

    try {
        ANTLRInputStream stream = new ANTLRInputStream(in, "UTF-8");
        return invokeParser(stream);
    } catch (IOException ioFailure) {
        throw new CLScriptException(ioFailure);
    }
}
项目:asup    文件:CLCommandParserWrapper.java   
/**
 * Parses a single CL command from the UTF-8 encoded stream, converting I/O
 * failures into the API's checked exception.
 */
@Override
public CLCommand parse(InputStream in) throws CLScriptException {

    try {
        ANTLRInputStream charStream = new ANTLRInputStream(in, "UTF-8");
        return invokeParser(charStream);
    } catch (IOException ioProblem) {
        throw new CLScriptException(ioProblem);
    }
}
项目:asup    文件:CLParameterParserWrapper.java   
/**
 * Parses a CL command parameter component from the UTF-8 encoded stream,
 * converting I/O failures into the API's checked exception.
 */
@Override
public CLParmAbstractComponent parse(InputStream in) throws CLScriptException {

    try {
        return invokeParser(new ANTLRInputStream(in, "UTF-8"));
    } catch (IOException e) {
           throw new CLScriptException(e);
    }
}
项目:asup    文件:CLExpressionParserWrapper.java   
/**
 * Parses a CL expression from the UTF-8 encoded stream, converting I/O
 * failures into the API's checked exception.
 */
@Override
public QExpression parse(InputStream in) throws CLScriptException {

    try {
        return invokeParser(new ANTLRInputStream(in, "UTF-8"));
    } catch (IOException e) {
           throw new CLScriptException(e);
    }
}
项目:jnome    文件:JavaModelFactory.java   
/**
 * Wires up the Java 7 lexer/token-stream/parser chain over the stream and
 * attaches the view the parser should populate.
 */
@Override
protected ChameleonANTLR3Parser<? extends Java7> getParser(InputStream inputStream, View view) throws IOException {
  JavaLexer lexer = new JavaLexer(new ANTLRInputStream(inputStream));
  JavaParser parser = new JavaParser(new CommonTokenStream(lexer));
  parser.setView(view);
  return parser;
}
项目:Rapture    文件:ResourceBasedApiReader.java   
/**
 * Reads the named API definition from the classpath (resource "/apiFileName")
 * into a UTF-8 decoded ANTLR character stream.
 *
 * @throws IOException if the resource is missing or cannot be read
 */
@Override
public ANTLRStringStream read(String apiFileName) throws IOException {
    InputStream inputStream = getClass().getResourceAsStream("/" + apiFileName);
    if (inputStream == null) {
        // Fail with a descriptive error instead of the NullPointerException
        // the ANTLRInputStream constructor would raise on a missing resource.
        throw new IOException("API resource not found on classpath: /" + apiFileName);
    }
    try {
        return new ANTLRInputStream(inputStream, StandardCharsets.UTF_8.name());
    } finally {
        // ANTLR 3 buffers the whole stream in the constructor; the original
        // never closed this stream.
        inputStream.close();
    }
}
项目:jtcc    文件:TCCTokenizer.java   
public TCCTokenizer(InputStream ins) throws IOException {
    // Buffers the whole stream into an ANTLR character stream (decoded with
    // the platform-default charset) and initializes the tokenizer with it.
    initialize(new ANTLRInputStream(ins));
}
项目:jtcc    文件:TCCTokenizer.java   
public TCCTokenizer(InputStream ins, String encoding) throws IOException {
    // Same as the single-argument constructor, but decodes the stream with
    // the caller-supplied character encoding.
    initialize(new ANTLRInputStream(ins, encoding));
}
项目:jawn    文件:STFastGroupDir.java   
/**
 * Builds the character stream for a template resource. When skipLF is set,
 * the \r/\n-stripping, line-trimming stream flavor is used; otherwise the
 * template is read exactly as stored.
 */
private final ANTLRStringStream constructStringStream(URL f) throws IOException {
    if (skipLF) {
        return new ANTLRNoNewLineStream(f, encoding);
    }
    return new ANTLRInputStream(f.openStream(), encoding);
}
项目:pm    文件:EditorIndsLexerDelegate.java   
public EditorIndsLexerDelegate(ANTLRInputStream in, RecognizerSharedState state, Set<EditorOpDescr> runtimeOps) {
    // Delegate lexer sharing its recognizer state with the composing lexer;
    // runtimeOps holds the operation descriptors available at runtime.
    super(state, in);
    this.runtimeOps = runtimeOps;
}
项目:swift-t    文件:ParsedModule.java   
/**
 * Use ANTLR to parse the input and get the Tree.
 * Distinguishes three failure modes: a RecognitionException (internal
 * error → fatal), a recovered-but-reported parser error (user error →
 * fatal), and a null program (parser failure → runtime error).
 * @param input buffered ANTLR character stream for the module source
 * @param lineMap mapping used to report positions in the original file
 * @return the root SwiftAST of the parsed program
 */
private static SwiftAST runANTLR(ANTLRInputStream input, LineMapping lineMap) {

  ExMLexer lexer = new ExMLexer(input);
  lexer.lineMap = lineMap;
  CommonTokenStream tokens = new CommonTokenStream(lexer);
  ExMParser parser = new ExMParser(tokens);
  parser.lineMap = lineMap;
  parser.setTreeAdaptor(new SwTreeAdaptor());

  // Launch parsing
  ExMParser.program_return program = null;
  try
  {
    program = parser.program();
  }
  catch (RecognitionException e)
  {
    // This is an internal error
    e.printStackTrace();
    System.out.println("Parsing failed: internal error");
    throw new STCFatal(ExitCode.ERROR_INTERNAL.code());
  }

  /* NOTE: in some cases the antlr parser will actually recover from
   *    errors, print an error message and continue, generating the
   *    parse tree that it thinks is most plausible.  This is where
   *    we detect this case.
   */
  if (parser.parserError) {
    // This is a user error
    System.err.println("Error occurred during parsing.");
    throw new STCFatal(ExitCode.ERROR_USER.code());
  }

  // Do we actually need this check? -Justin (10/26/2011)
  if (program == null)
    throw new STCRuntimeError("PARSER FAILED!");


  SwiftAST tree = (SwiftAST) program.getTree();

  return tree;
}
项目:swift-t    文件:ParsedModule.java   
/**
 * Use the file and line info from c preprocessor to
 * update SwiftAST.
 * (Original Javadoc named "lexer"/"tree" parameters; the actual parameter
 * is the buffered character stream below.)
 * @param input buffered ANTLR character stream holding preprocessor output
 * @return a LineMapping built from the "# linenum filename flags" markers
 */
private static LineMapping parsePreprocOutput(ANTLRInputStream input) {

  /*
   * This function is a dirty hack, but works ok
   * because the C preprocessor output has a very simple output format
   * of
   * # linenum filename flags
   *
   * We basically just need the linenum and filename
   * (see http://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html)
   */
  LineMapping posTrack = new LineMapping();
  try {
    ExMLexer lexer = new ExMLexer(input);
    /*
     * don't emit error messages with bad line numbers:
     * we will emit lexer error messages on the second pass
     */
    lexer.quiet = true;
    Token t = lexer.nextToken();
    while (t.getType() != ExMLexer.EOF) {
      // Preprocessor markers are routed to a dedicated channel by the lexer.
      if (t.getChannel() == ExMLexer.CPP) {
        assert(t.getText().substring(0, 2).equals("# ")): t.getText();
        // Tokenize the remainder of the marker: a line number followed by a
        // quoted file name (possibly with octal escapes).
        StreamTokenizer tok = new StreamTokenizer(
              new StringReader(t.getText().substring(2)));
        tok.slashSlashComments(false);
        tok.slashStarComments(false);
        tok.quoteChar('"');
        if (tok.nextToken() != StreamTokenizer.TT_NUMBER) {
          throw new STCRuntimeError("Confused by " +
              " preprocessor line " + t.getText());
        }
        int lineNum = (int)tok.nval;

        if (tok.nextToken() == '"') {
          // Quoted file name with octal escape sequences

          // Ignore lines from preprocessor holding information we
          // don't need (these start with "<"
          String fileName = tok.sval;
          if (!fileName.startsWith("<")) {
            posTrack.addPreprocInfo(t.getLine() + 1,
                                  fileName, lineNum);
          }
        }
      }
      t = lexer.nextToken();
    }
  } catch (IOException e) {
    System.out.println("Error while trying to read preprocessor" +
        " output: " + e.getMessage());
    throw new STCFatal(1);
  }
  return posTrack;
}
项目:protostuff    文件:ProtoUtil.java   
/**
 * Loads the proto from an {@link InputStream}.
 * The stream is fully buffered into an ANTLR character stream and then
 * parsed into {@code target}; any lexer/parser/I-O failure propagates.
 */
public static void loadFrom(InputStream in, Proto target) throws Exception
{
    loadFrom(new ANTLRInputStream(in), target);
}
项目:ASLanPPConnector    文件:ASLanPPSpecification.java   
/**
 * Parses a complete ASLan++ specification from the stream, then — if parsing
 * succeeded — runs the two tree-grammar passes (SymbolsNew registers symbols,
 * ToASLanNew loads expressions/types) over the parse tree.
 * NOTE(review): sourceName appears to be a static field assigned here (the
 * file-name stem, extension stripped) — confirm its declaration elsewhere.
 */
public static ASLanPPSpecification fromStream(EntityManager manager, String fileName, InputStream aslanppSpec, ErrorGatherer err) throws IOException, RecognitionException {
    if (fileName != null) {
        sourceName = new java.io.File(fileName).getName();
        if (sourceName != null) {
            // remove any file name extension
            int lastindex = sourceName.lastIndexOf('.');
            if (lastindex >= 0)
                sourceName = sourceName.substring(0, lastindex);
        }
    }
    // Run the lexer first.
    ANTLRInputStream antStream = new ANTLRInputStream(aslanppSpec);
    if (err == null)
      err = new ErrorGatherer(ErrorMessages.DEFAULT);
    ASLanPPNewLexer lexer = new ASLanPPNewLexer(antStream);
    lexer.setErrorGatherer(err);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ASLanPPNewParser parser = new ASLanPPNewParser(tokens);
    parser.setErrorGatherer(err);
    ASLanPPNewParser.program_return r = null;
    r = parser.program(manager);
    Debug.logger.info("Parser reported " + parser.getNumberOfSyntaxErrors() + " errors.");
    ASLanPPSpecification spec = r.spec;
    // The try/finally below is guarded by the null check, so the finally
    // (which dereferences spec) only runs when spec is non-null.
    if (spec != null) try {
        spec.getErrorGatherer().addAll(err); // copy any errors from parsing phase into the new instance.
        if(parser.getNumberOfSyntaxErrors() == 0 && r.getTree() != null) {
            // By this time the types are registered, so we can run the
            // tree grammar that will register the symbols.
            CommonTree ct = (CommonTree) r.getTree();
            CommonTreeNodeStream nodes = new CommonTreeNodeStream(ct);
            SymbolsNew symb = new SymbolsNew(nodes);
            symb.entity(spec);
            // Now we can run the tree grammar that will load the
            // expressions and types into the in-memory model.
            nodes.reset();
            ToASLanNew ta = new ToASLanNew(nodes);
            ta.entity(spec);
        }
    }
    finally {
            err.addAll(spec.getErrorGatherer()); //copy back any errors 
    }
    return spec;
}