public static <L extends Lexer, P extends Parser> P newParser(
        Function<CharStream, L> lexerFactory,
        Function<TokenStream, P> parserFactory,
        String input,
        boolean useBailErrorStrategy,
        boolean removeErrorListeners) {
    CharStream charStream = new ANTLRInputStream(input);
    L lexer = lexerFactory.apply(charStream);
    if (removeErrorListeners) {
        lexer.removeErrorListeners();
    }
    TokenStream tokenStream = new CommonTokenStream(lexer);
    P parser = parserFactory.apply(tokenStream);
    if (useBailErrorStrategy) {
        parser.setErrorHandler(new BailErrorStrategy());
    }
    if (removeErrorListeners) {
        parser.removeErrorListeners();
    }
    return parser;
}
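A brief usage sketch of this generic factory, assuming a hypothetical JsonLexer/JsonParser pair generated by ANTLR (the grammar, its json rule, and the sample input are illustrative only; any generated lexer and parser whose constructors take a CharStream and a TokenStream would be wired up the same way):

// Constructor references satisfy the Function<CharStream, L> / Function<TokenStream, P> parameters.
JsonParser parser = newParser(
        JsonLexer::new,      // Function<CharStream, JsonLexer>
        JsonParser::new,     // Function<TokenStream, JsonParser>
        "{\"key\": 42}",     // input text (hypothetical)
        true,                // bail out on the first syntax error
        true);               // drop the default console error listeners
JsonParser.JsonContext tree = parser.json();  // hypothetical start rule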
private Object readMatrix(List<Object> parameters) {
    String rationalString = in.nextLine();
    MatrixLexer matrixLexer = new MatrixLexer(new ANTLRInputStream(rationalString));
    MatrixParser matrixParser = new MatrixParser(new CommonTokenStream(matrixLexer));
    matrixParser.setErrorHandler(new BailErrorStrategy());
    try {
        MatrixParser.MatrixContext matrixContext = matrixParser.matrix();
        return Matrix.fromMatrixContext(matrixContext, Scope.NULL_SCOPE);
    } catch (ParseCancellationException e) {
        throw new InvalidReadRuntimeError("Invalid input read from stdin! Expected matrix format!");
    }
}
static MysqlParser.ScriptContext parse(CharStream cs) {
    MysqlLexer lexer = new MysqlLexer(cs);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    tokens.setTokenSource(lexer);
    MysqlParser parser = new MysqlParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    boolean success = false;
    try {
        MysqlParser.ScriptContext script = parser.script();
        success = true;
        return script;
    } finally {
        if (!success && (parser.lastStatement != null)) {
            _log.debug("last passed statement: {}", ((ParseTree) parser.lastStatement).getText());
        }
    }
}
private void compileFiles(List<RawFile> files, OOPSourceCodeModel srcModel, List<String> projectFileTypes) {
    for (RawFile file : files) {
        try {
            CharStream charStream = new ANTLRInputStream(file.content());
            GolangLexer lexer = new GolangLexer(charStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            GolangParser parser = new GolangParser(tokens);
            // Configure the parser before invoking the start rule; setting the
            // error handler and prediction mode after sourceFile() has no effect.
            parser.setErrorHandler(new BailErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            SourceFileContext sourceFileContext = parser.sourceFile();
            ParseTreeWalker walker = new ParseTreeWalker();
            GolangBaseListener listener = new GoLangTreeListener(srcModel, projectFileTypes, file);
            walker.walk(listener, sourceFileContext);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
public static Program parse(String source) {
    RankPLLexer lexer = new RankPLLexer(new ANTLRInputStream(source));
    TokenStream tokens = new CommonTokenStream(lexer);
    RankPLParser parser = new RankPLParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ConcreteParser classVisitor = new ConcreteParser();

    // Parse
    Program program = null;
    try {
        program = (Program) classVisitor.visit(parser.program());
    } catch (ParseCancellationException e) {
        System.out.println("Syntax error");
        // Re-parse with the default error strategy so the error listeners can
        // report details, then give up by returning null.
        lexer = new RankPLLexer(new ANTLRInputStream(source));
        tokens = new CommonTokenStream(lexer);
        parser = new RankPLParser(tokens);
        classVisitor = new ConcreteParser();
        try {
            program = (Program) classVisitor.visit(parser.program());
        } catch (Exception ex) {
            // Ignore
        }
        return null;
    }
    return program;
}
@Override
protected void configure() {
    bind(Importer.class).to(ImporterImpl.class);
    bind(FileDescriptorLoader.class).to(FileDescriptorLoaderImpl.class);
    bind(ANTLRErrorListener.class).to(ParseErrorLogger.class);
    bind(ANTLRErrorStrategy.class).to(BailErrorStrategy.class);
    bind(ProtoContext.class)
            .annotatedWith(Names.named(DESCRIPTOR_PROTO))
            .toProvider(DefaultDescriptorProtoProvider.class);
    Multibinder<ProtoContextPostProcessor> postProcessors = Multibinder
            .newSetBinder(binder(), ProtoContextPostProcessor.class);
    postProcessors.addBinding().to(ImportsPostProcessor.class);
    postProcessors.addBinding().to(TypeRegistratorPostProcessor.class);
    postProcessors.addBinding().to(TypeResolverPostProcessor.class);
    postProcessors.addBinding().to(ExtensionRegistratorPostProcessor.class);
    postProcessors.addBinding().to(OptionsPostProcessor.class);
    postProcessors.addBinding().to(UserTypeValidationPostProcessor.class);
    install(new FactoryModuleBuilder()
            .implement(FileReader.class, MultiPathFileReader.class)
            .build(FileReaderFactory.class));
}
private Enum parseEnumBlock(String input) {
    CharStream stream = CharStreams.fromString(input);
    ProtoLexer lexer = new ProtoLexer(stream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(TestUtils.ERROR_LISTENER);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    ProtoParser parser = new ProtoParser(tokenStream);
    parser.setErrorHandler(new BailErrorStrategy());
    parser.removeErrorListeners();
    parser.addErrorListener(TestUtils.ERROR_LISTENER);
    ProtoContext context = new ProtoContext("test.proto");
    Proto proto = new Proto();
    context.push(proto);
    EnumParseListener enumParseListener = new EnumParseListener(tokenStream, context);
    OptionParseListener optionParseListener = new OptionParseListener(tokenStream, context);
    parser.addParseListener(enumParseListener);
    parser.addParseListener(optionParseListener);
    parser.enumBlock();
    return proto.getEnums().get(0);
}
@Test
public void can_parse_an_interface_with_extends_and_a_single_method() throws Exception {
    String input = "public interface Resolver\n"
            + " extends Serializable {\n\n"
            + " public int resolve(String value);\n"
            + "}\n";
    Java8Lexer lexer = new Java8Lexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    Java8Parser parser = new Java8Parser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree ast = parser.compilationUnit();
    Assert.assertNotNull(ast);
}
@Test
public void parses_a_simple_input() {
    String input = ".packageDeclaration #identifier::before {\n"
            + " content: \" \";\n"
            + "}\n";
    StringTemplateCSSLexer lexer = new StringTemplateCSSLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    StringTemplateCSSParser parser = new StringTemplateCSSParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree ast = parser.css();
    Assert.assertNotNull(ast);
}
@Test
public void parses_another_simple_input() {
    String input = " .packageDeclaration \";\"::after {\n"
            + " content: \"\\n\\n\";\n"
            + " }";
    StringTemplateCSSLexer lexer = new StringTemplateCSSLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    StringTemplateCSSParser parser = new StringTemplateCSSParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree ast = parser.css();
    Assert.assertNotNull(ast);
}
/**
 * Compile request to AST.
 *
 * @param path request
 * @return AST parse tree
 */
public static ParseTree parse(String path) {
    String normalizedPath = Paths.get(path).normalize().toString().replace(File.separatorChar, '/');
    if (normalizedPath.startsWith("/")) {
        normalizedPath = normalizedPath.substring(1);
    }
    ANTLRInputStream is = new ANTLRInputStream(normalizedPath);
    CoreLexer lexer = new CoreLexer(is);
    lexer.removeErrorListeners();
    lexer.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                int charPositionInLine, String msg, RecognitionException e) {
            throw new ParseCancellationException(msg, e);
        }
    });
    CoreParser parser = new CoreParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.start();
}
/**
 * Compile request to AST.
 * @param path request
 * @return AST
 */
public static ParseTree parse(String path) {
    ANTLRInputStream is = new ANTLRInputStream(path);
    CoreLexer lexer = new CoreLexer(is);
    lexer.removeErrorListeners();
    lexer.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                int charPositionInLine, String msg, RecognitionException e) {
            throw new ParseCancellationException(e);
        }
    });
    CoreParser parser = new CoreParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.start();
}
public static ParseTree parseExpression(String expression) {
    ANTLRInputStream is = new ANTLRInputStream(expression);
    ExpressionLexer lexer = new ExpressionLexer(is);
    lexer.removeErrorListeners();
    lexer.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                int charPositionInLine, String msg, RecognitionException e) {
            throw new ParseCancellationException(msg, e);
        }
    });
    ExpressionParser parser = new ExpressionParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new BailErrorStrategy());
    lexer.reset();
    return parser.start();
}
@Test
public void testParseWorkingExamples() throws IOException {
    FileVisitor<Path> workingFilesVisitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            System.out.println("Testing parser input from file \"" + file.toString() + "\"");
            ANTLRFileStream antlrStream = new ANTLRFileStream(file.toString());
            MiniJLexer lexer = new MiniJLexer(antlrStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            MiniJParser parser = new MiniJParser(tokens);
            parser.setErrorHandler(new BailErrorStrategy());
            parser.prog();
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(EXAMPLE_PROGRAM_PATH_WORKING, workingFilesVisitor);
}
@Test
public void testParseFailingExamples() throws IOException {
    FileVisitor<Path> failingFilesVisitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            System.out.println("Testing parser input from file \"" + file.toString() + "\"");
            ANTLRFileStream antlrStream = new ANTLRFileStream(file.toString());
            MiniJLexer lexer = new MiniJLexer(antlrStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            MiniJParser parser = new MiniJParser(tokens);
            parser.setErrorHandler(new BailErrorStrategy());
            /*
             * Catch the expected exception here so that every single compilation
             * unit is parsed. Otherwise this method would return after the first
             * failing piece of code.
             */
            try {
                parser.prog();
                fail("The example " + file.toString()
                        + " should have failed, but was accepted by the parser.");
            } catch (ParseCancellationException e) {
                // expected
            }
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(EXAMPLE_PROGRAM_PATH_FAILING, failingFilesVisitor);
}
/** * "First phase" parsing attempt. Provides better performance than {@link #parseLL(CompilationUnit, String)}, but * will erroneously report syntax errors when parsing script text utilizing certain parts of the grammar. * * @param compilationUnit The unit of work to compile/parse. Represents the grammar's start symbol that should be * used. * @param scriptText A plaintext representation of the HyperTalk script to parse * @return The root of the abstract syntax tree associated with the given compilation unit (i.e., {@link Script}), * or null if parsing fails. */ static Object parseSLL(CompilationUnit compilationUnit, String scriptText) { HyperTalkLexer lexer = new HyperTalkLexer(new CaseInsensitiveInputStream(scriptText)); CommonTokenStream tokens = new CommonTokenStream(lexer); HyperTalkParser parser = new HyperTalkParser(tokens); parser.setErrorHandler(new BailErrorStrategy()); parser.removeErrorListeners(); parser.getInterpreter().setPredictionMode(PredictionMode.SLL); try { ParseTree tree = compilationUnit.getParseTree(parser); return new HyperTalkTreeVisitor().visit(tree); } catch (ParseCancellationException e) { return null; } }
public static Justification generateJustification(MainProof proof, String justification) {
    ANTLRInputStream input = new ANTLRInputStream(justification);
    JustificationLexer lexer = new JustificationLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    JustificationParser parser = new JustificationParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree tree;
    try {
        tree = parser.prog();
    } catch (Exception e) {
        return null;
    }
    return new JustificationGenerator(proof).visit(tree);
}
public static FirstOrderLogicFormula generateAst(String line) {
    ANTLRInputStream input = new ANTLRInputStream(line);
    FormulaLexer lexer = new FormulaLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FormulaParser parser = new FormulaParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree tree;
    try {
        tree = parser.prog();
    } catch (Exception e) {
        return null;
    }
    return new AstGenerator().visit(tree);
}
@Override
public TransformExpressionTree compileToExpressionTree(String expression) {
    CharStream charStream = new ANTLRInputStream(expression);
    PQL2Lexer lexer = new PQL2Lexer(charStream);
    lexer.setTokenFactory(new CommonTokenFactory(true));
    TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer);
    PQL2Parser parser = new PQL2Parser(tokenStream);
    parser.setErrorHandler(new BailErrorStrategy());

    // Parse
    ParseTree parseTree = parser.expression();

    ParseTreeWalker walker = new ParseTreeWalker();
    Pql2AstListener listener = new Pql2AstListener(expression, _splitInClause);
    walker.walk(listener, parseTree);

    final AstNode rootNode = listener.getRootNode();
    return TransformExpressionTree.buildTree(rootNode);
}
@Nullable
private QueryContext parse(@Nullable String query) {
    if (query != null) {
        ANTLRInputStream is = new ANTLRInputStream(query);
        CommitQueryLexer lexer = new CommitQueryLexer(is);
        lexer.removeErrorListeners();
        lexer.addErrorListener(new BaseErrorListener() {
            @Override
            public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                    int charPositionInLine, String msg, RecognitionException e) {
                if (e != null) {
                    logger.error("Error lexing commit query", e);
                } else if (msg != null) {
                    logger.error("Error lexing commit query: " + msg);
                }
                throw new RuntimeException("Malformed commit query");
            }
        });
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        CommitQueryParser parser = new CommitQueryParser(tokens);
        parser.removeErrorListeners();
        parser.setErrorHandler(new BailErrorStrategy());
        return parser.query();
    } else {
        return null;
    }
}
public static ExprContext parse(String expr) {
    ANTLRInputStream is = new ANTLRInputStream(expr);
    ReviewAppointmentLexer lexer = new ReviewAppointmentLexer(is);
    lexer.removeErrorListeners();
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ReviewAppointmentParser parser = new ReviewAppointmentParser(tokens);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.expr();
}
@Test
public void parse() throws IOException {
    System.out.println(file.getName() + "-------------------------------");
    final LLVMLexer lexer = new LLVMLexer(new ANTLRFileStream(file.getAbsolutePath()));
    final CommonTokenStream stream = new CommonTokenStream(lexer);
    final LLVMParser parser = new LLVMParser(stream);
    parser.setErrorHandler(new BailErrorStrategy());
    final ModuleContext module = parser.module();
    System.out.println(Trees.toStringTree(module));
}
public static Term parseTerm(String s, boolean trySLL) {
    CoqFTParser p = new CoqFTParser(s);
    if (trySLL) {
        p.getInterpreter().setPredictionMode(PredictionMode.SLL);
        p.setErrorHandler(new BailErrorStrategy());
        try {
            return p.parseTerm();
        } catch (ParseCancellationException | CoqSyntaxException e) {
            p = new CoqFTParser(s);
        }
    }
    return p.parseTerm();
}
public static Tactic parseTactic(String s, boolean trySLL) {
    CoqFTParser p = new CoqFTParser(s);
    if (trySLL) {
        p.getInterpreter().setPredictionMode(PredictionMode.SLL);
        p.setErrorHandler(new BailErrorStrategy());
        try {
            return p.parseTactic();
        } catch (ParseCancellationException | CoqSyntaxException e) {
            p = new CoqFTParser(s);
        }
    }
    return p.parseTactic();
}
public static StringTemplate fromString(String pattern) {
    class DescriptiveErrorListener extends BaseErrorListener {
        public List<String> errors = new ArrayList<>();

        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                int charPositionInLine, String msg, RecognitionException e) {
            errors.add(String.format("%d:%d: %s", line, charPositionInLine, msg));
        }
    }

    final DescriptiveErrorListener error_listener = new DescriptiveErrorListener();
    final StringSubstitutionLexer lexer = new StringSubstitutionLexer(CharStreams.fromString(pattern));
    lexer.removeErrorListeners();
    lexer.addErrorListener(error_listener);
    final StringSubstitutionParser parser = new StringSubstitutionParser(new UnbufferedTokenStream(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(error_listener);
    parser.setErrorHandler(new BailErrorStrategy());

    final StringSubstitutionParser.ExprContext result = parser.expr();
    if (result.exception != null)
        throw new IllegalArgumentException("errors during parsing: " + pattern, result.exception);
    else if (!error_listener.errors.isEmpty())
        throw new IllegalArgumentException("syntax errors during parsing:\n"
                + String.join("\n", error_listener.errors.stream().map(s -> " " + s).collect(Collectors.toList())));
    return result.s;
}
public static Map<String, Any2<String, Number>> parse(String pattern) {
    class DescriptiveErrorListener extends BaseErrorListener {
        public List<String> errors = new ArrayList<>();

        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                int charPositionInLine, String msg, RecognitionException e) {
            errors.add(String.format("%d:%d: %s", line, charPositionInLine, msg));
        }
    }

    final DescriptiveErrorListener error_listener = new DescriptiveErrorListener();
    final CollectdTagsLexer lexer = new CollectdTagsLexer(new ANTLRInputStream(pattern));
    lexer.removeErrorListeners();
    lexer.addErrorListener(error_listener);
    final CollectdTagsParser parser = new CollectdTagsParser(new UnbufferedTokenStream(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(error_listener);
    parser.setErrorHandler(new BailErrorStrategy());

    final CollectdTagsParser.ExprContext result = parser.expr();
    if (result.exception != null)
        throw new IllegalArgumentException("errors during parsing: " + pattern, result.exception);
    else if (!error_listener.errors.isEmpty())
        throw new IllegalArgumentException("syntax errors during parsing:\n"
                + String.join("\n", error_listener.errors.stream().map(s -> " " + s).collect(Collectors.toList())));
    return result.result;
}
private TypescriptParser buildParser(File source) throws IOException {
    CharStream inputCharStream = CharStreams.fromPath(source.toPath());
    TokenSource tokenSource = new TypescriptLexer(inputCharStream);
    TokenStream inputTokenStream = new CommonTokenStream(tokenSource);
    TypescriptParser parser = new TypescriptParser(inputTokenStream);

    // make parser throw exception on first error
    parser.setErrorHandler(new BailErrorStrategy());

    // print detailed error messages to System.err
    parser.addErrorListener(new ConsoleErrorListener());

    return parser;
}
public static Operator gen(GeneratorContext ctx, Planner cursorMeta, String expr) {
    CharStream cs = new ANTLRInputStream(expr);
    MysqlLexer lexer = new MysqlLexer(cs);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    tokens.setTokenSource(lexer);
    MysqlParser parser = new MysqlParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    MysqlParser.ExprContext rule = parser.expr();
    return gen(ctx, cursorMeta, rule);
}
static FishParser.ScriptContext parse(CharStream cs) {
    FishLexer lexer = new FishLexer(cs);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    tokens.setTokenSource(lexer);
    FishParser parser = new FishParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    FishParser.ScriptContext script = parser.script();
    return script;
}
private static ParseTree parseSQL(String sql) {
    CharStream charStream = new ANTLRInputStream(sql);
    RQLLexer lexer = new RQLLexer(charStream);
    lexer.setTokenFactory(new CommonTokenFactory(true));
    TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer);
    RQLParser parser = new RQLParser(tokenStream);
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.root();
}
protected void initialize(String input) {
    StringTemplateCSSLexer lexer = new StringTemplateCSSLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    StringTemplateCSSParser parser = new StringTemplateCSSParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree tree = parser.css();

    Collection<ParseTree> selectorCombinations = XPath.findAll(tree, "//selectorCombination", parser);
    final StringUtils stringUtils = StringUtils.getInstance();
    this.selectors = new ArrayList<List<String>>(selectorCombinations.size());
    this.properties = new HashMap<List<String>, Map<String, String>>();
    for (ParseTree selectorCombination : selectorCombinations) {
        List<String> currentSelectors = new ArrayList<String>(selectorCombination.getChildCount());
        this.selectors.add(currentSelectors);
        for (int index = 0; index < selectorCombination.getChildCount(); index++) {
            String text = selectorCombination.getChild(index).getText();
            currentSelectors.add(text);
        }
        Map<String, String> block = retrieveProperties(selectorCombination, stringUtils);
        this.properties.put(currentSelectors, block);
    }
}
@NonNull
public static SQLiteParser createParser(@NonNull String sql) {
    CharStream source = new ANTLRInputStream(sql);
    Lexer lexer = new SQLiteLexer(source);
    TokenStream tokenStream = new CommonTokenStream(lexer);
    SQLiteParser parser = new SQLiteParser(tokenStream);
    parser.setErrorHandler(new BailErrorStrategy());
    return parser;
}
private QueryListener parse(Function<HeroicQueryParser, ParserRuleContext> op, String input) {
    final HeroicQueryLexer lexer = new HeroicQueryLexer(new ANTLRInputStream(input));
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final HeroicQueryParser parser = new HeroicQueryParser(tokens);

    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());

    final ParserRuleContext context;
    try {
        context = op.apply(parser);
    } catch (final ParseCancellationException e) {
        if (!(e.getCause() instanceof RecognitionException)) {
            throw e;
        }
        throw toParseException((RecognitionException) e.getCause());
    }

    final QueryListener listener = new QueryListener();
    ParseTreeWalker.DEFAULT.walk(listener, context);

    final Token last = lexer.getToken();
    if (last.getType() != Token.EOF) {
        throw new ParseException(
            String.format("garbage at end of string: '%s'", last.getText()),
            null, last.getLine(), last.getCharPositionInLine());
    }

    return listener;
}
/**
 * Constructs a new parser.
 * @param f the formula factory
 */
public PropositionalParser(final FormulaFactory f) {
    super(f);
    this.lexer = new PropositionalLexer(null);
    CommonTokenStream tokens = new CommonTokenStream(this.lexer);
    this.parser = new LogicNGPropositionalParser(tokens);
    this.parser.setFormulaFactory(f);
    this.lexer.removeErrorListeners();
    this.parser.removeErrorListeners();
    this.parser.setErrorHandler(new BailErrorStrategy());
}
/**
 * Constructs a new parser for pseudo boolean formulas.
 * @param f the formula factory
 */
public PseudoBooleanParser(final FormulaFactory f) {
    super(f);
    this.lexer = new PseudoBooleanLexer(null);
    CommonTokenStream tokens = new CommonTokenStream(this.lexer);
    this.parser = new LogicNGPseudoBooleanParser(tokens);
    this.parser.setFormulaFactory(f);
    this.lexer.removeErrorListeners();
    this.parser.removeErrorListeners();
    this.parser.setErrorHandler(new BailErrorStrategy());
}
public static JsonPath compile(String path) {
    JsonPathLexer lexer = new JsonPathLexer(CharStreams.fromString(path));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    JsonPathParser parser = new JsonPathParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    JsonPathParser.PathContext tree = parser.path();
    JsonPathCompiler compiler = new JsonPathCompiler();
    compiler.visit(tree);
    return compiler.pathBuilder.build();
}
public static SOQLParser.StatementContext getStatementContext(String query) {
    SOQLLexer lexer = new SOQLLexer(new ANTLRInputStream(query));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    SOQLParser parser = new SOQLParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.statement();
}
private ParserRuleContext parseQuery(String q) {
    // TODO replace AqlLexer with lexer for your Antlr4 grammar!
    Lexer qlLexer = new AqlLexer((CharStream) null);
    ParserRuleContext tree = null;

    // Like p. 111
    try {
        // Tokenize input data
        ANTLRInputStream input = new ANTLRInputStream(q);
        qlLexer.setInputStream(input);
        CommonTokenStream tokens = new CommonTokenStream(qlLexer);

        // TODO replace AqlParser with parser for your Antlr4 grammar!
        parser = new AqlParser(tokens);

        // Don't throw out erroneous stuff
        parser.setErrorHandler(new BailErrorStrategy());
        parser.removeErrorListeners();

        // Get starting rule from parser
        // TODO replace AqlParser with parser for your Antlr4 grammar!
        Method startRule = AqlParser.class.getMethod("start");
        tree = (ParserRuleContext) startRule.invoke(parser, (Object[]) null);
    }
    // Something went wrong ...
    catch (Exception e) {
        System.err.println(e.getMessage());
    }

    // Return the generated tree
    return tree;
}
private ParserRuleContext parseAnnisQuery(String query) {
    Lexer lexer = new AqlLexer((CharStream) null);
    ParserRuleContext tree = null;
    Antlr4DescriptiveErrorListener errorListener =
        new Antlr4DescriptiveErrorListener(query);

    // Like p. 111
    try {
        // Tokenize input data
        ANTLRInputStream input = new ANTLRInputStream(query);
        lexer.setInputStream(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        parser = new AqlParser(tokens);

        // Don't throw out erroneous stuff
        parser.setErrorHandler(new BailErrorStrategy());
        lexer.removeErrorListeners();
        lexer.addErrorListener(errorListener);
        parser.removeErrorListeners();
        parser.addErrorListener(errorListener);

        // Get starting rule from parser
        Method startRule = AqlParser.class.getMethod("start");
        tree = (ParserRuleContext) startRule.invoke(parser, (Object[]) null);
    }
    // Something went wrong ...
    catch (Exception e) {
        log.error("Could not parse query. Please make sure it is well-formed.");
        log.error(errorListener.generateFullErrorMsg().toString());
        addError(errorListener.generateFullErrorMsg());
    }
    return tree;
}