private BooleanExpListNode generateAST(String code) {
    FormalPropertyDescriptionLexer l = new FormalPropertyDescriptionLexer(new ANTLRInputStream(code));
    CommonTokenStream ts = new CommonTokenStream(l);
    FormalPropertyDescriptionParser p = new FormalPropertyDescriptionParser(ts);
    BooleanExpScope declaredVars = new BooleanExpScope();
    preAndPostConditionsDescription.getSymbolicVariableList().forEach((v) -> {
        declaredVars.addTypeForId(v.getId(), v.getInternalTypeContainer());
    });
    return translator.generateFromSyntaxTree(
            p.booleanExpList(),
            electionDescription.getInputType().getType(),
            electionDescription.getOutputType().getType(),
            declaredVars);
}

private SourceContext buildAntlrTree(String source) {
    ANTLRInputStream stream = new ANTLRInputStream(source);
    PainlessLexer lexer = new EnhancedPainlessLexer(stream, sourceName);
    PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer));
    ParserErrorStrategy strategy = new ParserErrorStrategy(sourceName);
    lexer.removeErrorListeners();
    parser.removeErrorListeners();
    if (settings.isPicky()) {
        setupPicky(parser);
    }
    parser.setErrorHandler(strategy);
    return parser.source();
}

public PlanNode callSql(String sqlstring, ArrayList list) throws FileNotFoundException, IOException {
    // Substitute each '?' placeholder with the corresponding parameter value.
    for (int i = 0; i < list.size(); i++) {
        sqlstring = sqlstring.replaceFirst("\\?", (String) list.get(i));
    }
    ANTLRInputStream input = new ANTLRInputStream(new ByteArrayInputStream(sqlstring.getBytes()));
    PLSQLLexer lexer = new PLSQLLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    PLSQLParser parser = new PLSQLParser(tokens);
    parser.setBuildParseTree(true);
    ParseTree tree = parser.sql_statement();
    ParseTreeWalker walker = new ParseTreeWalker();
    SQLModel model = new SQLModel();
    walker.walk(new MyListener(model), tree);
    return model.current;
}

public GraphParser(InputStream is) throws GraphParserException {
    DOTLexer lexer = null;
    DOTParser parser = null;
    try {
        lexer = new DOTLexer(new ANTLRInputStream(is));
        lexer.addErrorListener(new ErrorListener());
        parser = new DOTParser(new CommonTokenStream(lexer));
        parser.setErrorHandler(new ExceptionErrorStrategy());
        parser.addErrorListener(new ErrorListener());
        ParseTree tree = parser.graph();
        ParseTreeWalker.DEFAULT.walk(new NodeListener(), tree);
        ParseTreeWalker.DEFAULT.walk(new EdgeListener(), tree);
    } catch (Throwable t) {
        if (mErrMsg != null) {
            throw new GraphParserException(mErrMsg, t);
        }
        if (lexer != null) {
            mErrMsg = "at line " + lexer.getLine() + ":" + lexer.getCharPositionInLine();
            throw new GraphParserException(mErrMsg, t);
        }
        throw new GraphParserException(t);
    }
    if (mErrMsg != null) {
        throw new GraphParserException(mErrMsg);
    }
}

public void scriptValidation(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    String content = req.getParameter("content");
    if (StringUtils.isNotBlank(content)) {
        ScriptType type = ScriptType.valueOf(req.getParameter("type"));
        ANTLRInputStream antlrInputStream = new ANTLRInputStream(content);
        RuleParserLexer lexer = new RuleParserLexer(antlrInputStream);
        CommonTokenStream stream = new CommonTokenStream(lexer);
        RuleParserParser parser = new RuleParserParser(stream);
        parser.removeErrorListeners();
        ScriptErrorListener errorListener = new ScriptErrorListener();
        parser.addErrorListener(errorListener);
        switch (type) {
            case Script:
                parser.ruleSet();
                break;
            case DecisionNode:
                parser.condition();
                break;
            case ScriptNode:
                parser.actions();
                break;
        }
        List<ErrorInfo> infos = errorListener.getInfos();
        writeObjectToJson(resp, infos);
    }
}

public RuleSet build(String script) throws IOException {
    ANTLRInputStream antlrInputStream = new ANTLRInputStream(script);
    RuleParserLexer lexer = new RuleParserLexer(antlrInputStream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    RuleParserParser parser = new RuleParserParser(tokenStream);
    ScriptDecisionTableErrorListener errorListener = new ScriptDecisionTableErrorListener();
    parser.addErrorListener(errorListener);
    BuildRulesVisitor visitor = new BuildRulesVisitor(contextBuilders, tokenStream);
    RuleSet ruleSet = visitor.visitRuleSet(parser.ruleSet());
    rebuildRuleSet(ruleSet);
    String error = errorListener.getErrorMessage();
    if (error != null) {
        throw new RuleException("Script parse error:" + error);
    }
    return ruleSet;
}

private static <T extends Parser> T makeBasicParser(Class<T> parserClass, ANTLRInputStream stream,
        String parsedObjectName, List<AntlrError> errors) {
    Lexer lexer;
    Parser parser;
    if (parserClass.isAssignableFrom(SQLParser.class)) {
        lexer = new SQLLexer(stream);
        parser = new SQLParser(new CommonTokenStream(lexer));
    } else if (parserClass.isAssignableFrom(IgnoreListParser.class)) {
        lexer = new IgnoreListLexer(stream);
        parser = new IgnoreListParser(new CommonTokenStream(lexer));
    } else {
        throw new IllegalArgumentException("Unknown parser class: " + parserClass);
    }
    CustomAntlrErrorListener err = new CustomAntlrErrorListener(parsedObjectName, errors);
    lexer.removeErrorListeners();
    lexer.addErrorListener(err);
    parser.removeErrorListeners();
    parser.addErrorListener(err);
    return parserClass.cast(parser);
}

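// A minimal usage sketch for makeBasicParser. The input string, the "schema.sql" label
// and the freshly created error list are illustrative placeholders, not part of the
// original code; only the classes referenced by the method above are assumed to exist.
List<AntlrError> errors = new ArrayList<>();
SQLParser sqlParser = makeBasicParser(SQLParser.class,
        new ANTLRInputStream("SELECT 1;"), "schema.sql", errors);
// A start rule of SQLParser can now be invoked; syntax problems end up in 'errors'.
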
@Test
public void itShouldGenerateLingualQuery() throws ParseException {
    String dateInString = "2012-01-10 00:00:00";
    remoteFilterJson.setCondition("(f1 not in ('1') and f2 = '" + dateInString + "') or (f3=23.23 and f4=1234.123)");
    String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
    SimpleDateFormat formatter = new SimpleDateFormat(DATE_FORMAT);
    formatter.setTimeZone(TimeZone.getDefault());
    Date date = formatter.parse(dateInString);
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
    dateInString = formatter.format(date);
    ANTLRInputStream stream = new ANTLRInputStream(remoteFilterJson.getCondition());
    QueryParserLexer lexer = new QueryParserLexer(stream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    QueryParserParser parser = new QueryParserParser(tokenStream);
    parser.removeErrorListeners();
    LingualQueryCreator customVisitor = new LingualQueryCreator(remoteFilterJson.getSchema());
    String lingualExpression = customVisitor.visit(parser.eval());
    Assert.assertEquals(
            "((\"f1\" is not null and \"f1\" not in('1')) and \"f2\" = timestamp '" + dateInString
                    + "') or (\"f3\" = cast(23.23 as float) and \"f4\" = cast(1234.123 as double))",
            lingualExpression);
}

@Test
public void itShouldGenerateLingualQueryWithNotLikeClause() throws ParseException {
    String dateInString = "2012-01-10 00:00:00";
    remoteFilterJson.setCondition("(f1 not like 'condition' and f2 = '" + dateInString + "') or (f3=23.23 and f4=1234.123)");
    String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
    SimpleDateFormat formatter = new SimpleDateFormat(DATE_FORMAT);
    formatter.setTimeZone(TimeZone.getDefault());
    Date date = formatter.parse(dateInString);
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
    dateInString = formatter.format(date);
    ANTLRInputStream stream = new ANTLRInputStream(remoteFilterJson.getCondition());
    QueryParserLexer lexer = new QueryParserLexer(stream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    QueryParserParser parser = new QueryParserParser(tokenStream);
    parser.removeErrorListeners();
    LingualQueryCreator customVisitor = new LingualQueryCreator(remoteFilterJson.getSchema());
    String lingualExpression = customVisitor.visit(parser.eval());
    Assert.assertEquals(
            "((\"f1\" is not null and \"f1\" not like 'condition') and \"f2\" = timestamp '" + dateInString
                    + "') or (\"f3\" = cast(23.23 as float) and \"f4\" = cast(1234.123 as double))",
            lingualExpression);
}

@Test
public void itShouldGenerateLingualQueryWithBetweenClause() {
    remoteFilterJson.setCondition("f3 between 10 and 20 and f3 BETWEEN 10 AND 20");
    ANTLRInputStream stream = new ANTLRInputStream(remoteFilterJson.getCondition());
    QueryParserLexer lexer = new QueryParserLexer(stream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    QueryParserParser parser = new QueryParserParser(tokenStream);
    parser.removeErrorListeners();
    LingualQueryCreator customVisitor = new LingualQueryCreator(remoteFilterJson.getSchema());
    String lingualExpression = customVisitor.visit(parser.eval());
    Assert.assertEquals(
            "\"f3\" between cast(10 as float) and cast(20 as float) and \"f3\" BETWEEN cast(10 as float) AND cast(20 as float)",
            lingualExpression);
}

@Test
public void itShouldGenerateLingualQueryWithMultipleCondition() {
    remoteFilterJson.setCondition("f1 = 'or maybe' AND f1 <> 'or mat' and f3 between 10 and 20");
    ANTLRInputStream stream = new ANTLRInputStream(remoteFilterJson.getCondition());
    QueryParserLexer lexer = new QueryParserLexer(stream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    QueryParserParser parser = new QueryParserParser(tokenStream);
    parser.removeErrorListeners();
    LingualQueryCreator customVisitor = new LingualQueryCreator(remoteFilterJson.getSchema());
    String lingualExpression = customVisitor.visit(parser.eval());
    Assert.assertEquals(
            "\"f1\" = 'or maybe' AND \"f1\" <> 'or mat' and \"f3\" between cast(10 as float) and cast(20 as float)",
            lingualExpression);
}

@Test
public void itShouldGenerateLingualQueryForBetweenClauseAndTimestampDataType() throws ParseException {
    String dateInString1 = "1999-12-31 18:30:00";
    String dateInString2 = "2000-01-11 18:30:00";
    remoteFilterJson.setCondition("f2 between '" + dateInString1 + "' AND '" + dateInString2 + "'");
    String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
    SimpleDateFormat formatter = new SimpleDateFormat(DATE_FORMAT);
    formatter.setTimeZone(TimeZone.getDefault());
    Date date1 = formatter.parse(dateInString1);
    Date date2 = formatter.parse(dateInString2);
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
    dateInString1 = formatter.format(date1);
    dateInString2 = formatter.format(date2);
    ANTLRInputStream stream = new ANTLRInputStream(remoteFilterJson.getCondition());
    QueryParserLexer lexer = new QueryParserLexer(stream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    QueryParserParser parser = new QueryParserParser(tokenStream);
    parser.removeErrorListeners();
    LingualQueryCreator customVisitor = new LingualQueryCreator(remoteFilterJson.getSchema());
    String lingualExpression = customVisitor.visit(parser.eval());
    Assert.assertEquals(
            "\"f2\" between timestamp '" + dateInString1 + "' AND timestamp '" + dateInString2 + "'",
            lingualExpression);
}

/**
 * Checks that no exception is generated for a YANG file with valid syntax.
 */
@Test
public void checkValidYangFileForNoSyntaxError() throws IOException {
    ANTLRInputStream input = new ANTLRFileStream("src/test/resources/YangFileWithoutSyntaxError.yang");

    // Create a lexer that feeds off of the input char stream.
    GeneratedYangLexer lexer = new GeneratedYangLexer(input);

    // Create a buffer of tokens pulled from the lexer.
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    // Create a parser that feeds off the token buffer.
    GeneratedYangParser parser = new GeneratedYangParser(tokens);

    // Remove the console error listener.
    parser.removeErrorListeners();

    // Create an instance of the customized error listener.
    ParseTreeErrorListener parseTreeErrorListener = new ParseTreeErrorListener();

    // Add the customized error listener to catch errors during parsing.
    parser.addErrorListener(parseTreeErrorListener);

    // Begin parsing the YANG file and generate the parse tree.
    ParseTree tree = parser.yangfile();
}

/**
 * Checks that an exception is generated for a YANG file with invalid syntax.
 */
@Test
public void checkInvalidYangFileForSyntaxError() throws IOException {
    // Expect the exception that occurs during parsing.
    thrown.expect(ParserException.class);
    thrown.expect(CustomExceptionMatcher.errorLocation(3, 0));
    thrown.expectMessage("no viable alternative at input 'yang-version 1\\nnamespace'");

    ANTLRInputStream input = new ANTLRFileStream("src/test/resources/YangFileWithSyntaxError.yang");

    // Create a lexer that feeds off of the input char stream.
    GeneratedYangLexer lexer = new GeneratedYangLexer(input);

    // Create a buffer of tokens pulled from the lexer.
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    // Create a parser that feeds off the token buffer.
    GeneratedYangParser parser = new GeneratedYangParser(tokens);

    // Remove the console error listener.
    parser.removeErrorListeners();

    // Create an instance of the customized error listener.
    ParseTreeErrorListener parseTreeErrorListener = new ParseTreeErrorListener();

    // Add the customized error listener to catch errors during parsing.
    parser.addErrorListener(parseTreeErrorListener);

    // Begin parsing the YANG file and generate the parse tree.
    ParseTree tree = parser.yangfile();
}

public static <L extends Lexer, P extends Parser> P newParser(
        Function<CharStream, L> lexerFactory,
        Function<TokenStream, P> parserFactory,
        String input,
        boolean useBailErrorStrategy,
        boolean removeErrorListeners) {
    CharStream charStream = new ANTLRInputStream(input);
    L lexer = lexerFactory.apply(charStream);
    if (removeErrorListeners) {
        lexer.removeErrorListeners();
    }
    TokenStream tokenStream = new CommonTokenStream(lexer);
    P parser = parserFactory.apply(tokenStream);
    if (useBailErrorStrategy) {
        parser.setErrorHandler(new BailErrorStrategy());
    }
    if (removeErrorListeners) {
        parser.removeErrorListeners();
    }
    return parser;
}

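// A usage sketch for newParser. MyLexer, MyParser and the compilationUnit start rule are
// hypothetical names standing in for any ANTLR-generated lexer/parser pair; they are not
// part of the original code. Generated constructors match Function<CharStream, L> and
// Function<TokenStream, P>, so method references work directly.
MyParser parser = newParser(MyLexer::new, MyParser::new, "a + b", true, true);
ParseTree tree = parser.compilationUnit(); // with the bail strategy, the first syntax error throws ParseCancellationException
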
/**
 * Parse an interval, for example <tt>[1,-]</tt> or <tt>-</tt> (a wildcard) or <tt>[1,4]</tt>.
 * Only fixed values are allowed, no variables.
 *
 * @param intervalAsString the string to be parsed.
 * @return a LowerBoundedInterval as the runtime representation of interval strings.
 * @throws ParseException in case the string doesn't fit the given fixed-interval grammar.
 */
public static LowerBoundedInterval parse(String intervalAsString) throws ParseException {
    CharStream charStream = new ANTLRInputStream(intervalAsString);
    CellExpressionLexer lexer = new CellExpressionLexer(charStream);
    TokenStream tokens = new CommonTokenStream(lexer);
    CellExpressionParser parser = new CellExpressionParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(new ThrowingErrorListener());
    try {
        CellExpressionParser.Fixed_intervalContext ctx = parser.fixed_interval();
        if (ctx == null) {
            throw new ParseException(0, 0, "Expected fixed interval");
        }
        return INSTANCE.visit(ctx);
    } catch (ParseRuntimeException runtimeException) {
        throw runtimeException.getParseException();
    }
}

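// The ThrowingErrorListener used above is project-specific and not shown here. A minimal
// sketch of the general fail-fast pattern it presumably follows; the class name and the
// use of ParseCancellationException are assumptions, not the original implementation.
class FailFastErrorListener extends BaseErrorListener {
    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg, RecognitionException e) {
        // Abort parsing on the first reported syntax error.
        throw new ParseCancellationException("line " + line + ":" + charPositionInLine + " " + msg);
    }
}
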
/**
 * Parse a filter expression.
 *
 * @param filter the filter expression
 * @return compiled nodes
 */
public List<SquigglyNode> parse(String filter) {
    filter = StringUtils.trim(filter);

    if (StringUtils.isEmpty(filter)) {
        return Collections.emptyList();
    }

    // get it from the cache if we can
    List<SquigglyNode> cachedNodes = CACHE.getIfPresent(filter);
    if (cachedNodes != null) {
        return cachedNodes;
    }

    SquigglyExpressionLexer lexer = ThrowingErrorListener.overwrite(
            new SquigglyExpressionLexer(new ANTLRInputStream(filter)));
    SquigglyExpressionParser parser = ThrowingErrorListener.overwrite(
            new SquigglyExpressionParser(new CommonTokenStream(lexer)));

    Visiter visiter = new Visiter();
    List<SquigglyNode> nodes = visiter.visit(parser.parse());

    CACHE.put(filter, nodes);
    return nodes;
}

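// The CACHE field used above is not shown in this snippet; judging by getIfPresent/put it
// memoizes parsed filters by their string form. A minimal sketch of how such a field might
// be declared with Guava's CacheBuilder; the size limit and the choice of Guava itself are
// assumptions for illustration only.
private static final Cache<String, List<SquigglyNode>> CACHE =
        CacheBuilder.newBuilder().maximumSize(1_000).build();
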
public void walkTree() {
    try {
        init();
        QueryLexer lexer = new QueryLexer(new ANTLRInputStream(new StringReader(queryStr)));
        CommonTokenStream token = new CommonTokenStream(lexer);
        QueryParser parser = new QueryParser(token);
        parser.setBuildParseTree(true);
        QueryParser.ScriptContext tree = parser.script(); // parse
        ParseTreeWalker parseTreeWalker = new ParseTreeWalker();
        parseTreeWalker.walk(this, tree);
        // check for key# if size = 1
        checkPredicateStack();
    } catch (Exception ex) {
        logger.error(ex.getMessage());
        throw new QueryException(ex.getMessage(), ex);
    }
}

public DelayedResolutionPredicateRunner(String queryString, Map<String, Column> columnMap,
        Map<String, Boolean> remoteSourcesLoaded) {
    this.queryString = queryString;
    this.columnMap = columnMap;
    this.remoteSourcesLoaded = remoteSourcesLoaded;
    try {
        final StringReader stringReader = new StringReader(queryString);
        final ANTLRInputStream ais = new ANTLRInputStream(stringReader);
        final PredicateLexer lexer = new PredicateLexer(ais);
        final CommonTokenStream token = new CommonTokenStream(lexer);
        final PredicateParser parser = new PredicateParser(token);
        parser.setBuildParseTree(true);
        scriptContext = parser.script();
    } catch (Exception ex) {
        logger.error(ex);
        throw new QueryException(ex.getMessage(), ex);
    }
}

public static List<ExecutionPlan> parse(String source) throws SiddhiParserException {
    try {
        ANTLRInputStream input = new ANTLRInputStream(source);
        SiddhiQLGrammarLexer lexer = new SiddhiQLGrammarLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        SiddhiQLGrammarParser parser = new SiddhiQLGrammarParser(tokens);
        ParseTree tree = parser.query(); // parse
        SiddhiQLGrammarVisitor eval = new SiddhiQLGrammarBasedVisitorImpl();
        System.out.println(eval.visit(tree));
        // return (List<ExecutionPlan>) eval.visit(tree);
        return null;
    } catch (Throwable e) {
        throw new SiddhiParserException(e.getMessage(), e);
    }
}

private static DocumentImpl parse(final String cql, final boolean showAstTree) {
    final ANTLRInputStream is = new ANTLRInputStream(cql);
    final CorpusQLLexer lexer = new CorpusQLLexer(is);
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final CorpusQLParser parser = new CorpusQLParser(tokens);
    final ParseTree tree = parser.query();

    // useful for debugging
    if (showAstTree) {
        System.out.println(tree.toStringTree(parser));
    }

    final MemTreeBuilder builder = new MemTreeBuilder();
    builder.startDocument();
    final CorpusQLXMLVisitor xmlVisitor = new CorpusQLXMLVisitor(builder);
    xmlVisitor.visit(tree);
    builder.endDocument();
    return builder.getDocument();
}

private Object readMatrix(List<Object> parameters) {
    String rationalString = in.nextLine();
    MatrixLexer matrixLexer = new MatrixLexer(new ANTLRInputStream(rationalString));
    MatrixParser matrixParser = new MatrixParser(new CommonTokenStream(matrixLexer));
    matrixParser.setErrorHandler(new BailErrorStrategy());
    try {
        MatrixParser.MatrixContext matrixContext = matrixParser.matrix();
        return Matrix.fromMatrixContext(matrixContext, Scope.NULL_SCOPE);
    } catch (ParseCancellationException e) {
        throw new InvalidReadRuntimeError("Invalid input read from stdin! Expected matrix format!");
    }
}

/**
 * When a term saved in the list of saved terms is selected, its AST can be viewed
 * via the menu or the Ctrl+I / Cmd+I shortcut.
 *
 * @param evt The event that triggered the action.
 */
private void menuViewASTActionPerformed(java.awt.event.ActionEvent evt) {
    if (!termSavedList.isSelectionEmpty()) {
        int index = termSavedList.getSelectedIndex();
        String term = saveTermModel.getElementAt(index);
        try {
            ANTLRInputStream inputStream = new ANTLRInputStream(term);
            LambdaLexer lexer = new LambdaLexer(inputStream);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            LambdaParser parser = new LambdaParser(tokens);
            ParseTree tree = parse(term);
            TreeViewer viewer = new TreeViewer(Arrays.asList(parser.getRuleNames()), tree);
            viewer.open();
        } catch (ParseCancellationException e) {
            workSpace.append("Don't try to watch AST of illformed term please");
        }
    }
}

/**
 * Performs the first stage of assembling a program, but stops once it
 * determines whether the program is valid or not. This is useful when only
 * the validity of the program needs to be known.
 *
 * @param input the program string to assemble
 * @return any problems with the program (empty list if the program is valid)
 */
public static List<Problem> checkForProblems(String input) {
    StoreProblemLogger log = new StoreProblemLogger();
    input += '\n'; // to parse correctly, the input must end with a newline
    SimpLexer lexer = new SimpLexer(new ANTLRInputStream(input));
    SimpParser parser = new SimpParser(new CommonTokenStream(lexer));
    // prevent outputting to the console
    lexer.removeErrorListeners();
    parser.removeErrorListeners();
    // try to parse a program from the input
    SimpParser.ProgramContext tree = parser.program();
    ProgramExtractor extractor = new ProgramExtractor(log);
    ParseTreeWalker.DEFAULT.walk(extractor, tree);
    return log.getProblems();
}

private void compileFiles(List<RawFile> files, OOPSourceCodeModel srcModel, List<String> projectFileTypes) {
    for (RawFile file : files) {
        try {
            CharStream charStream = new ANTLRInputStream(file.content());
            GolangLexer lexer = new GolangLexer(charStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            GolangParser parser = new GolangParser(tokens);
            // Configure the parser before invoking the start rule; setting the error
            // strategy and prediction mode after sourceFile() has run has no effect.
            parser.setErrorHandler(new BailErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            SourceFileContext sourceFileContext = parser.sourceFile();
            ParseTreeWalker walker = new ParseTreeWalker();
            GolangBaseListener listener = new GoLangTreeListener(srcModel, projectFileTypes, file);
            walker.walk(listener, sourceFileContext);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

private SyntaxErrorListener getSyntaxTreeErrors(String iFlowResource) {
    InputStream inputStream = null;
    try {
        File file = new File(getClass().getResource(iFlowResource).getFile());
        inputStream = new FileInputStream(file);
        CharStream cs = new ANTLRInputStream(inputStream);
        TokenStream tokenStream = new CommonTokenStream(new WUMLLexer(cs));
        WUMLParser parser = new WUMLParser(tokenStream);
        SyntaxErrorListener errorListener = new SyntaxErrorListener();
        parser.addErrorListener(errorListener);
        ParseTree tree = parser.sourceFile();
        return errorListener;
    } catch (Exception e) {
        return null;
    }
}

/**
 * <p>Create an Elasticsearch query for the given search string but do not
 * apply the {@link ElasticsearchQueryOptimizer} to it.</p>
 *
 * @param search the search string
 * @return the Elasticsearch query (may be null)
 */
protected JsonObject compileQueryNoOptimize(String search) {
    if (search == null || search.isEmpty()) {
        // match everything by default
        return matchAllQuery();
    }

    // parse query
    QueryLexer lexer = new QueryLexer(new ANTLRInputStream(search.trim()));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    QueryParser parser = new QueryParser(tokens);
    QueryContext ctx = parser.query();

    // compile query to QueryBuilder
    QueryCompilerListener listener = new QueryCompilerListener();
    ParseTreeWalker.DEFAULT.walk(listener, ctx);
    if (listener.result.isEmpty()) {
        return matchAllQuery();
    }
    return listener.result.pop();
}

private ScimFilterParser getParser(String filter) throws Exception {
    log.info(" getParser() ");

    // Get lexer
    ANTLRInputStream input = new ANTLRInputStream(filter);
    ScimFilterLexer lexer = new ScimFilterLexer(input);

    // Get list of matched tokens
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    // Pass tokens to the parser
    ScimFilterParser parser = new ScimFilterParser(tokens);
    parser.setBuildParseTree(true);
    parser.setTrimParseTree(true);
    parser.setProfile(true);
    parser.removeErrorListeners();
    parser.setErrorHandler(new ScimFilterErrorHandler());
    return parser;
}

private static CommandResult executeCommand(String command, Transputer[] transputers,
        PrintWriter output, PrintWriter errOutput) {
    DebuggerCommandLexer commandLexer = new DebuggerCommandLexer(new ANTLRInputStream(command));
    commandLexer.removeErrorListener(ConsoleErrorListener.INSTANCE);
    CommonTokenStream tokenStream = new CommonTokenStream(commandLexer);
    ErrorListener errorListener = new ErrorListener();
    DebuggerCommandParser commandParser = new DebuggerCommandParser(tokenStream);
    commandParser.addErrorListener(errorListener);
    commandParser.removeErrorListener(ConsoleErrorListener.INSTANCE);
    ParseTree commandTree = commandParser.command();
    if (errorListener.errors != 0) {
        output.println("Command not recognised.");
        output.flush();
        return CommandResult.NOT_RECOGNISED;
    }
    CommandExecutor executor = new CommandExecutor(transputers, output, errOutput);
    return executor.visit(commandTree);
}

public ParseTree makeParseTree(InputStream inputStream, Function<OccamParser, ParseTree> startRule,
        ErrorListener errorListener) {
    try {
        ANTLRInputStream antlrInputStream = new ANTLRInputStream(inputStream);
        OccamLexer synLexer = new OccamLexer(antlrInputStream);
        CommonTokenStream tokenStream = new CommonTokenStream(synLexer);
        OccamParser occamParser = new OccamParser(tokenStream);
        occamParser.addErrorListener(errorListener);
        return startRule.apply(occamParser);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        return null;
    }
}

public static Program parse(String source) {
    RankPLLexer lexer = new RankPLLexer(new ANTLRInputStream(source));
    TokenStream tokens = new CommonTokenStream(lexer);
    RankPLParser parser = new RankPLParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ConcreteParser classVisitor = new ConcreteParser();

    // Parse
    Program program = null;
    try {
        program = (Program) classVisitor.visit(parser.program());
    } catch (ParseCancellationException e) {
        System.out.println("Syntax error");
        // Re-parse without the bail strategy so the default console listener
        // reports the syntax errors, then give up and return null.
        lexer = new RankPLLexer(new ANTLRInputStream(source));
        tokens = new CommonTokenStream(lexer);
        parser = new RankPLParser(tokens);
        classVisitor = new ConcreteParser();
        try {
            program = (Program) classVisitor.visit(parser.program());
        } catch (Exception ex) {
            // Ignore
        }
        return null;
    }
    return program;
}

/**
 * Parse the given Thrift {@code text}, using the given {@code location}
 * to anchor parsed elements within the file.
 *
 * @param location the {@link Location} of the data being parsed.
 * @param text the text to be parsed.
 * @param reporter an {@link ErrorReporter} to collect warnings.
 * @return a representation of the parsed Thrift data.
 */
public static ThriftFileElement parse(Location location, String text, ErrorReporter reporter) {
    ANTLRInputStream charStream = new ANTLRInputStream(text);
    AntlrThriftLexer lexer = new AntlrThriftLexer(charStream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    AntlrThriftParser antlrParser = new AntlrThriftParser(tokenStream);
    ThriftListener thriftListener = new ThriftListener(tokenStream, reporter, location);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.walk(thriftListener, antlrParser.document());
    if (reporter.hasError()) {
        String errorReports = Joiner.on('\n').join(reporter.formattedReports());
        String message = String.format(Locale.US, "Syntax errors in %s:\n%s", location, errorReports);
        throw new IllegalStateException(message);
    }
    return thriftListener.buildFileElement();
}

@Override
public Sequence eval(final Sequence[] args, final Sequence contextSequence) throws XPathException {
    final ANTLRInputStream is = new ANTLRInputStream(args[0].getStringValue());
    final CorpusQLLexer lexer = new CorpusQLLexer(is);
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final CorpusQLParser parser = new CorpusQLParser(tokens);
    final ParseTree tree = parser.query();

    final MemTreeBuilder builder = new MemTreeBuilder();
    builder.startDocument();
    final CorpusQLXMLVisitor xmlVisitor = new CorpusQLXMLVisitor(builder);
    xmlVisitor.visit(tree);
    builder.endDocument();
    return builder.getDocument();
}

public Boolean validateJSON() {
    ANTLRInputStream input = new ANTLRInputStream(json_template);
    JSONLexer lexer = new JSONLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    JSONParser parser = new JSONParser(tokens);
    BuildJsonConfigErrorListener errorListener = new BuildJsonConfigErrorListener();
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    ParseTree tree = parser.json();
    BuildJsonParseTreeConfigVisitor eval = new BuildJsonParseTreeConfigVisitor(inputSchema);
    eval.visit(tree);
    return true;
}

public SGFReader(String sgfString) {
    ANTLRInputStream inputStream = new ANTLRInputStream(sgfString);
    SGFLexer lexer = new SGFLexer(inputStream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    parser = new SGFParser(tokens);
    visitor = new SGFVisitor();
}

@Before
public void commonSetup() {
    ANTLRInputStream inputStream = new ANTLRInputStream("");
    sgfLexer = new SGFLexer(inputStream);
    CommonTokenStream tokenStream = new CommonTokenStream(sgfLexer);
    parser = new SGFParser(tokenStream);
    StringWriter errorWriter = new StringWriter();
    errorListener = new SGFErrorListener(errorWriter);
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    visitor = new SGFVisitor();
}

/**
 * Parses the expression using ANTLR, aborting the parse on the first error.
 *
 * @param expression an expression, not {@code null}
 * @return a {@link ParseTree} for further processing
 * @throws ExpressionException if the expression cannot be parsed
 */
ParseTree parseExpression(String expression) throws ExpressionException {
    if (expression == null) {
        throw new IllegalArgumentException("expression must not be null");
    }
    ANTLRInputStream in = new ANTLRInputStream(expression);
    FunctionExpressionLexer lexer = new CapitulatingFunctionExpressionLexer(in);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FunctionExpressionParser parser = new FunctionExpressionParser(tokens);
    parser.removeErrorListeners();
    lexer.removeErrorListeners();
    ExpressionErrorListener listener = new ExpressionErrorListener();
    parser.addErrorListener(listener);
    lexer.addErrorListener(listener);
    parser.setErrorHandler(new CapitulatingErrorStrategy());
    try {
        ParseTree tree = parser.operationExp();
        if (listener.containsError()) {
            throw new ExpressionException(listener.getErrorMessage(), listener.getErrorLine());
        } else {
            return tree;
        }
    } catch (CapitulatingRuntimeException e) {
        if (listener.containsError()) {
            throw new ExpressionException(listener.getErrorMessage(), listener.getErrorLine());
        } else {
            // cannot happen since the parser and lexer always register the error
            // before trying to recover
            throw new ExpressionException("Unknown error");
        }
    }
}

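// ExpressionErrorListener above is project-specific and not shown here. A minimal sketch
// of a listener that records only the first error for later inspection; the class name,
// fields and accessor names are assumptions chosen to match the calls in parseExpression.
class FirstErrorListener extends BaseErrorListener {
    private String errorMessage;
    private int errorLine = -1;

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg, RecognitionException e) {
        if (errorMessage == null) { // keep only the first reported error
            errorMessage = msg;
            errorLine = line;
        }
    }

    boolean containsError() { return errorMessage != null; }
    String getErrorMessage() { return errorMessage; }
    int getErrorLine() { return errorLine; }
}
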
/**
 * Constructor.
 *
 * @param styledDocument the StyledDocument instance to analyse
 */
public BooleanExpANTLRHandler(StyledDocument styledDocument) {
    try {
        this.styledDocument = styledDocument;
        lexer = new FormalPropertyDescriptionLexer(new ANTLRInputStream(
                styledDocument.getText(0, styledDocument.getLength())));
        CommonTokenStream ts = new CommonTokenStream(lexer);
        parser = new FormalPropertyDescriptionParser(ts);
    } catch (BadLocationException ex) {
        Logger.getLogger(BooleanExpANTLRHandler.class.getName()).log(Level.SEVERE, null, ex);
    }
}

/**
 * Parses the current input of a BooleanExpCodeArea and returns a
 * FormalPropertyDescriptionParser.BooleanExpListContext object which can then be used
 * to build an AST from the input.
 *
 * @return a BooleanExpListContext node from the ANTLR-generated parse tree.
 */
public FormalPropertyDescriptionParser.BooleanExpListContext getParseTree() {
    String text = null;
    try {
        text = styledDocument.getText(0, styledDocument.getLength());
    } catch (BadLocationException e) {
        e.printStackTrace();
    }
    lexer.setInputStream(new ANTLRInputStream(text));
    CommonTokenStream ts = new CommonTokenStream(lexer);
    parser.setTokenStream(ts);
    return parser.booleanExpList();
}