/**
 * Rewrites a {@code nonReserved} word in the parse tree into a plain IDENTIFIER token,
 * so later phases can treat non-reserved keywords as ordinary identifiers.
 */
@Override public void exitNonReserved(SqlBaseParser.NonReservedContext context) {
    // we can't modify the tree during rule enter/exit event handling unless we're dealing with a terminal.
    // Otherwise, ANTLR gets confused and fires spurious notifications.
    if (!(context.getChild(0) instanceof TerminalNode)) {
        int rule = ((ParserRuleContext) context.getChild(0)).getRuleIndex();
        throw new AssertionError("nonReserved can only contain tokens. Found nested rule: " + ruleNames.get(rule));
    }
    // replace nonReserved words with IDENT tokens
    context.getParent().removeLastChild();
    Token token = (Token) context.getChild(0).getPayload();
    // Re-issue the same source span as an IDENTIFIER, preserving channel and char range.
    context.getParent().addChild(new CommonToken(
        new Pair<>(token.getTokenSource(), token.getInputStream()),
        SqlBaseLexer.IDENTIFIER,
        token.getChannel(),
        token.getStartIndex(),
        token.getStopIndex()));
}
/**
 * Creates a deep copy of the given expression context: terminals, plain tokens
 * and nested expression contexts are all duplicated recursively.
 */
public static ExprContext cloneExprContext(final ExprContext expr) {
    final ExprContext copy = createContextType(expr);
    copy.copyFrom(expr);
    for (final ParseTree node : expr.children) {
        if (node instanceof TerminalNode) {
            // Terminals are recreated around the same underlying symbol.
            copy.addChild(new TerminalNodeImpl(((TerminalNode) node).getSymbol()));
        } else if (node instanceof ExprContext) {
            // Nested expressions are cloned recursively and re-linked as left/right.
            final ExprContext childCopy = cloneExprContext((ExprContext) node);
            copy.addChild(childCopy);
            setLeftRight(copy, childCopy);
        } else if (node instanceof Token) {
            copy.addChild(new CommonToken((Token) node));
        }
    }
    return copy;
}
/**
 * Creating conjunction expression.
 *
 * Builds an AND node joining the two given sub-expressions and wires up the
 * parent/child and left/right links so the result forms a consistent parse tree.
 */
public static ConjunctionContext createConjunctionContext(final ExprContext leftContext, final ExprContext rightContext) {
    final ConjunctionContext conjunctionContext = new ConjunctionContext(new ExprContext());
    // NOTE(review): magic token type 10 — sibling factories use named constants
    // (e.g. FOLParser.NOT); confirm 10 is the AND token type and prefer the named constant.
    final TerminalNodeImpl andNode = new TerminalNodeImpl(new CommonToken(10, "and"));
    // Setting context parents.
    leftContext.parent = conjunctionContext;
    andNode.parent = conjunctionContext;
    rightContext.parent = conjunctionContext;
    conjunctionContext.left = leftContext;
    conjunctionContext.right = rightContext;
    // Adding conjunction expression's children.
    conjunctionContext.addChild(leftContext);
    conjunctionContext.addChild(andNode);
    conjunctionContext.addChild(rightContext);
    return conjunctionContext;
}
/**
 * Creating disjunction expression.
 *
 * Builds an OR node joining the two given sub-expressions and wires up the
 * parent/child and left/right links so the result forms a consistent parse tree.
 */
public static DisjunctionContext createDisjunctionContext(final ExprContext leftContext, final ExprContext rightContext) {
    final DisjunctionContext disjunctionContext = new DisjunctionContext(new ExprContext());
    // NOTE(review): magic token type 12 — other factories in this class use named
    // FOLParser constants; confirm 12 is the OR token type and prefer the named constant.
    final TerminalNodeImpl orNode = new TerminalNodeImpl(new CommonToken(12, "or"));
    // Setting context parents.
    leftContext.parent = disjunctionContext;
    rightContext.parent = disjunctionContext;
    orNode.parent = disjunctionContext;
    disjunctionContext.left = leftContext;
    disjunctionContext.right = rightContext;
    // Adding disjunction expression's children.
    disjunctionContext.addChild(leftContext);
    disjunctionContext.addChild(orNode);
    disjunctionContext.addChild(rightContext);
    return disjunctionContext;
}
/** * Creating negation expression. */ public static NegationContext createNegationContext(final ExprContext expr) { final NegationContext negationContext = new NegationContext(new ExprContext()); final TerminalNodeImpl notNode = new TerminalNodeImpl(new CommonToken(FOLParser.NOT, "not")); // Setting context parents. notNode.parent = negationContext; expr.parent = negationContext; // Adding negation expression's children. negationContext.addChild(notNode); negationContext.addChild(expr); return negationContext; }
/** * Creating parentheses expression. */ public static ParenthesesContext createParenthesesContext(final ExprContext expr) { final ParenthesesContext parenthesesContext = new ParenthesesContext(new ExprContext()); final TerminalNodeImpl leftParenthes = new TerminalNodeImpl(new CommonToken(FOLParser.LP, "(")); final TerminalNodeImpl rightParenthes = new TerminalNodeImpl(new CommonToken(FOLParser.RP, ")")); // Setting context parents. leftParenthes.parent = parenthesesContext; rightParenthes.parent = parenthesesContext; expr.parent = parenthesesContext; // Adding parentheses expression's children. parenthesesContext.addChild(leftParenthes); parenthesesContext.addChild(expr); parenthesesContext.addChild(rightParenthes); return parenthesesContext; }
@Test
public void evaluate_returns_result_for_valid_CEF_string() throws Exception {
    // Parse a well-formed CEF message with short (CEF key) field names.
    final Map<String, Expression> args = ImmutableMap.of(
            CEFParserFunction.VALUE,
            new StringExpression(new CommonToken(0), "CEF:0|vendor|product|1.0|id|name|low|dvc=example.com msg=Foobar"),
            CEFParserFunction.USE_FULL_NAMES,
            new BooleanExpression(new CommonToken(0), false));
    final FunctionArgs functionArgs = new FunctionArgs(function, args);
    final Message msg = new Message("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z"));
    final CEFParserResult result = function.evaluate(functionArgs, new EvaluationContext(msg));

    assertNotNull(result);
    // Header fields.
    assertEquals(0, result.get("cef_version"));
    assertEquals("vendor", result.get("device_vendor"));
    assertEquals("product", result.get("device_product"));
    assertEquals("1.0", result.get("device_version"));
    assertEquals("id", result.get("device_event_class_id"));
    assertEquals("low", result.get("severity"));
    // Extension fields keep their short CEF keys.
    assertEquals("example.com", result.get("dvc"));
    assertEquals("Foobar", result.get("msg"));
}
@Test
public void evaluate_returns_result_for_valid_CEF_string_with_short_names_if_useFullNames_parameter_is_missing() throws Exception {
    // Only VALUE is supplied; useFullNames should default to short CEF keys.
    final Map<String, Expression> args = Collections.singletonMap(
            CEFParserFunction.VALUE,
            new StringExpression(new CommonToken(0), "CEF:0|vendor|product|1.0|id|name|low|dvc=example.com msg=Foobar"));
    final FunctionArgs functionArgs = new FunctionArgs(function, args);
    final Message msg = new Message("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z"));
    final CEFParserResult result = function.evaluate(functionArgs, new EvaluationContext(msg));

    assertNotNull(result);
    // Header fields.
    assertEquals(0, result.get("cef_version"));
    assertEquals("vendor", result.get("device_vendor"));
    assertEquals("product", result.get("device_product"));
    assertEquals("1.0", result.get("device_version"));
    assertEquals("id", result.get("device_event_class_id"));
    assertEquals("low", result.get("severity"));
    // Extension fields keep their short CEF keys by default.
    assertEquals("example.com", result.get("dvc"));
    assertEquals("Foobar", result.get("msg"));
}
@Test
public void evaluate_returns_result_for_valid_CEF_string_with_full_names() throws Exception {
    final CEFParserFunction function = new CEFParserFunction(new MetricRegistry());
    // useFullNames = true: extension keys should be mapped to their long names.
    final Map<String, Expression> args = ImmutableMap.of(
            CEFParserFunction.VALUE,
            new StringExpression(new CommonToken(0), "CEF:0|vendor|product|1.0|id|name|low|dvc=example.com msg=Foobar"),
            CEFParserFunction.USE_FULL_NAMES,
            new BooleanExpression(new CommonToken(0), true));
    final FunctionArgs functionArgs = new FunctionArgs(function, args);
    final Message msg = new Message("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z"));
    final CEFParserResult result = function.evaluate(functionArgs, new EvaluationContext(msg));

    assertNotNull(result);
    // Header fields.
    assertEquals(0, result.get("cef_version"));
    assertEquals("vendor", result.get("device_vendor"));
    assertEquals("product", result.get("device_product"));
    assertEquals("1.0", result.get("device_version"));
    assertEquals("id", result.get("device_event_class_id"));
    assertEquals("low", result.get("severity"));
    // Extension fields are expanded to their full names.
    assertEquals("example.com", result.get("deviceAddress"));
    assertEquals("Foobar", result.get("message"));
}
@Test
public void evaluate_returns_result_without_message_field() throws Exception {
    // CEF string has no msg= extension; the result must not invent one.
    final Map<String, Expression> args = ImmutableMap.of(
            CEFParserFunction.VALUE,
            new StringExpression(new CommonToken(0), "CEF:0|vendor|product|1.0|id|name|low|dvc=example.com"),
            CEFParserFunction.USE_FULL_NAMES,
            new BooleanExpression(new CommonToken(0), false));
    final FunctionArgs functionArgs = new FunctionArgs(function, args);
    final Message msg = new Message("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z"));
    final CEFParserResult result = function.evaluate(functionArgs, new EvaluationContext(msg));

    assertNotNull(result);
    assertEquals(0, result.get("cef_version"));
    assertEquals("vendor", result.get("device_vendor"));
    assertEquals("product", result.get("device_product"));
    assertEquals("1.0", result.get("device_version"));
    assertEquals("id", result.get("device_event_class_id"));
    assertEquals("low", result.get("severity"));
    assertEquals("example.com", result.get("dvc"));
    // No message extension was present in the input.
    assertFalse(result.containsKey("message"));
}
public static void wipeCharPositionInfoAndWhitespaceTokens(CodeBuffTokenStream tokens) { tokens.fill(); CommonToken dummy = new CommonToken(Token.INVALID_TYPE, ""); dummy.setChannel(Token.HIDDEN_CHANNEL); Token firstRealToken = tokens.getNextRealToken(-1); for (int i = 0; i<tokens.size(); i++) { if ( i==firstRealToken.getTokenIndex() ) continue; // don't wack first token CommonToken t = (CommonToken)tokens.get(i); if ( t.getText().matches("\\s+") ) { tokens.getTokens().set(i, dummy); // wack whitespace token so we can't use it during prediction } else { t.setLine(0); t.setCharPositionInLine(-1); } } }
/**
 * Interprets and assembles the program contained in {@code programFile}.
 * Resolves references, performs includes for non-script programs, and records a
 * compiler error when the result contains no remote process.
 *
 * @param programFile the main program file to assemble
 * @return the assembled program
 * @throws IOException if the file cannot be read
 */
public MplProgram assemble(File programFile) throws IOException {
    programFile = getCanonicalFile(programFile);
    // Don't cache the first interpreter, because its program is mutable and will be changed
    MplInterpreter main = MplInterpreter.interpret(programFile, context);
    MplProgram program = main.getProgram();
    programBuilder = new MplProgramBuilder(program, programFile);
    resolveReferences(main.getReferences().values());
    if (!program.isScript()) {
        doIncludes();
    }
    MplProgram result = programBuilder.getProgram();
    boolean containsRemoteProcess = result.getProcesses().stream()//
        .anyMatch(p -> p.getType() == ProcessType.REMOTE);
    // Only report the missing-remote-process error when no other errors exist,
    // to avoid drowning out more specific diagnostics.
    if (context.getErrors().isEmpty() && !containsRemoteProcess) {
        context.addError(
            new CompilerException(new MplSource(programFile, "", new CommonToken(MplLexer.PROCESS)),
                "This file does not include any remote processes"));
    }
    return result;
}
/**
 * Records a lexer syntax error as an {@link Issue}. Lexer errors carry no
 * offending token, so one is synthesized from the character at the error position.
 */
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
        int line, int charPositionInLine, String msg, RecognitionException e) {
    if ( offendingSymbol==null ) {
        // Lexer error: build an INVALID token covering the single offending character.
        final Lexer lexer = (Lexer) recognizer;
        int i = lexer.getCharIndex();
        final int n = lexer.getInputStream().size();
        if (i >= n) {
            i = n - 1; // clamp to the last character when the error is at end of input
        }
        final String text = lexer.getInputStream().getText(new Interval(i, i));
        CommonToken t = (CommonToken) lexer.getTokenFactory().create(Token.INVALID_TYPE, text);
        t.setStartIndex(i);
        t.setStopIndex(i);
        t.setLine(line);
        t.setCharPositionInLine(charPositionInLine);
        offendingSymbol = t;
    }
    // System.out.println("lex error: " + offendingSymbol);
    issues.add(new Issue(msg, (Token)offendingSymbol));
}
/**
 * Logs a parser syntax error with the rule invocation stack and aborts parsing
 * by throwing a {@link RuntimeException}.
 *
 * @see BaseErrorListener#reportAmbiguity
 * @throws RuntimeException always, carrying the formatted error message
 */
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
        int charPositionInLine, String msg, RecognitionException e) {
    List<String> stack = ((Parser) recognizer).getRuleInvocationStack();
    Collections.reverse(stack);
    String logMsg = "Parser ERROR: line " + line + ":" + charPositionInLine + " at "
            + offendingSymbol + ": " + msg;
    // Cast to the Token interface rather than CommonToken: a custom TokenFactory may
    // supply other Token implementations, and getText() is all that is needed here.
    Token tok = (Token) offendingSymbol;
    String s = tok.getText();
    logMsg += ": offending token " + s;
    // Constant-first comparison also guards against a null token text.
    if ("<EOF>".equals(s)) {
        logMsg += ". Look for tag=(null or empty).";
    } else {
        try {
            Integer.parseInt(s); // value unused: only checking whether the token is numeric
        } catch (NumberFormatException ex) {
            logMsg += " not a number. ";
        }
    }
    FixRulesParserErrorListener.logger.error(logMsg + " Tree = {}", stack);
    throw new RuntimeException(logMsg);
}
@Test @Ignore public void testAllRpgSources() throws Exception { List<String> errors = new ArrayList<String>(); List<String> files = new ArrayList<String>(); //File dir = new File(getResourcePath("./")); File dir = new File("c:\\temp\\rpg\\all"); int count=0; for (File file : dir.listFiles()) { if (isRpgSourceFile(file)) { String rpgsource = TestUtils.loadFile(file); rpgsource = TestUtils.padSourceLines(rpgsource, false); List<CommonToken> tokenList = TestUtils.getParsedTokens(rpgsource, errors); if (errors.size() > 0) { System.out.println("The failing file is :" + file.getName()); if(count++>10) break; errors.clear(); } } } assertThat(errors, is(empty())); }
@Test public void testCSpec() { String inputString = " C NL9FACTOR1 XXAL FACTOR2\r\n"; inputString = TestUtils.padSourceLines(inputString, false); // TestUtils.printTokens(inputString, false); // TestUtils.showParseTree(inputString, false); List<String> errors = new ArrayList<String>(); List<CommonToken> tokenList = TestUtils.getParsedTokens(inputString, errors); assertThat(errors, is(empty())); assertEquals(RpgLexer.CS_FactorContent, tokenList.get(4).getType()); assertEquals(RpgLexer.CS_OperationAndExtender, tokenList.get(5).getType()); assertEquals(RpgLexer.CS_FactorContent, tokenList.get(6).getType()); assertEquals("FACTOR1", tokenList.get(4).getText()); assertEquals("XXAL", tokenList.get(5).getText()); assertEquals("FACTOR2", tokenList.get(6).getText()); }
@Override public TransformExpressionTree compileToExpressionTree(String expression) { CharStream charStream = new ANTLRInputStream(expression); PQL2Lexer lexer = new PQL2Lexer(charStream); lexer.setTokenFactory(new CommonTokenFactory(true)); TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer); PQL2Parser parser = new PQL2Parser(tokenStream); parser.setErrorHandler(new BailErrorStrategy()); // Parse ParseTree parseTree = parser.expression(); ParseTreeWalker walker = new ParseTreeWalker(); Pql2AstListener listener = new Pql2AstListener(expression, _splitInClause); walker.walk(listener, parseTree); final AstNode rootNode = listener.getRootNode(); return TransformExpressionTree.buildTree(rootNode); }
/**
 * Creates a synthetic layout token (newline/indent/dedent) copied from an
 * existing token, labeled for debugging when the type is one of the layout types.
 */
private Token createToken(int tokenType, Token copyFrom) {
    final String label;
    if (tokenType == nlToken) {
        label = "newline";
    } else if (tokenType == indentToken) {
        label = "indent";
    } else if (tokenType == dedentToken) {
        label = "dedent";
    } else {
        label = null; // not a layout token; no debug label
    }
    CommonToken token = new InjectedToken(copyFrom, label);
    token.setType(tokenType);
    return token;
}
/**
 * Installs a synthetic STOP token just after {@code indexOfLastToken} so that
 * consumers stop there; a negative index restores the previously saved token.
 * The System.out calls appear to be intentional debug tracing.
 */
public void setIndexOfLastToken(int indexOfLastToken) {
    System.out.println("setIndexOfLastToken("+indexOfLastToken+")");
    if ( indexOfLastToken<0 ) {
        // Restore mode: put the saved original token back where the STOP marker was.
        System.out.println("replacing "+saveToken.getTokenIndex()+" with "+saveToken);
        tokens.set(saveToken.getTokenIndex(), saveToken);
        // this.indexOfLastToken = indexOfLastToken;
        return;
    }
    int i = indexOfLastToken + 1; // we want to keep token at indexOfLastToken
    sync(i);
    // Remember the original so a later negative-index call can restore it.
    saveToken = tokens.get(i);
    System.out.println("saving "+saveToken);
    CommonToken stopToken = new CommonToken(saveToken);
    stopToken.setType(STOP_TOKEN_TYPE);
    System.out.println("setting "+i+" to "+stopToken);
    tokens.set(i, stopToken);
    // this.indexOfLastToken = indexOfLastToken;
}
public static Token nextRealToken(CommonTokenStream tokens, int i) { int n = tokens.size(); i++; // search after current i token if ( i>=n || i<0 ) return null; Token t = tokens.get(i); while ( t.getChannel()==Token.HIDDEN_CHANNEL ) { if ( t.getType()==Token.EOF ) { TokenSource tokenSource = tokens.getTokenSource(); if ( tokenSource==null ) { return new CommonToken(Token.EOF, "EOF"); } TokenFactory<?> tokenFactory = tokenSource.getTokenFactory(); if ( tokenFactory==null ) { return new CommonToken(Token.EOF, "EOF"); } return tokenFactory.create(Token.EOF, "EOF"); } i++; if ( i>=n ) return null; // just in case no EOF t = tokens.get(i); } return t; }
/**
 * Returns the token whose [start,stop] character range contains {@code offset},
 * or {@code null} when the offset is out of range or falls between tokens.
 */
public static Token getTokenUnderCursor(CommonTokenStream tokens, int offset) {
    // Comparator that reports 0 for ANY overlap, so binarySearch on a one-char
    // probe token finds the token containing the offset. Note this violates the
    // usual total-order contract but is safe here because tokens don't overlap.
    Comparator<Token> cmp = new Comparator<Token>() {
        @Override
        public int compare(Token a, Token b) {
            if ( a.getStopIndex() < b.getStartIndex() ) return -1;
            if ( a.getStartIndex() > b.getStopIndex() ) return 1;
            return 0;
        }
    };
    if ( offset<0 || offset >= tokens.getTokenSource().getInputStream().size() ) return null;
    // Probe token spanning exactly [offset, offset].
    CommonToken key = new CommonToken(Token.INVALID_TYPE, "");
    key.setStartIndex(offset);
    key.setStopIndex(offset);
    List<Token> tokenList = tokens.getTokens();
    Token tokenUnderCursor = null;
    int i = Collections.binarySearch(tokenList, key, cmp);
    if ( i>=0 ) tokenUnderCursor = tokenList.get(i);
    return tokenUnderCursor;
}
/**
 * Writes the error message to the output sink and remembers the offending
 * token's text, when the offending symbol is a CommonToken.
 */
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
        int charPositionInLine, String msg, RecognitionException e) {
    output.write(msg);
    output.write("\n");
    if (offendingSymbol instanceof org.antlr.v4.runtime.CommonToken) {
        symbol = ((CommonToken) offendingSymbol).getText();
    }
}
/**
 * Formats the given message pattern and records it as a diagnostic on the
 * resource. Line, column and location are taken from the first {@link RamlToken}
 * found among the arguments; defaults are used when none is present.
 */
public void addError(final String messagePattern, final Object... arguments) {
    final String message = MessageFormat.format(messagePattern, arguments);
    // Use the first RAML token argument (if any) as the error location.
    final Optional<RamlToken> token = Stream.of(arguments)
        .filter(RamlToken.class::isInstance)
        .map(RamlToken.class::cast)
        .findFirst();
    final int line = token.map(CommonToken::getLine).orElse(-1);
    final int column = token.map(CommonToken::getCharPositionInLine).orElse(-1);
    final String location = token.map(RamlToken::getLocation).orElse("<UNKNOWN>");
    resource.getErrors().add(RamlDiagnostic.of(message, location, line, column));
}
/** * Extract the token list from the Cypher input. Uses ANTLR classes to perform this. Some tokens * are excluded, such as EOF and semi colons. * * @param cyp Cypher input as text. * @param DEBUG_PRINT Print out debug statements or not. * @return A list of tokens as deciphered by the ANTLR classes, based on the openCypher grammar. */ public static ArrayList<String> getTokenList(String cyp, boolean DEBUG_PRINT) { CypherLexer lexer = new CypherLexer(new ANTLRInputStream(cyp)); CommonTokenStream tokens = new CommonTokenStream(lexer); tokens.fill(); CypherParser parser = new CypherParser(tokens); // dangerous - comment out if something is going wrong. parser.removeErrorListeners(); ParseTree tree = parser.cypher(); ParseTreeWalker walker = new ParseTreeWalker(); cypherWalker = null; cypherWalker = new CypherWalker(); walker.walk(cypherWalker, tree); if (DEBUG_PRINT) cypherWalker.printInformation(); ArrayList<String> tokenList = new ArrayList<>(); for (Object t : tokens.getTokens()) { CommonToken tok = (CommonToken) t; String s = tok.getText().toLowerCase(); // exclude some tokens from the list of tokens. This includes the EOF pointer, // semi-colons, and alias artifacts. if (!" ".equals(s) && !"<eof>".equals(s) && !";".equals(s) && !"as".equals(s) && !cypherWalker.getAlias().contains(s)) { tokenList.add(s); } } return tokenList; }
/**
 * Shrinks the token's character range by one on each side, effectively
 * dropping the surrounding quote characters. No-op for non-CommonToken types.
 */
private void unQuoteToken(Token token) {
    if (!(token instanceof CommonToken)) {
        return;
    }
    CommonToken quoted = (CommonToken) token;
    quoted.setStartIndex(quoted.getStartIndex() + 1);
    quoted.setStopIndex(quoted.getStopIndex() - 1);
}
/**
 * Parses while matching an AQL expression.
 *
 * For a non-empty input, runs the AQL lexer/parser with an AST-building listener
 * and returns its result. For a null or empty input, returns an AstResult
 * wrapping a single ErrorExpression with an ERROR diagnostic.
 *
 * @param expression
 *            the expression to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlExpression(String expression) {
    final IQueryBuilderEngine.AstResult result;
    if (expression != null && expression.length() > 0) {
        AstBuilderListener astBuilder = AQL56Compatibility.createAstBuilderListener(queryEnvironment);
        // Unbuffered streams + copy-on-fetch token factory keep memory bounded.
        CharStream input = new UnbufferedCharStream(new StringReader(expression), expression.length());
        QueryLexer lexer = new QueryLexer(input);
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
        QueryParser parser = new QueryParser(tokens);
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.expression();
        result = astBuilder.getAstResult();
    } else {
        // Null/empty input: synthesize an error AST with a diagnostic instead of parsing.
        ErrorExpression errorExpression = (ErrorExpression) EcoreUtil
            .create(AstPackage.eINSTANCE.getErrorExpression());
        List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<org.eclipse.acceleo.query.ast.Error>(1);
        errors.add(errorExpression);
        final Map<Object, Integer> positions = new HashMap<Object, Integer>();
        if (expression != null) {
            positions.put(errorExpression, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0,
            "null or empty string.", new Object[] {errorExpression }));
        result = new AstResult(errorExpression, positions, positions, errors, diagnostic);
    }
    return result;
}
/**
 * Parses while matching an AQL expression.
 *
 * For a non-empty input, runs the AQL lexer/parser against the {@code typeLiteral}
 * rule with an AST-building listener and returns its result. For a null or empty
 * input, returns an AstResult wrapping a single ErrorTypeLiteral with an ERROR diagnostic.
 *
 * @param queryEnvironment
 *            the {@link IReadOnlyQueryEnvironment}
 * @param type
 *            the type to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlTypeLiteral(IReadOnlyQueryEnvironment queryEnvironment, String type) {
    final IQueryBuilderEngine.AstResult result;
    if (type != null && type.length() > 0) {
        AstBuilderListener astBuilder = AQL56Compatibility
            .createAstBuilderListener((IQueryEnvironment) queryEnvironment);
        // Unbuffered streams + copy-on-fetch token factory keep memory bounded.
        CharStream input = new UnbufferedCharStream(new StringReader(type), type.length());
        QueryLexer lexer = new QueryLexer(input);
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
        QueryParser parser = new QueryParser(tokens);
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.typeLiteral();
        result = astBuilder.getAstResult();
    } else {
        // Null/empty input: synthesize an error AST with a diagnostic instead of parsing.
        ErrorTypeLiteral errorTypeLiteral = (ErrorTypeLiteral) EcoreUtil
            .create(AstPackage.eINSTANCE.getErrorTypeLiteral());
        List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<org.eclipse.acceleo.query.ast.Error>(1);
        errors.add(errorTypeLiteral);
        final Map<Object, Integer> positions = new HashMap<Object, Integer>();
        if (type != null) {
            positions.put(errorTypeLiteral, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0,
            "null or empty type.", new Object[] {errorTypeLiteral }));
        result = new AstResult(errorTypeLiteral, positions, positions, errors, diagnostic);
    }
    return result;
}
@Test
public void evaluate_returns_null_for_empty_CEF_string() throws Exception {
    // An empty VALUE must yield a null result rather than an exception.
    final Map<String, Expression> args = Collections.singletonMap(
            CEFParserFunction.VALUE, new StringExpression(new CommonToken(0), ""));
    final FunctionArgs functionArgs = new FunctionArgs(function, args);
    final Message msg = new Message("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z"));
    final CEFParserResult result = function.evaluate(functionArgs, new EvaluationContext(msg));

    assertNull(result);
}
@Test
public void evaluate_returns_null_for_invalid_CEF_string() throws Exception {
    // A malformed CEF header must yield a null result rather than an exception.
    final Map<String, Expression> args = ImmutableMap.of(
            CEFParserFunction.VALUE, new StringExpression(new CommonToken(0), "CEF:0|Foobar"),
            CEFParserFunction.USE_FULL_NAMES, new BooleanExpression(new CommonToken(0), false));
    final FunctionArgs functionArgs = new FunctionArgs(function, args);
    final Message msg = new Message("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z"));
    final CEFParserResult result = function.evaluate(functionArgs, new EvaluationContext(msg));

    assertNull(result);
}
/**
 * Copy constructor: duplicates every token of the given stream into this one
 * as a fresh CommonToken, then resets the stream position.
 */
public CodeBuffTokenStream(CommonTokenStream stream) {
    super(stream.getTokenSource());
    this.fetchedEOF = false;
    for (Token original : stream.getTokens()) {
        tokens.add(new CommonToken(original));
    }
    reset();
}
/**
 * Builds a column-name parse subtree (column_name_ -> column_name -> identifier)
 * wrapping the given output field's name as a backtick-quoted identifier token.
 */
private static RuleContext createColumnName_(Expr_functionContext rule, OutputField field) {
    Column_name_Context column_name_ = new Column_name_Context(rule.getParent(), rule.invokingState);
    // NOTE(review): the child context is parented to column_name_.getParent()
    // (== rule.getParent()) rather than to column_name_ itself — confirm intentional.
    Column_nameContext column_name = new Column_nameContext(column_name_.getParent(), rule.invokingState);
    IdentifierContext identifier = new IdentifierContext(column_name, rule.invokingState);
    CommonToken token = CommonTokenFactory.DEFAULT.create(
        MysqlParser.BACKTICK_QUOTED_IDENTIFIER,
        '`' + field.name + '`'
    );
    TerminalNode term = new TerminalNodeImpl(token);
    identifier.addChild(term);
    column_name.addChild(identifier);
    column_name_.addChild(column_name);
    return column_name_;
}
/**
 * Parses an RQL statement into a parse tree, bailing out on the first syntax error.
 */
private static ParseTree parseSQL(String sql) {
    // Lexer over the input with a copy-on-fetch token factory for unbuffered streaming.
    RQLLexer lexer = new RQLLexer(new ANTLRInputStream(sql));
    lexer.setTokenFactory(new CommonTokenFactory(true));
    TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
    RQLParser parser = new RQLParser(tokens);
    // Fail fast instead of attempting error recovery.
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.root();
}
/**
 * Wraps a parsed script file in an implicit process containing the buffered
 * chain parts, registers it on the program, and clears the listener state.
 */
@Override
public void exitScriptFile(ScriptFileContext ctx) {
    // Scripts have no explicit process declaration; synthesize one from a PROCESS token.
    process = new MplProcess(toSource(new CommonToken(MplLexer.PROCESS)));
    process.setChainParts(chainBuffer.getChainParts());
    program.addProcess(process);
    process = null; // reset per-process listener state
    popChainBuffer();
}
protected MplSourceBuilder $MplSource() { return new MplSourceBuilder()// .withFile(new File(""))// .withToken(new CommonToken(MplLexer.PROCESS))// .withLine($String())// ; }