@Override
protected void reportNoViableAlternative(Parser recognizer, NoViableAltException e) {
    // change error message from default implementation
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens != null) {
        if (e.getStartToken().getType() == Token.EOF) {
            input = "the end";
        } else {
            input = escapeWSAndQuote(tokens.getText(e.getStartToken(), e.getOffendingToken()));
        }
    } else {
        input = escapeWSAndQuote("<unknown input>");
    }
    String msg = "inadmissible input at " + input;
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
@Override
public void recover(final Parser recognizer, final RecognitionException re) {
    final Token token = re.getOffendingToken();
    String message;
    if (token == null) {
        message = "no parse token found.";
    } else if (re instanceof InputMismatchException) {
        message = "unexpected token [" + getTokenErrorDisplay(token) + "]"
                + " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
    } else if (re instanceof NoViableAltException) {
        if (token.getType() == PainlessParser.EOF) {
            message = "unexpected end of script.";
        } else {
            message = "invalid sequence of tokens near [" + getTokenErrorDisplay(token) + "].";
        }
    } else {
        message = "unexpected token near [" + getTokenErrorDisplay(token) + "].";
    }
    Location location = new Location(sourceName, token == null ? -1 : token.getStartIndex());
    throw location.createError(new IllegalArgumentException(message, re));
}
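// A minimal sketch of how a custom error strategy such as the ones above is usually installed.
// MyLexer, MyParser, and the compilationUnit() start rule are assumed names for illustration,
// not part of the snippets in this collection.
public static void parseWithCustomStrategy(String source, ANTLRErrorStrategy strategy) {
    MyLexer lexer = new MyLexer(new ANTLRInputStream(source));
    MyParser parser = new MyParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(strategy);   // e.g. an instance of one of the strategies above
    parser.compilationUnit();           // syntax errors now surface according to the installed strategy
}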
/**
 * Make sure we don't attempt to recover inline; if the parser successfully
 * recovers, it won't throw an exception.
 */
@Override
public Token recoverInline(Parser recognizer) throws RecognitionException {
    InputMismatchException e = new InputMismatchException(recognizer);
    String policies = recognizer.getInputStream().getText();
    StringTokenizer tk = new StringTokenizer(policies, ";");
    String policy = "";
    int idx = 0;
    while (tk.hasMoreTokens()) {
        policy = tk.nextToken();
        idx += policy.length();
        if (idx >= e.getOffendingToken().getStartIndex()) {
            break;
        }
    }
    String message = Messages.get(Messages.DEFAULT_LOCALE, "error_invalid_firewallconfig",
            new Object[] { e.getOffendingToken().getText(), policy });
    throw new RuntimeException(message);
}
private static <T extends Parser> T makeBasicParser(Class<T> parserClass, ANTLRInputStream stream,
        String parsedObjectName, List<AntlrError> errors) {
    Lexer lexer;
    Parser parser;
    if (parserClass.isAssignableFrom(SQLParser.class)) {
        lexer = new SQLLexer(stream);
        parser = new SQLParser(new CommonTokenStream(lexer));
    } else if (parserClass.isAssignableFrom(IgnoreListParser.class)) {
        lexer = new IgnoreListLexer(stream);
        parser = new IgnoreListParser(new CommonTokenStream(lexer));
    } else {
        throw new IllegalArgumentException("Unknown parser class: " + parserClass);
    }
    CustomAntlrErrorListener err = new CustomAntlrErrorListener(parsedObjectName, errors);
    lexer.removeErrorListeners();
    lexer.addErrorListener(err);
    parser.removeErrorListeners();
    parser.addErrorListener(err);
    return parserClass.cast(parser);
}
public static <L extends Lexer, P extends Parser> P newParser(
        Function<CharStream, L> lexerFactory,
        Function<TokenStream, P> parserFactory,
        String input,
        boolean useBailErrorStrategy,
        boolean removeErrorListeners) {
    CharStream charStream = new ANTLRInputStream(input);
    L lexer = lexerFactory.apply(charStream);
    if (removeErrorListeners) {
        lexer.removeErrorListeners();
    }
    TokenStream tokenStream = new CommonTokenStream(lexer);
    P parser = parserFactory.apply(tokenStream);
    if (useBailErrorStrategy) {
        parser.setErrorHandler(new BailErrorStrategy());
    }
    if (removeErrorListeners) {
        parser.removeErrorListeners();
    }
    return parser;
}
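// Assumed usage sketch for newParser(...) above; JsonLexer, JsonParser, and the json() start rule
// are hypothetical generated names used only for illustration.
public static void tryStrictParse(String text) {
    JsonParser parser = newParser(JsonLexer::new, JsonParser::new, text, true, true);
    try {
        parser.json();
    } catch (ParseCancellationException ex) {
        // with BailErrorStrategy the first syntax error aborts the parse instead of recovering
    }
}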
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
        int charPositionInLine, String msg, RecognitionException e) {
    List<String> stack = ((Parser) recognizer).getRuleInvocationStack();
    Collections.reverse(stack);
    System.err.println("rule stack: " + stack);
    System.err.println("line " + line + ":" + charPositionInLine + " at " + offendingSymbol + ": " + msg);
    String rule = "rule stack: " + stack;
    String mensaje = "line " + line + ":" + charPositionInLine + " at " + offendingSymbol + ": " + msg + "\n\r";
    agregarLog("An unexpected error has occurred " + "\n" + mensaje, line, charPositionInLine, true);
}
public static AstBuilder createAstBuilder(CompilationUnit source, TokenStream tokens) {
    KalangParser p = new KalangParser(tokens);
    AstBuilder sp = new AstBuilder(source, p);
    p.setErrorHandler(new DefaultErrorStrategy() {
        @Override
        public void reportError(Parser recognizer, RecognitionException e) {
            String msg = AntlrErrorString.exceptionString(recognizer, e);
            Token end = e.getOffendingToken();
            Token start;
            RuleContext ctx = e.getCtx();
            if (ctx instanceof ParserRuleContext) {
                start = ((ParserRuleContext) ctx).getStart();
            } else {
                start = end;
            }
            sp.getDiagnosisReporter().report(Diagnosis.Kind.ERROR, msg, start, end);
        }
    });
    return sp;
}
@Override
public KalangParser createParser(CompilationUnit compilationUnit, CommonTokenStream tokenStream) {
    KalangParser parser = new KalangParser(tokenStream);
    parser.setErrorHandler(new DefaultErrorStrategy() {
        @Override
        public void reportError(Parser recognizer, RecognitionException e) {
            String msg = AntlrErrorString.exceptionString(recognizer, e);
            Diagnosis diagnosis = new Diagnosis(
                    compilationUnit.getCompileContext(),
                    Diagnosis.Kind.ERROR,
                    OffsetRangeHelper.getOffsetRange(e.getOffendingToken()),
                    msg,
                    compilationUnit.getSource());
            diagnosisHandler.handleDiagnosis(diagnosis);
        }
    });
    return parser;
}
public LangDescriptor(String name, String corpusDir, String fileRegex,
        Class<? extends Lexer> lexerClass, Class<? extends Parser> parserClass,
        String startRuleName, int indentSize, int singleLineCommentType) {
    this.name = name;
    this.corpusDir = corpusDir;
    this.fileRegex = fileRegex;
    this.lexerClass = lexerClass;
    this.parserClass = parserClass;
    this.startRuleName = startRuleName;
    this.indentSize = indentSize;
    this.singleLineCommentType = singleLineCommentType;
}
public static Pair<Parser, Lexer> parsePHP(String filePath) {
    AntlrCaseInsensitiveFileStream input;
    try {
        input = new AntlrCaseInsensitiveFileStream(filePath);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    PHPLexer lexer = new PHPLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    PHPParser parser = new InterruptablePHPParser(tokens, filePath);
    /* turn on SLL prediction mode to speed up parsing */
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    return new Pair<Parser, Lexer>(parser, lexer);
}
public static Document processFile(String filePath) {
    Pair<Parser, Lexer> pl = parsePHP(filePath);
    PHPParser parser = (PHPParser) pl.a;
    parser.setBuildParseTree(true);
    /*
     * htmlDocument is the start rule (the top-level rule)
     * for the PHP grammar
     */
    ParserRuleContext tree = parser.htmlDocument();
    List<String> ruleNames = Arrays.asList(parser.getRuleNames());
    Map<Integer, String> invTokenMap = getInvTokenMap(parser);
    TokenStream tokenStream = parser.getTokenStream();
    ParseTreeDOMSerializer ptSerializer = new ParseTreeDOMSerializer(ruleNames, invTokenMap, tokenStream);
    ParseTreeWalker.DEFAULT.walk(ptSerializer, tree);
    return ptSerializer.getDOMDocument();
}
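// Follow-up sketch: turn the DOM Document returned by processFile(...) into XML text using only the
// standard javax.xml.transform API. This helper is illustrative and not part of the original code.
public static String domToString(Document doc) throws TransformerException {
    Transformer transformer = TransformerFactory.newInstance().newTransformer();
    transformer.setOutputProperty(OutputKeys.INDENT, "yes");
    StringWriter out = new StringWriter();
    transformer.transform(new DOMSource(doc), new StreamResult(out));
    return out.toString();
}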
public void reportError(Parser recognizer, RecognitionException e) {
    if (!this.inErrorRecoveryMode(recognizer)) {
        this.beginErrorCondition(recognizer);
        if (e instanceof NoViableAltException) {
            this.reportNoViableAlternative(recognizer, (NoViableAltException) e);
        } else if (e instanceof InputMismatchException) {
            this.reportInputMismatch(recognizer, (InputMismatchException) e);
        } else if (e instanceof FailedPredicateException) {
            this.reportFailedPredicate(recognizer, (FailedPredicateException) e);
        } else {
            System.err.println("unknown recognition error type: " + e.getClass().getName());
            recognizer.notifyErrorListeners(e.getOffendingToken(), e.getMessage(), e);
        }
    }
}
protected void reportNoViableAlternative(Parser recognizer, NoViableAltException e) {
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens != null) {
        if (e.getStartToken().getType() == Token.EOF) {
            input = "<EOF>";
        } else {
            input = tokens.getText(e.getStartToken(), e.getOffendingToken());
        }
    } else {
        input = "<unknown input>";
    }
    String msg = "no viable alternative at input " + this.escapeWSAndQuote(input);
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
private String regStr(ParseTree tree, Parser p) {
    if (tree instanceof VariableContext) {
        return preProcessor.getVarName(tree);
    } else if (tree instanceof PredicateContext) {
        String functor = WAMTokenizer.predicateFunctor((PredicateContext) tree);
        if (tree.getChildCount() > 1) {
            functor += "(";
            ParseTree parenthesized = tree.getChild(1);
            ParseTree argseq = parenthesized.getChild(1);
            // argument nodes alternate with comma tokens, hence the /2 step
            int nrArgs = ((argseq.getChildCount() - 1) / 2) + 1;
            for (int j = 0; j < nrArgs; j++) {
                Integer child = getPrimeRegister(argseq.getChild(j * 2).getChild(0));
                // negative values denote Y (permanent) registers, non-negative values X (temporary) registers
                String childReg = child < 0 ? ("Y" + (child * -1)) : "X" + (child + 1);
                functor += childReg + (j != (nrArgs - 1) ? "," : "");
            }
            functor += ")";
        }
        return functor;
    }
    return " error ";
}
/**
 * {@inheritDoc}
 */
@Override
public void reportNoViableAlternative(@NotNull Parser recognizer, @NotNull NoViableAltException e) {
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens != null) {
        if (e.getStartToken().getType() == Token.EOF) {
            input = "<EOF>";
        } else {
            input = getText(tokens, e.getStartToken(), e.getOffendingToken());
        }
    } else {
        input = "<unknown input>";
    }
    String msg = "no viable alternative at input " + escapeWSAndQuote(input);
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
@Override
public void recover(Parser recognizer, RecognitionException e) {
    for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) {
        context.exception = e;
    }
    if (PredictionMode.LL.equals(recognizer.getInterpreter().getPredictionMode())) {
        if (e instanceof NoViableAltException) {
            this.reportNoViableAlternative(recognizer, (NoViableAltException) e);
        } else if (e instanceof InputMismatchException) {
            this.reportInputMismatch(recognizer, (InputMismatchException) e);
        } else if (e instanceof FailedPredicateException) {
            this.reportFailedPredicate(recognizer, (FailedPredicateException) e);
        }
    }
    throw new ParseCancellationException(e);
}
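// Hedged sketch of the common two-stage parse that a bail-out recover() like the one above enables:
// try fast SLL prediction first, and only fall back to full LL on a syntax error. MyParser and its
// program() start rule are assumed names used only for illustration.
public static ParserRuleContext twoStageParse(MyParser parser) {
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.setErrorHandler(new BailErrorStrategy());
    try {
        return parser.program();
    } catch (ParseCancellationException ex) {
        parser.reset();                       // rewind the token stream and parser state
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        parser.setErrorHandler(new DefaultErrorStrategy());
        return parser.program();              // reparse with full context and normal recovery
    }
}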
/**
 * @see BaseErrorListener#syntaxError
 */
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
        int charPositionInLine, String msg, RecognitionException e) {
    List<String> stack = ((Parser) recognizer).getRuleInvocationStack();
    Collections.reverse(stack);
    String logMsg = "Parser ERROR: line " + line + ":" + charPositionInLine + " at " + offendingSymbol + ": " + msg;
    CommonToken tok = (CommonToken) offendingSymbol;
    String s = tok.getText();
    logMsg += ": offending token " + s;
    if (s.equals("<EOF>")) {
        logMsg += ". Look for tag=(null or empty).";
    } else {
        try {
            Integer.parseInt(s);
        } catch (NumberFormatException ex) {
            logMsg += " not a number. ";
        }
    }
    FixRulesParserErrorListener.logger.error(logMsg + " Tree = {}", stack);
    throw new RuntimeException(logMsg);
}
public ANTLRParseTreeToPSIConverter(Language language, Parser parser, PsiBuilder builder) {
    this.language = language;
    this.builder = builder;
    this.tokenElementTypes = PSIElementTypeFactory.getTokenIElementTypes(language);
    this.ruleElementTypes = PSIElementTypeFactory.getRuleIElementTypes(language);
    for (ANTLRErrorListener listener : parser.getErrorListeners()) {
        if (listener instanceof SyntaxErrorListener) {
            syntaxErrors = ((SyntaxErrorListener) listener).getSyntaxErrors();
            for (SyntaxError error : syntaxErrors) {
                // record first error per token
                int startIndex = error.getOffendingSymbol().getStartIndex();
                if (!tokenToErrorMap.containsKey(startIndex)) {
                    tokenToErrorMap.put(startIndex, error);
                }
            }
        }
    }
}
/**
 * Create a CRSX ANTLR listener for CRSX4.
 * @param sink
 * @param prefix Prefix to apply to constructor names
 * @param metachar Language specific meta variable prefix
 * @param parser
 * @param bounds
 */
public SinkAntlrListener(org.transscript.runtime.Sink sink, String prefix, String metachar, Parser parser,
        Map<String, org.transscript.runtime.Variable> bounds) {
    this.sink4 = sink;
    this.consCount = new ArrayDeque<>();
    this.ruleContext = new ArrayDeque<>();
    this.parser = parser;
    this.prefix = prefix;
    this.metachar = metachar;
    this.state = State.PARSE;
    this.sort = TokenSort.STRING;
    this.binderNames = new HashMap<>();
    this.bounds = new ArrayDeque<>();
    if (bounds != null)
        this.bounds.addAll(bounds.values());
    this.freshes = new ArrayDeque<>();
    this.embedCrsx4 = prefix.equals("Text4_") || prefix.equals("TransScript_");
    this.nilDesc = sink.context().lookupDescriptor("Nil");
    this.consDesc = sink.context().lookupDescriptor("Cons");
}
public StatisticsParserATNSimulator(Parser parser, ATN atn) {
    super(parser, atn);
    decisionInvocations = new long[atn.decisionToState.size()];
    fullContextFallback = new long[atn.decisionToState.size()];
    nonSll = new long[atn.decisionToState.size()];
    ambiguousResult = new long[atn.decisionToState.size()];
    totalTransitions = new long[atn.decisionToState.size()];
    computedTransitions = new long[atn.decisionToState.size()];
    fullContextTransitions = new long[atn.decisionToState.size()];
    totalLookaheadSll = new long[atn.decisionToState.size()];
    totalLookaheadLl = new long[atn.decisionToState.size()];
    minLookaheadSll = new long[atn.decisionToState.size()];
    maxLookaheadSll = new long[atn.decisionToState.size()];
    minLookaheadLl = new long[atn.decisionToState.size()];
    maxLookaheadLl = new long[atn.decisionToState.size()];
    for (int i = 0; i < minLookaheadSll.length; i++) {
        minLookaheadSll[i] = Long.MAX_VALUE;
        minLookaheadLl[i] = Long.MAX_VALUE;
        maxLookaheadSll[i] = Long.MIN_VALUE;
        maxLookaheadLl[i] = Long.MIN_VALUE;
    }
}
public static RuleContext getTopContext(Parser parser, RuleContext context, IntervalSet values, boolean checkTop) {
    if (checkTop && context instanceof ParserRuleContext) {
        if (values.contains(context.getRuleIndex())) {
            return context;
        }
    }
    if (context.isEmpty()) {
        return null;
    }
    if (values.contains(parser.getATN().states.get(context.invokingState).ruleIndex)) {
        return context.parent;
    }
    return getTopContext(parser, context.parent, values, false);
}
@Override
public Map<RuleContext, CaretReachedException> getParseTrees(TParser parser) {
    List<MultipleDecisionData> potentialAlternatives = new ArrayList<>();
    IntegerList currentPath = new IntegerList();
    Map<RuleContext, CaretReachedException> results = new IdentityHashMap<>();
    // make sure the token stream is initialized before getting the index
    parser.getInputStream().LA(1);
    int initialToken = parser.getInputStream().index();
    while (true) {
        parser.getInputStream().seek(initialToken);
        tryParse(parser, potentialAlternatives, currentPath, results);
        if (!incrementCurrentPath(potentialAlternatives, currentPath)) {
            break;
        }
    }
    LOGGER.log(Level.FINE, "Forest parser constructed {0} parse trees.", results.size());
    if (LOGGER.isLoggable(Level.FINEST)) {
        for (Map.Entry<RuleContext, CaretReachedException> entry : results.entrySet()) {
            LOGGER.log(Level.FINEST, entry.getKey().toStringTree(parser instanceof Parser ? (Parser) parser : null));
        }
    }
    return results;
}
@Override
public void recover(Parser recognizer, RecognitionException e) {
    if (recognizer instanceof CodeCompletionParser
            && ((CodeCompletionParser) recognizer).getInterpreter().getCaretTransitions() != null) {
        // int stateNumber = recognizer.getContext().s;
        // ATNState state = recognizer.getATN().states.get(stateNumber);
        // if (state instanceof DecisionState && recognizer.getInputStream() instanceof ObjectStream) {
        //     int decision = ((DecisionState)state).decision;
        //     ParserATNSimulator simulator = recognizer.getInterpreter();
        //     int prediction = simulator.adaptivePredict((ObjectStream)recognizer.getInputStream(), decision, recognizer.getContext());
        // }
        CodeCompletionParser parser = (CodeCompletionParser) recognizer;
        CaretToken token = parser.getInterpreter().getCaretToken();
        AbstractCompletionParserATNSimulator interpreter = parser.getInterpreter();
        throw new CaretReachedException(parser.getContext(), token, interpreter.getCaretTransitions(), e);
    }
    // TODO: incorporate error recovery as a fallback option if no trees match correctly
    throw e;
    //super.recover(recognizer, e);
}
@Override
public final void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol,
        final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
    String input;
    if (recognizer instanceof Lexer) {
        final CharStream cs = ((Lexer) recognizer).getInputStream();
        input = cs.getText(new Interval(0, cs.size()));
    } else if (recognizer instanceof Parser) {
        final TokenStream tokens = ((Parser) recognizer).getInputStream();
        if (tokens != null) {
            input = tokens.getText();
        } else {
            input = "<unknown input>";
        }
    } else {
        input = "<unknown input>";
    }
    throw new AntlrParseException(input, line, charPositionInLine, msg);
}
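// Sketch of wiring a throwing listener like the one above into both lexer and parser so that the
// first syntax error aborts the whole parse; MyLexer, MyParser, and startRule() are assumed names.
public static ParseTree parseOrThrow(String source, BaseErrorListener throwingListener) {
    MyLexer lexer = new MyLexer(new ANTLRInputStream(source));
    lexer.removeErrorListeners();
    lexer.addErrorListener(throwingListener);
    MyParser parser = new MyParser(new CommonTokenStream(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(throwingListener);
    return parser.startRule();
}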
public static <P extends Parser> P getParser(Class<? extends Lexer> lexerClass, Class<P> parserClass, String source) {
    Lexer lexer = getLexer(lexerClass, source);
    TokenStream tokens = new CommonTokenStream(lexer);
    P parser;
    try {
        parser = parserClass.getConstructor(TokenStream.class).newInstance(tokens);
    } catch (Exception e) {
        throw new IllegalArgumentException("couldn't invoke parser constructor", e);
    }
    parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
    parser.removeErrorListeners(); // don't spit to stderr
    parser.addErrorListener(new DiagnosticErrorListener());
    parser.addErrorListener(new AntlrFailureListener());
    return parser;
}
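// Assumed call-site sketch for getParser(...) above; ExprLexer, ExprParser, and the expr() start
// rule are hypothetical generated names. Under LL_EXACT_AMBIG_DETECTION the DiagnosticErrorListener
// reports any exact ambiguities encountered during the parse.
public static void checkForAmbiguities(String source) {
    ExprParser parser = getParser(ExprLexer.class, ExprParser.class, source);
    parser.expr();
}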
public static TerminalNode getRuleDefNameNode(Parser parser, ParseTree tree, String ruleName) {
    Collection<ParseTree> ruleDefRuleNodes;
    if (Grammar.isTokenName(ruleName)) {
        ruleDefRuleNodes = XPath.findAll(tree, "//lexerRule/TOKEN_REF", parser);
    } else {
        ruleDefRuleNodes = XPath.findAll(tree, "//parserRuleSpec/RULE_REF", parser);
    }
    for (ParseTree node : ruleDefRuleNodes) {
        String r = node.getText(); // always a TerminalNode; just get rule name of this def
        if (r.equals(ruleName)) {
            return (TerminalNode) node;
        }
    }
    return null;
}
public static List<TerminalNode> getAllRuleRefNodes(Parser parser, ParseTree tree, String ruleName) {
    List<TerminalNode> nodes = new ArrayList<TerminalNode>();
    Collection<ParseTree> ruleRefs;
    if (Grammar.isTokenName(ruleName)) {
        ruleRefs = XPath.findAll(tree, "//lexerRuleBlock//TOKEN_REF", parser);
    } else {
        ruleRefs = XPath.findAll(tree, "//ruleBlock//RULE_REF", parser);
    }
    for (ParseTree node : ruleRefs) {
        TerminalNode terminal = (TerminalNode) node;
        Token rrefToken = terminal.getSymbol();
        String r = rrefToken.getText();
        if (r.equals(ruleName)) {
            nodes.add(terminal);
        }
    }
    if (nodes.isEmpty()) {
        return null;
    }
    return nodes;
}
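// Illustrative (assumed) call site for the two XPath helpers above, run against a parse tree of an
// ANTLRv4 grammar file; "expr" is just an example rule name.
public static void printRuleUsage(Parser parser, ParseTree grammarTree) {
    TerminalNode def = getRuleDefNameNode(parser, grammarTree, "expr");
    List<TerminalNode> refs = getAllRuleRefNodes(parser, grammarTree, "expr");
    if (def != null) {
        System.out.println("expr defined at line " + def.getSymbol().getLine());
    }
    if (refs != null) {
        System.out.println("expr referenced " + refs.size() + " times");
    }
}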
public AstBuilderListener(Language language, Parser parser, PsiBuilder builder) {
    this.language = language;
    this.builder = builder;
    this.tokenElementTypes = PSIElementTypeFactory.getTokenIElementTypes(language, Arrays.asList(parser.getTokenNames()));
    this.ruleElementTypes = PSIElementTypeFactory.getRuleIElementTypes(language, Arrays.asList(parser.getRuleNames()));
    for (ANTLRErrorListener listener : parser.getErrorListeners()) {
        if (listener instanceof SyntaxErrorListener) {
            syntaxErrors.addAll(((SyntaxErrorListener) listener).getSyntaxErrors());
        }
    }
    Collections.sort(syntaxErrors, new Comparator<SyntaxError>() {
        @Override
        public int compare(SyntaxError o1, SyntaxError o2) {
            return Integer.compare(getStart(o1), getStart(o2));
        }
    });
}
public StatisticsParserATNSimulator(Parser parser, ATN atn) {
    super(parser, atn);
    decisionInvocations = new long[atn.decisionToState.size()];
    decisionCost = new long[atn.decisionToState.size()];
    decisionLlCost = new long[atn.decisionToState.size()];
    fullContextFallback = new long[atn.decisionToState.size()];
    nonSll = new long[atn.decisionToState.size()];
    ambiguousResult = new long[atn.decisionToState.size()];
    totalTransitions = new long[atn.decisionToState.size()];
    computedTransitions = new long[atn.decisionToState.size()];
    fullContextTransitions = new long[atn.decisionToState.size()];
    totalLookaheadSll = new long[atn.decisionToState.size()];
    totalLookaheadLl = new long[atn.decisionToState.size()];
    minLookaheadSll = new long[atn.decisionToState.size()];
    maxLookaheadSll = new long[atn.decisionToState.size()];
    minLookaheadLl = new long[atn.decisionToState.size()];
    maxLookaheadLl = new long[atn.decisionToState.size()];
    for (int i = 0; i < minLookaheadSll.length; i++) {
        minLookaheadSll[i] = Long.MAX_VALUE;
        minLookaheadLl[i] = Long.MAX_VALUE;
        maxLookaheadSll[i] = Long.MIN_VALUE;
        maxLookaheadLl[i] = Long.MIN_VALUE;
    }
}
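// Hedged sketch of installing the instrumented simulator above on a parser so statistics are
// collected during a normal parse. Recognizer#setInterpreter is the standard hook, but the
// two-argument super(parser, atn) call above assumes a runtime whose ParserATNSimulator exposes
// that constructor, so treat this as illustrative rather than a drop-in recipe.
public static void installStatisticsSimulator(Parser parser) {
    parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN()));
}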
/** Make sure we don't attempt to recover inline; if the parser
 *  successfully recovers, it won't throw an exception.
 */
@Override
public Token recoverInline(Parser recognizer) throws RecognitionException {
    // SINGLE TOKEN DELETION
    Token matchedSymbol = singleTokenDeletion(recognizer);
    if (matchedSymbol != null) {
        // we have deleted the extra token.
        // now, move past ttype token as if all were ok
        recognizer.consume();
        return matchedSymbol;
    }
    // SINGLE TOKEN INSERTION
    if (singleTokenInsertion(recognizer)) {
        return getMissingSymbol(recognizer);
    }
    // BeetlException exception = new BeetlParserException(BeetlException.PARSER_MISS_ERROR);
    // exception.pushToken(this.getGrammarToken(recognizer.getCurrentToken()));
    // throw exception;
    throw new InputMismatchException(recognizer);
}
protected void reportUnwantedToken(@NotNull Parser recognizer) {
    if (inErrorRecoveryMode(recognizer)) {
        return;
    }
    beginErrorCondition(recognizer);
    Token t = recognizer.getCurrentToken();
    String tokenName = getTokenErrorDisplay(t);
    IntervalSet expecting = getExpectedTokens(recognizer);
    String msg = "extraneous input " + tokenName + " expecting " + expecting.toString(recognizer.getTokenNames());
    BeetlException exception = new BeetlParserException(BeetlException.PARSER_MISS_ERROR, msg);
    // exception.token = this.getGrammarToken(t);
    exception.pushToken(this.getGrammarToken(t));
    throw exception;
}
@Override
protected void reportUnwantedToken(Parser recognizer) {
    // change error message from default implementation
    if (inErrorRecoveryMode(recognizer)) {
        return;
    }
    beginErrorCondition(recognizer);
    Token t = recognizer.getCurrentToken();
    String tokenName = getTokenErrorDisplay(t);
    String msg = "extraneous input " + tokenName + " expecting operator";
    recognizer.notifyErrorListeners(t, msg, null);
}
@Override
public Token recoverInline(final Parser recognizer) throws RecognitionException {
    final Token token = recognizer.getCurrentToken();
    final String message = "unexpected token [" + getTokenErrorDisplay(token) + "]"
            + " was expecting one of [" + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
    Location location = new Location(sourceName, token.getStartIndex());
    throw location.createError(new IllegalArgumentException(message));
}
@Override
public void reportAmbiguity(@NotNull Parser recognizer, @NotNull DFA dfa, int startIndex, int stopIndex,
        boolean exact, @Nullable BitSet ambigAlts, @NotNull ATNConfigSet configs) {
}