/**
 * Creates a {@link LexerInterpreter} that tokenizes {@code input} using this
 * grammar's lexer ATN. For a combined grammar the call is forwarded to the
 * implicit lexer.
 *
 * @param input the character stream to tokenize
 * @return a lexer interpreter driven by a private copy of this grammar's ATN
 * @throws IllegalStateException if this grammar is a parser grammar
 */
public LexerInterpreter createLexerInterpreter(CharStream input) {
	if (isParser()) {
		throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
	}

	if (isCombined()) {
		// combined grammars keep their lexer rules in the implicit lexer
		return implicitLexer.createLexerInterpreter(input);
	}

	// Round-trip the ATN through serialization so the interpreter owns an
	// independent copy rather than sharing this grammar's instance.
	ATN atnCopy = new ATNDeserializer().deserialize(ATNSerializer.getSerializedAsChars(atn));
	return new LexerInterpreter(fileName,
								getVocabulary(),
								Arrays.asList(getRuleNames()),
								((LexerGrammar)this).modes.keySet(),
								atnCopy,
								input);
}
/**
 * Creates a {@link GrammarParserInterpreter} that parses {@code tokenStream}
 * using this grammar's ATN.
 *
 * @param tokenStream the token stream to parse
 * @return a grammar-aware parser interpreter over a private ATN copy
 * @throws IllegalStateException if this grammar is a lexer grammar
 * @since 4.5.1
 */
public GrammarParserInterpreter createGrammarParserInterpreter(TokenStream tokenStream) {
	if (isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}

	// Serialize and deserialize so the interpreter works on its own ATN copy.
	ATN atnCopy = new ATNDeserializer().deserialize(ATNSerializer.getSerializedAsChars(atn));
	return new GrammarParserInterpreter(this, atnCopy, tokenStream);
}
/**
 * Creates a plain {@link ParserInterpreter} that parses {@code tokenStream}
 * using this grammar's ATN.
 *
 * @param tokenStream the token stream to parse
 * @return a parser interpreter over a private copy of this grammar's ATN
 * @throws IllegalStateException if this grammar is a lexer grammar
 */
public ParserInterpreter createParserInterpreter(TokenStream tokenStream) {
	if (isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}

	// Serialize and deserialize so the interpreter works on its own ATN copy.
	ATN atnCopy = new ATNDeserializer().deserialize(ATNSerializer.getSerializedAsChars(atn));
	return new ParserInterpreter(fileName,
								 getVocabulary(),
								 Arrays.asList(getRuleNames()),
								 atnCopy,
								 tokenStream);
}
/**
 * Captures {@code atn} in serialized form as a list of per-value character
 * escapes suitable for embedding in generated target-language code.
 *
 * @param factory the output model factory providing the code-generation target
 * @param atn the ATN to serialize
 */
public SerializedATN(OutputModelFactory factory, ATN atn) {
	super(factory);
	IntegerList values = ATNSerializer.getSerialized(atn);
	Target target = factory.getGenerator().getTarget();
	serialized = new ArrayList<String>(values.size());
	for (int value : values.toArray()) {
		// -1 (EOF marker) has no char escape; encode it as the maximum char
		// value, which the deserializer maps back to -1.
		int escapable = value == -1 ? Character.MAX_VALUE : value;
		serialized.add(target.encodeIntAsCharEscape(escapable));
	}
}
public static ParserInterpreterData buildFromSnapshot(DocumentSnapshot snapshot) { LexerInterpreterData lexerInterpreterData = LexerInterpreterData.buildFromSnapshot(snapshot); if (lexerInterpreterData == null) { return null; } List<SyntaxError> syntaxErrors = new ArrayList<>(); Tool tool = new CustomTool(snapshot); tool.errMgr = new CustomErrorManager(tool); tool.addListener(new ErrorListener(snapshot, tool, syntaxErrors)); tool.libDirectory = new File(snapshot.getVersionedDocument().getFileObject().getPath()).getParent(); ANTLRStringStream stream = new ANTLRStringStream(snapshot.getText()); stream.name = snapshot.getVersionedDocument().getFileObject().getNameExt(); GrammarRootAST ast = tool.parse(stream.name, stream); Grammar grammar = tool.createGrammar(ast); if (grammar instanceof LexerGrammar) { return null; } tool.process(grammar, false); ParserInterpreterData data = new ParserInterpreterData(); // start by filling in the lexer data data.lexerInterpreterData = lexerInterpreterData; // then fill in the parser data data.grammarFileName = grammar.fileName; data.serializedAtn = ATNSerializer.getSerializedAsString(grammar.atn, Arrays.asList(grammar.getRuleNames())); data.vocabulary = grammar.getVocabulary(); data.ruleNames = new ArrayList<>(grammar.rules.keySet()); return data; }
/**
 * Creates a preview parser for {@code g} over {@code input}, delegating to
 * the sibling constructor with a private copy of the grammar's ATN produced
 * by a serialize/deserialize round trip.
 *
 * @param g the grammar whose ATN drives the parse
 * @param input the token stream to parse
 */
public PreviewParser(Grammar g, TokenStream input) { this(g, new ATNDeserializer().deserialize(ATNSerializer.getSerializedAsChars(g.getATN())), input); }