/** * Just call a parser method in {@link CqlParser} - does not do any error handling. */ public static <R> R parseAnyUnhandled(CQLParserFunction<R> parserFunction, String input) throws RecognitionException { // Lexer and parser ErrorCollector errorCollector = new ErrorCollector(input); CharStream stream = new ANTLRStringStream(input); CqlLexer lexer = new CqlLexer(stream); lexer.addErrorListener(errorCollector); TokenStream tokenStream = new CommonTokenStream(lexer); CqlParser parser = new CqlParser(tokenStream); parser.addErrorListener(errorCollector); // Parse the query string to a statement instance R r = parserFunction.parse(parser); // The errorCollector has queue up any errors that the lexer and parser may have encountered // along the way, if necessary, we turn the last error into exceptions here. errorCollector.throwFirstSyntaxError(); return r; }
/**
 * Smoke test: lexes and parses a small Rapture DSL script (an api(Admin) block plus a
 * RaptureQueryResult type definition), prints the resulting AST, then walks the tree with
 * {@code TTree} using the "Java" API templates and prints the walker result.
 * No assertions — the test passes as long as parsing and walking do not throw.
 */
@Test public void test1() throws RecognitionException { CharStream input = new ANTLRStringStream("version(1.1.0)\n" + "minVer(1.1.0)\n" + "[ The Admin API is used to manipulate and access the low level entities in Rapture. Typically the methods in this API\n" + "are only used during significant setup events in a Rapture environment.]\n" + "api(Admin) {\n" + "    [This method restores a user that has been deleted]\n" + "    @entitle=/admin/main\n" + "    @public Boolean restoreUser(String userName);\n" + "}\n" + "[A return value from a native query]\n" + "type RaptureQueryResult(@package=rapture.common) {\n" + "    List(JsonContent) rows;\n" + "}\n"); TLexer lexer = new TLexer(input); TokenStream tokenInputStream = new CommonTokenStream(lexer); TParser parser = new TParser(tokenInputStream); hmxdef_return returnVal = parser.hmxdef(); System.out.println("Done " + returnVal.getTree().toStringTree()); TreeNodeStream treeInput = new CommonTreeNodeStream(returnVal.getTree()); TTree walker = new TTree(treeInput); walker.setTemplateLib(TemplateRepo.getApiTemplates("Java")); com.incapture.rapgen.TTree.hmxdef_return walkerResult = walker.hmxdef(); System.out.println("Done, result=" + walkerResult.toString()); }
/**
 * Smoke test for SDK generation: parses a script that starts with an sdk(alan) header,
 * prints the AST, then walks it with the "Java" SDK templates via {@code sdkGen()}.
 * No assertions — the test passes as long as parsing and walking do not throw.
 */
@Test public void test4() throws RecognitionException { CharStream input = new ANTLRStringStream("sdk(alan)\n" + "version(0.0.1)\n" + "minVer(0.0.1)\n" + "[ The Admin API is used to manipulate and access the low level entities in Rapture. Typically the methods in this API\n" + "are only used during significant setup events in a Rapture environment.]\n" + "api(Admin) {\n" + "    [This method restores a user that has been deleted]\n" + "    @entitle=/admin/main\n" + "    @public Boolean restoreUser(String userName);\n" + "}\n" + "[A return value from a native query]\n" + "type RaptureQueryResult(@package=rapture.common) {\n" + "    List(JsonContent) rows;\n" + "}\n"); TLexer lexer = new TLexer(input); TokenStream tokenInputStream = new CommonTokenStream(lexer); TParser parser = new TParser(tokenInputStream); hmxdef_return returnVal = parser.hmxdef(); System.out.println("Done " + returnVal.getTree().toStringTree()); TreeNodeStream treeInput = new CommonTreeNodeStream(returnVal.getTree()); TTree walker = new TTree(treeInput); walker.setTemplateLib(TemplateRepo.getSdkTemplates("Java")); walker.sdkGen(); }
/**
 * Smoke test for a @Bean-annotated type: parses a script declaring type Node with a
 * generic {@code List<XFer>} field, prints the AST, then walks it with the "Java" API
 * templates via {@code apiGen()} and prints the walker result.
 * No assertions — the test passes as long as parsing and walking do not throw.
 */
@Test public void testBean() throws RecognitionException { CharStream input = new ANTLRStringStream("version(1.1.0)\n" + "minVer(1.1.0)\n" + "[ The Admin API is used to manipulate and access the low level entities in Rapture. Typically the methods in this API\n" + "are only used during significant setup events in a Rapture environment.]\n" + "api(Admin) {\n" + "    [This method restores a user that has been deleted]\n" + "    @entitle=/admin/main\n" + "    @public Boolean restoreUser(String userName);\n" + "}\n" + "[A Graph node]\n" + "@Bean\n" + "type Node(@package=rapture.common.dp) {\n" + "    String nodeId; //this is not a URI, just a String id\n" + "    List<XFer> xferValues;\n" + "}\n" + ""); TLexer lexer = new TLexer(input); TokenStream tokenInputStream = new CommonTokenStream(lexer); TParser parser = new TParser(tokenInputStream); hmxdef_return returnVal = parser.hmxdef(); System.out.println("Done " + returnVal.getTree().toStringTree()); TreeNodeStream treeInput = new CommonTreeNodeStream(returnVal.getTree()); TTree walker = new TTree(treeInput); walker.setTemplateLib(TemplateRepo.getApiTemplates("Java")); apiGen_return walkerResult = walker.apiGen(); System.out.println("Done, result=" + walkerResult.toString()); }
/**
 * Parses the given input with a freshly created lexer/parser pair.
 *
 * @param ruleName         entry rule to start from, or {@code null} for the default entry rule
 * @param in               the character stream to parse
 * @param nodeModelBuilder builder that receives the node model during parsing
 * @param initialLookAhead initial lookahead the token stream is seeded with
 * @return the parse result produced by the parser
 * @throws ParseException wrapping any exception thrown while parsing
 */
protected IParseResult doParse(String ruleName, CharStream in, NodeModelBuilder nodeModelBuilder, int initialLookAhead) {
    // Build the token stream on top of a fresh lexer.
    TokenSource source = createLexer(in);
    XtextTokenStream stream = createTokenStream(source);
    stream.initCurrentLookAhead(initialLookAhead);
    setInitialHiddenTokens(stream);

    // Configure the parser with every collaborator it needs.
    AbstractInternalAntlrParser antlrParser = createParser(stream);
    antlrParser.setTokenTypeMap(getTokenDefProvider().getTokenDefMap());
    antlrParser.setSyntaxErrorProvider(getSyntaxErrorProvider());
    antlrParser.setNodeModelBuilder(nodeModelBuilder);
    antlrParser.setSemanticModelBuilder(getElementFactory());
    IUnorderedGroupHelper groupHelper = getUnorderedGroupHelper().get();
    antlrParser.setUnorderedGroupHelper(groupHelper);
    groupHelper.initializeWith(antlrParser);

    try {
        // Either start from the named rule or from the default entry rule.
        return ruleName == null ? antlrParser.parse() : antlrParser.parse(ruleName);
    } catch (Exception re) {
        throw new ParseException(re.getMessage(), re);
    }
}
/** Load template stream into this group. {@code unqualifiedFileName} is
 *  {@code "a.st"}. The {@code prefix} is path from group root to
 *  {@code unqualifiedFileName} like {@code "/subdir"} if file is in
 *  {@code /subdir/a.st}.
 *
 *  @return the compiled template, or {@code null} if the stream could not be
 *          parsed into a template definition (the syntax error has already
 *          been reported through {@code errMgr}).
 */
public CompiledST loadTemplateFile(String prefix, String unqualifiedFileName, CharStream templateStream) {
    GroupLexer lexer = new GroupLexer(templateStream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    GroupParser parser = new GroupParser(tokens);
    parser.group = this;
    lexer.group = this;
    try {
        parser.templateDef(prefix);
    }
    catch (RecognitionException re) {
        errMgr.groupSyntaxError(ErrorType.SYNTAX_ERROR, unqualifiedFileName, re, re.getMessage());
    }
    String templateName = Misc.getFileNameNoSuffix(unqualifiedFileName);
    if ( prefix!=null && prefix.length()>0 ) templateName = prefix+templateName;
    CompiledST impl = rawGetTemplate(templateName);
    // BUGFIX: when the parse failed, the template was never defined and
    // rawGetTemplate returns null; dereferencing it threw an NPE here.
    if ( impl==null ) return null;
    impl.prefix = prefix;
    return impl;
}
/**
 * Parses the sample Rapture API/type script and prints the AST, then constructs a
 * {@code TTreeJS} walker configured with the JS template group.
 * NOTE(review): the walker is built and given templates but no walk rule is ever
 * invoked — this test only verifies that parsing and template setup do not throw;
 * confirm whether a walker call (e.g. hmxdef) was intended here.
 */
@Test public void test1() throws RecognitionException, IOException { CharStream input = new ANTLRStringStream("version(1.1.0)\n" + "minVer(1.1.0)\n" + "[ The Admin API is used to manipulate and access the low level entities in Rapture. Typically the methods in this API\n" + "are only used during significant setup events in a Rapture environment.]\n" + "api(Admin) {\n" + "    [This method restores a user that has been deleted]\n" + "    @entitle=/admin/main\n" + "    @public Boolean restoreUser(String userName);\n" + "}\n" + "[A return value from a native query]\n" + "type RaptureQueryResult(@package=rapture.common) {\n" + "    List(JsonContent) rows;\n" + "}\n"); TLexer lexer = new TLexer(input); TokenStream tokenInputStream = new CommonTokenStream(lexer); TParser parser = new TParser(tokenInputStream); hmxdef_return returnVal = parser.hmxdef(); System.out.println("Done " + returnVal.getTree().toStringTree()); TreeNodeStream treeInput = new CommonTreeNodeStream(returnVal.getTree()); TTreeJS walker = new TTreeJS(treeInput); walker.setTemplateLib(getJsTemplate()); }
/** Load template stream into this group. {@code unqualifiedFileName} is
 *  {@code "a.st"}. The {@code prefix} is path from group root to
 *  {@code unqualifiedFileName} like {@code "/subdir"} if file is in
 *  {@code /subdir/a.st}.
 *
 *  @return the compiled template, or {@code null} if the stream could not be
 *          parsed into a template definition (the syntax error has already
 *          been reported through {@code errMgr}).
 */
public CompiledST loadTemplateFile(String prefix, String unqualifiedFileName, CharStream templateStream) {
    GroupLexer lexer = new GroupLexer(templateStream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    GroupParser parser = new GroupParser(tokens);
    parser.group = this;
    lexer.group = this;
    try {
        parser.templateDef(prefix);
    }
    catch (RecognitionException re) {
        errMgr.groupSyntaxError(ErrorType.SYNTAX_ERROR, unqualifiedFileName, re, re.getMessage());
    }
    String templateName = Misc.getFileNameNoSuffix(unqualifiedFileName);
    if ( prefix!=null && prefix.length()>0 ) templateName = prefix+templateName;
    CompiledST impl = rawGetTemplate(templateName);
    // BUGFIX: when the parse failed, the template was never defined and
    // rawGetTemplate returns null; dereferencing it threw an NPE here.
    if ( impl==null ) return null;
    impl.prefix = prefix;
    return impl;
}
/**
 * Renders this subtree as a flat string of its tokens, walking the nodes
 * in tree-node-stream order until EOF. Each token's text is preceded by a
 * single space, so the result starts with a space when non-empty.
 */
public String toTokenString() {
    CharStream input = this.token.getInputStream();
    GrammarASTAdaptor adaptor = new GrammarASTAdaptor(input);
    CommonTreeNodeStream stream = new CommonTreeNodeStream(adaptor, this);
    StringBuilder out = new StringBuilder();
    for (GrammarAST node = (GrammarAST) stream.LT(1);
         adaptor.getType(node) != Token.EOF;
         node = (GrammarAST) stream.LT(1)) {
        out.append(" ");
        out.append(node.getText());
        stream.consume();
    }
    return out.toString();
}
private List<Token> doParse(CharStream in) { TokenSource tokenSource = createLexer(in); LazyTokenStream tokenStream = createTokenStream(tokenSource); setInitialHiddenTokens(tokenStream); InternalN4JSParser parser = createParser(tokenStream); IUnorderedGroupHelper helper = unorderedGroupHelper.get(); if (!(helper instanceof IUnorderedGroupHelper.Null)) { throw new IllegalStateException("Unexpected usage of unordered groups."); } Stopwatch stopwatch = null; boolean debug = LOGGER.isDebugEnabled(); // boolean debug = true; if (debug) { stopwatch = Stopwatch.createStarted(); } try { parser.entryRuleScript(); while (tokenStream.LT(1) != Token.EOF_TOKEN) { tokenStream.consume(); } @SuppressWarnings("unchecked") List<Token> result = tokenStream.getTokens(); return result; } catch (Exception re) { throw new ParseException(re.getMessage(), re); } finally { if (debug) { assert stopwatch != null; long elapsed = stopwatch.stop().elapsed(TimeUnit.MILLISECONDS); if (elapsed > 5) { LOGGER.warn("Coloring parser took: " + elapsed); } } } }
/**
 * {@inheritDoc}
 *
 * Returns the exact source text spanning the given token range (from the
 * first character of the start token through the last character of the
 * stop token), or {@code null} if either index is negative.
 */
@Override
public String toString(int start, int stop) {
    if (start < 0 || stop < 0) {
        return null;
    }
    CommonToken startToken = (CommonToken) tokens.get(start);
    CommonToken stopToken = (CommonToken) tokens.get(stop);
    CharStream charStream = ((Lexer) tokenSource).getCharStream();
    // BUGFIX: CommonToken.getStopIndex() is inclusive, but String.substring's
    // end index is exclusive, so the old code dropped the stop token's last
    // character. ANTLR's CharStream.substring takes inclusive indices, and
    // also avoids materializing the whole stream as a String first.
    return charStream.substring(startToken.getStartIndex(), stopToken.getStopIndex());
}
/**
 * Reads one character from the underlying LexerInput, translating the
 * NetBeans EOF sentinel into ANTLR's {@code CharStream.EOF}.
 */
private int read() {
    int c = li.read();
    return (c == LexerInput.EOF) ? CharStream.EOF : c;
}
@Override public int LA(int i) { if (i == 0) return 0; // undefined if (i < 0) i++; // e.g., translate LA(-1) to use offset 0 if ((p + i - 1) >= n) return CharStream.EOF; return Character.toUpperCase(data[p + i - 1]); }
/**
 * Parses the given CMIS query string and returns the root of the AST.
 *
 * @throws RecognitionException if the query does not parse
 */
private CommonTree parse(String query) throws RecognitionException {
    CMISLexer lexer = new CMISLexer(new ANTLRStringStream(query));
    CMISParser parser = new CMISParser(new CommonTokenStream(lexer));
    return (CommonTree) parser.query().getTree();
}
/** Factory method for a lexer generating this kind of tree. */ public Lexer createLexer(Class<? extends Lexer> lexerType, I info, String term) { try { // instantiate the lexer ANTLRStringStream input = new ANTLRStringStream(term); Constructor<? extends Lexer> lexerConstructor = lexerType.getConstructor(CharStream.class); Lexer result = lexerConstructor.newInstance(input); callInitialise(result, info); return result; } catch (Exception e) { throw toRuntime(e); } }
/**
 * Case-insensitive lookahead over the wrapped stream: the sentinel values
 * 0 and {@code CharStream.EOF} pass through untouched, everything else is
 * upper-cased.
 */
@Override
public int LA(int i) {
    int c = stream.LT(i);
    if (c == 0 || c == CharStream.EOF) {
        return c;
    }
    return Character.toUpperCase(c);
}
@SuppressWarnings("unused") static public Constraint buildFTS(String ftsExpression, QueryModelFactory factory, FunctionEvaluationContext functionEvaluationContext, Selector selector, Map<String, Column> columnMap, String defaultField) { // TODO: Decode sql escape for '' should do in CMIS layer // parse templates to trees ... CMIS_FTSParser parser = null; try { CharStream cs = new ANTLRStringStream(ftsExpression); CMIS_FTSLexer lexer = new CMIS_FTSLexer(cs); CommonTokenStream tokens = new CommonTokenStream(lexer); parser = new CMIS_FTSParser(tokens); CommonTree ftsNode = (CommonTree) parser.cmisFtsQuery().getTree(); return buildFTSConnective(ftsNode, factory, functionEvaluationContext, selector, columnMap, defaultField); } catch (RecognitionException e) { if (parser != null) { String[] tokenNames = parser.getTokenNames(); String hdr = parser.getErrorHeader(e); String msg = parser.getErrorMessage(e, tokenNames); throw new FTSQueryException(hdr + "\n" + msg, e); } return null; } }
public void testLexer() throws IOException, RecognitionException { ClassLoader cl = FTSTest.class.getClassLoader(); InputStream modelStream = cl.getResourceAsStream("org/alfresco/repo/search/impl/parsers/fts_test.gunit"); CharStream input = new ANTLRInputStream(modelStream); gUnitExecutor executer = new gUnitExecutor(parse(input), "FTS"); String result = executer.execTest(); System.out.print(executer.execTest()); // unit test result assertEquals("Failures: " + result, 0, executer.failures.size()); assertEquals("Invalids " + result, 0, executer.invalids.size()); }
private GrammarInfo parse(CharStream input) throws RecognitionException { gUnitLexer lexer = new gUnitLexer(input); CommonTokenStream tokens = new CommonTokenStream(lexer); GrammarInfo grammarInfo = new GrammarInfo(); gUnitParser parser = new gUnitParser(tokens, grammarInfo); parser.gUnitDef(); // parse gunit script and save elements to grammarInfo return grammarInfo; }
/**
 * Dumps the token stream produced by FTSLexer for a small sample query,
 * one token per line.
 */
public void testLexerOutput() throws IOException {
    String str = "~woof^2";
    CharStream input = new ANTLRInputStream(new ByteArrayInputStream(str.getBytes("UTF-8")));
    FTSLexer lexer = new FTSLexer(input);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    @SuppressWarnings("unchecked")
    List<CommonToken> allTokens = (List<CommonToken>) tokenStream.getTokens();
    for (CommonToken token : allTokens) {
        System.out.println(token.toString());
    }
}
public static void parseFile(Collection files, File src) throws Exception { // System.out.println("in folder:"+file.getParent()); for (int i=0; i<files.toArray().length;i++){ File file = ((FileModel)files.toArray()[i]).getFile(); //PackageDef package_= ((FileModel)files.toArray()[i]).getPackage(); String tempName = (file.getParent().replace(src.getAbsolutePath(), "").trim().replaceFirst("\\W", "").replaceAll( "\\W", ".")).trim(); System.out.println("src:" + src.getAbsolutePath() + " parent:"+file.getParent() + " tempName:" +tempName +";" ); PackageDef package_ = (PackageDef) SymbolTable.getInstance().getObject(tempName, IdentifierType.PACKAGE); if(package_!=null){ SymbolTable.getInstance().setCurrentPackage(package_); } else{ package_ = new PackageDef(null, IdentifierType.PACKAGE, -1); package_.setFile(file); package_.addPackageName(tempName); SymbolTable.getInstance().addObject(tempName, package_); SymbolTable.getInstance().setCurrentPackage(package_); } SymbolTable.getInstance().setCurrentFile(file); //SymbolTable.getInstance().setCurrentPackage(package_); CharStream input = new ANTLRFileStream(file.getAbsolutePath()); ObjectiveCLexer lex= new ObjectiveCLexer(input); CommonTokenStream tokens = new CommonTokenStream(lex); ObjectiveCParser parser = new ObjectiveCParser(tokens); parser.translation_unit(); } }
@Override public CompiledST loadTemplateFile(String prefix, String unqualifiedFileName, CharStream templateStream) { String template = templateStream.substring(0, templateStream.size()- 1); String templateName = Misc.getFileNameNoSuffix(unqualifiedFileName); String fullyQualifiedTemplateName = prefix+templateName; CompiledST impl = new Compiler(this).compile(fullyQualifiedTemplateName, template); CommonToken nameT = new CommonToken(STLexer.SEMI); // Seems like a hack, best I could come up with. nameT.setInputStream(templateStream); rawDefineTemplate(fullyQualifiedTemplateName, impl, nameT); impl.defineImplicitlyDefinedTemplates(this); return impl; }
/**
 * Formats a window of the offending source around {@code line}, underlining
 * the error position with dashes and marking its extent with carets.
 *
 * @param stream   the char or token stream the error came from
 * @param line     1-based line number of the error
 * @param position 0-based column of the error within that line
 * @param length   length of the offending region
 * @return a printable multi-line error display
 */
public static String displayError(IntStream stream, int line, int position, int length) {
    StringBuilder sb = new StringBuilder();

    // Pull the full text out of whichever stream flavour we were handed.
    String[] lines = null;
    if (stream instanceof CharStream) {
        lines = ((CharStream) stream).substring(0, stream.size() - 1).split("\n");
    } else if (stream instanceof TokenStream) {
        lines = ((TokenStream) stream).toString(0, stream.size() - 1).split("\n");
    }

    sb.append(" at line ").append(line).append("\n");
    if (lines != null) {
        // Show up to five lines of context on either side of the bad line.
        int start = Math.max(0, line - 5);
        int end = Math.min(lines.length, line + 5);
        int badline = line - 1;
        for (int i = start; i < end; i++) {
            sb.append(String.format("%5d: %s\n", i + 1, lines[i]));
            if (i == badline) {
                // Seven extra dashes compensate for the "nnnnn: " prefix.
                for (int j = 0; j < position + 7; j++) {
                    sb.append("-");
                }
                for (int j = 0; j <= length; j++) {
                    sb.append("^");
                }
                sb.append("\n");
            }
        }
    }
    return sb.toString();
}
/**
 * Parses the given JPQL statement with a case-insensitive stream and
 * asserts that the resulting tree is valid.
 */
private void testQuery(String query) throws RecognitionException {
    JPA2Lexer lexer = new JPA2Lexer(new AntlrNoCaseStringStream(query));
    JPA2Parser jpa2Parser = new JPA2Parser(new CommonTokenStream(lexer));
    JPA2Parser.ql_statement_return parsed = jpa2Parser.ql_statement();
    Assert.assertTrue(isValid((CommonTree) parsed.getTree()));
}
/**
 * Creates a template lexer over {@code input} using the given expression
 * delimiters, and immediately primes one character of lookahead.
 *
 * @param errMgr             error manager used to report lexing problems
 * @param input              the template source
 * @param templateToken      enclosing token used for error locations
 * @param delimiterStartChar character that opens an expression
 * @param delimiterStopChar  character that closes an expression
 */
public STLexer(ErrorManager errMgr, CharStream input, Token templateToken, char delimiterStartChar, char delimiterStopChar) {
    this.errMgr = errMgr;
    this.input = input;
    this.templateToken = templateToken;
    this.delimiterStartChar = delimiterStartChar;
    this.delimiterStopChar = delimiterStopChar;
    c = (char) input.LA(1); // prime lookahead
}
public GrammarRootAST parse(String fileName, CharStream in) { try { GrammarASTAdaptor adaptor = new GrammarASTAdaptor(in); ToolANTLRLexer lexer = new ToolANTLRLexer(in, this); CommonTokenStream tokens = new CommonTokenStream(lexer); lexer.tokens = tokens; ToolANTLRParser p = new ToolANTLRParser(tokens, this); p.setTreeAdaptor(adaptor); try { ParserRuleReturnScope r = p.grammarSpec(); GrammarAST root = (GrammarAST)r.getTree(); if ( root instanceof GrammarRootAST) { ((GrammarRootAST)root).hasErrors = lexer.getNumberOfSyntaxErrors()>0 || p.getNumberOfSyntaxErrors()>0; assert ((GrammarRootAST)root).tokenStream == tokens; if ( grammarOptions!=null ) { ((GrammarRootAST)root).cmdLineOptions = grammarOptions; } return ((GrammarRootAST)root); } } catch (v3TreeGrammarException e) { errMgr.grammarError(ErrorType.V3_TREE_GRAMMAR, fileName, e.location); } return null; } catch (RecognitionException re) { // TODO: do we gen errors now? ErrorManager.internalError("can't generate this message at moment; antlr recovers"); } return null; }
/**
 * Creates a lexer instance via reflection. The object is not created with the default
 * constructor because it will not initialize the backtracking state of the lexer.
 * Instead, we pass <code>null</code> as CharStream argument.
 */
@Override
public T get() {
    try {
        // Explicit argument array so the null CharStream is unambiguous.
        Object[] noStream = { null };
        return clazz.getConstructor(CharStream.class).newInstance(noStream);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Smoke test for URI-typed fields: parses a script whose type Node declares plain,
 * generic ({@code List<JobURI>}) and scalar {@code JobURI} fields, prints the AST,
 * walks it with the "Java" API templates via {@code apiGen()}, and dumps the generated
 * files into the kernel/api/web output folders. No assertions — passes if nothing throws.
 */
@Test public void testTypeGenericURI() throws RecognitionException { CharStream input = new ANTLRStringStream("version(1.1.0)\n" + "minVer(1.1.0)\n" + "[ The Admin API is used to manipulate and access the low level entities in Rapture. Typically the methods in this API\n" + "are only used during significant setup events in a Rapture environment.]\n" + "api(Admin) {\n" + "    [This method restores a user that has been deleted]\n" + "    @entitle=/admin/main\n" + "    @public Boolean restoreUser(String userName);\n" + "}\n" + "[A Graph node]\n" + "@Bean\n" + "type Node(@package=rapture.common.dp) {\n" + "    String nodeId; //this is not a URI, just a String id\n" + "    List<String> stringValues;\n" + "    List<JobURI> jobUriValues;\n" + "    JobURI myJobURI;\n" + "}\n" + ""); TLexer lexer = new TLexer(input); TokenStream tokenInputStream = new CommonTokenStream(lexer); TParser parser = new TParser(tokenInputStream); hmxdef_return returnVal = parser.hmxdef(); System.out.println("Done " + returnVal.getTree().toStringTree()); TreeNodeStream treeInput = new CommonTreeNodeStream(returnVal.getTree()); TTree walker = new TTree(treeInput); walker.setTemplateLib(TemplateRepo.getApiTemplates("Java")); walker.apiGen(); walker.dumpFiles(outputKernelFolder, outputApiFolder, outputWebFolder); System.out.println("Done, folder=" + parentFolder.getAbsolutePath()); }
/**
 * Parses a single @Storable type annotation through the typeAnnotation rule
 * and prints the resulting tree. No assertions — passes if nothing throws.
 */
@Test
public void test2() throws RecognitionException {
    System.out.println("two");
    TLexer lexer = new TLexer(new ANTLRStringStream("@Storable ( storagePath : {\"a\"})"));
    TParser parser = new TParser(new CommonTokenStream(lexer));
    typeAnnotation_return returnVal = parser.typeAnnotation();
    System.out.println("Done " + returnVal.getTree().toStringTree());
}
/**
 * Obtains a token source by delegating to the parser's lexer factory.
 *
 * @param stream the character stream the lexer should read from
 * @return the lexer created by the configured AbstractAntlrParser
 * @throws IllegalStateException if the configured parser is not an
 *         AbstractAntlrParser and therefore cannot create a lexer
 */
@Override
public TokenSource createTokenSource(final CharStream stream) {
    if (this.parser instanceof AbstractAntlrParser) {
        return ((AbstractAntlrParser) this.parser).createLexer(stream);
    }
    // Identical message to the old StringConcatenation-based version, built
    // with plain concatenation instead of generated Xtend boilerplate.
    throw new IllegalStateException(
            this.parser.getClass().getName() + " should be a subclass of " + AbstractAntlrParser.class.getName());
}
/**
 * Parses an @Addressable/@Storable-annotated mail-message type definition through the
 * typeExpr rule and prints the resulting tree. No assertions — the test passes as long
 * as lexing and parsing do not throw.
 */
@Test public void test5() throws RecognitionException { System.out.println("five"); CharStream input = new ANTLRStringStream("[ This is a mailbox message, usually posted by an external user]\n" + "@Addressable(scheme = MAILBOX)\n" + "@Storable(storagePath : {authority, documentPath, id})\n" + "type RaptureMailMessage(@package=rapture.common.model) {\n" + "    String id;\n" + "    String authority;\n" + "    String category;\n" + "    String content;\n" + "    Date when;\n" + "    String who;\n" + "}\n" + ""); TLexer lexer = new TLexer(input); TokenStream tokenInputStream = new CommonTokenStream(lexer); TParser parser = new TParser(tokenInputStream); typeExpr_return returnVal = parser.typeExpr(); System.out.println("Done " + returnVal.getTree().toStringTree()); }