/** Creates a parser for a given term, generating trees of this kind. */
public <P extends Parser> P createParser(Class<P> parserType, I info, String term) {
    try {
        // find the lexer type
        String parserName = parserType.getName();
        String lexerName = parserName.substring(0, parserName.indexOf("Parser")).concat("Lexer");
        @SuppressWarnings("unchecked")
        Class<? extends Lexer> lexerType = (Class<? extends Lexer>) Class.forName(lexerName);
        Lexer lexer = createLexer(lexerType, info, term);
        // instantiate the parser
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        Constructor<P> parserConstructor = parserType.getConstructor(TokenStream.class);
        P result = parserConstructor.newInstance(tokenStream);
        Method adaptorSetter = parserType.getMethod("setTreeAdaptor", TreeAdaptor.class);
        adaptorSetter.invoke(result, new ParseTreeAdaptor<>(this, info, tokenStream));
        callInitialise(result, info);
        return result;
    } catch (Exception e) {
        throw toRuntime(e);
    }
}
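A minimal usage sketch for this reflective factory, assuming a generated ANTLR 3 pair MyLangParser/MyLangLexer and a start rule named program (all three names are hypothetical; only the createParser signature above comes from the source):

// Hypothetical caller; MyLangParser, its program() rule and the factory/info
// variables are assumptions used purely for illustration.
MyLangParser parser = factory.createParser(MyLangParser.class, info, "a + b");
MyLangParser.program_return tree = parser.program();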
public static List<Token> getTokens(String sourceCode, String language) throws Exception {
    // pick the lexer matching the requested language
    Lexer lexer;
    if (language.equals("C++")) {
        lexer = new PlagiCOJCppLexer(new ANTLRStringStream(sourceCode));
    } else if (language.equals("Java")) {
        lexer = new PlagiCOJJavaLexer(new ANTLRStringStream(sourceCode));
    } else if (language.equals("C#")) {
        lexer = new PlagiCOJCSharpLexer(new ANTLRStringStream(sourceCode));
    } else if (language.equals("C")) {
        lexer = new PlagiCOJCLexer(new ANTLRStringStream(sourceCode));
    } else if (language.equals("Python")) {
        lexer = new PlagiCOJPythonLexer(new ANTLRStringStream(sourceCode));
    } else if (language.equals("Pascal")) {
        lexer = new PlagiCOJPascalLexer(new ANTLRStringStream(sourceCode));
    } else {
        throw new PlagiCOJUnsupportedLanguageException();
    }
    // buffer the whole token stream and return it
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    return tokens.getTokens();
}
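A short usage sketch for the dispatcher above; the surrounding utility class is not shown, so the call site below is an assumption:

// Hypothetical call site; only getTokens(...) itself comes from the snippet above.
List<Token> tokens = getTokens("int main() { return 0; }", "C");
for (Token t : tokens) {
    System.out.println(t.getType() + " -> " + t.getText());
}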
public CompiledFileModelV4(@NullAllowed Grammar grammar, @NullAllowed GrammarRootAST result,
        @NonNull List<? extends SyntaxError> syntaxErrors, @NonNull FileObject fileObject,
        @NullAllowed CommonToken[] tokens) {
    super(fileObject, tokens);
    Parameters.notNull("syntaxErrors", syntaxErrors);
    this.grammar = grammar;
    this.result = result;
    this.syntaxErrors = syntaxErrors;
    this.importedResults = new ArrayList<>();
    if (grammar != null && grammar.importedGrammars != null) {
        // wrap every imported grammar in its own model, reusing its buffered token stream
        for (Grammar imported : grammar.importedGrammars) {
            CommonTokenStream importedTokenStream = (CommonTokenStream) imported.ast.tokenStream;
            String fileName = ((Lexer) importedTokenStream.getTokenSource()).getCharStream().getSourceName();
            FileObject importedFileObject = FileUtil.toFileObject(new File(fileName));
            List<? extends Token> importedTokensList = importedTokenStream.getTokens();
            CommonToken[] importedTokens = importedTokensList.toArray(new CommonToken[0]);
            importedResults.add(new CompiledFileModelV4(imported, imported.ast,
                    Collections.<SyntaxError>emptyList(), importedFileObject, importedTokens));
        }
    }
}
public void testCaseInsensivityOfSomeAtTokens() throws Exception {
    String source = "@FONT-face @charset @CHARSET @charSeT ";
    Lexer lexer = createLexer(source);

    assertANTLRToken("@FONT-face", Css3Lexer.FONT_FACE_SYM, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken("@charset", Css3Lexer.CHARSET_SYM, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken("@CHARSET", Css3Lexer.CHARSET_SYM, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken("@charSeT", Css3Lexer.CHARSET_SYM, lexer.nextToken());
}
public void testExtendOnlySelector2() throws Exception {
    String source = "#context a%extreme {";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.HASH, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.IDENT, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.SASS_EXTEND_ONLY_SELECTOR, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.LBRACE, lexer.nextToken());
}
public void testSASS_Else() throws Exception {
    String source = "@else cau";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.SASS_ELSE, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.IDENT, lexer.nextToken());
}
public void testLineComment() throws Exception {
    String source = "//comment\na";
    Lexer lexer = createLexer(source);

    assertANTLRToken("//comment", Css3Lexer.LINE_COMMENT, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.NL, lexer.nextToken());
    assertANTLRToken("a", Css3Lexer.IDENT, lexer.nextToken());
}
public void testLexingOfPercentageWithoutNumberPrefix() throws Exception {
    String source = "font: %/20 ";
    Lexer lexer = createLexer(source);

    assertANTLRToken("font", Css3Lexer.IDENT, lexer.nextToken());
    assertANTLRToken(":", Css3Lexer.COLON, lexer.nextToken());
    assertANTLRToken(" ", Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken("%", Css3Lexer.PERCENTAGE_SYMBOL, lexer.nextToken());
    assertANTLRToken("/", Css3Lexer.SOLIDUS, lexer.nextToken());
    assertANTLRToken("20", Css3Lexer.NUMBER, lexer.nextToken());
    assertANTLRToken(" ", Css3Lexer.WS, lexer.nextToken());
}
/** Factory method for a lexer generating this kind of tree. */
public Lexer createLexer(Class<? extends Lexer> lexerType, I info, String term) {
    try {
        // instantiate the lexer
        ANTLRStringStream input = new ANTLRStringStream(term);
        Constructor<? extends Lexer> lexerConstructor = lexerType.getConstructor(CharStream.class);
        Lexer result = lexerConstructor.newInstance(input);
        callInitialise(result, info);
        return result;
    } catch (Exception e) {
        throw toRuntime(e);
    }
}
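A hedged usage sketch for this lexer factory, reusing the Css3Lexer class that appears in the tests below; the factory instance and the info value are assumptions:

// Hypothetical call; assumes Css3Lexer exposes the single-argument CharStream
// constructor that createLexer looks up via reflection.
Lexer lexer = factory.createLexer(Css3Lexer.class, info, ".foo { color: red }");
Token first = lexer.nextToken();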
public void enough(PE_MySQL parser, List<List<ExpressionNode>> insertValues) {
    Lexer lexer = (Lexer) parser.getTokenStream().getTokenSource();
    // once the lexer has consumed more than the continuation threshold, record where
    // we stopped and bail out with an insert statement built from the rows parsed so far
    if (lexer.getCharIndex() - initialOffset > continuationThreshold) {
        finalOffset = lexer.getCharIndex();
        throw new EnoughException(buildInsertStatement(insertValues, true,
                (initialOffset == 0 ? TransactionStatement.Kind.START : null), null, false));
    }
}
public Block(Architecture arch) {
    this.architecture = arch;
    Lexer lex = null;
    Assembler assembler = null;
    switch (architecture) {
        // no parsing necessary if we're writing bytes to memory
        case raw:
            return;
        // arm architectures trickle down
        case armv7:
        case armv9:
            // lex = new armLexer(new ANTLRStringStream(program));
            // assembler = new armAssembler(new CommonTokenStream(lex));
            break;
        case x64:
            lex = new x64Lexer(new ANTLRStringStream(program));
            assembler = new x64Assembler(new CommonTokenStream(lex));
            break;
        case x86:
            break;
        default:
            break;
    }
    try {
        if (assembler != null) {
            assembler.start();
            this.instructions = assembler.getMachineCode();
        } else {
            throw new JssemblyException("Assembler not found for architecture: "
                    + this.architecture.name());
        }
    } catch (RecognitionException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
public Lexer[] getDelegates() { return new Lexer[] {gLexer}; }
@Override public Lexer get() { return new InternalN4JSLexer(null); }
public void testLexingURLToken() throws Exception {
    String source = "url(http://fonts.googleapis.com/css?family=Syncopate) ";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.URI, lexer.nextToken());
}
public void testSassVar() throws Exception {
    String source = "$var ";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.SASS_VAR, lexer.nextToken());
}
public void testCPLineComment() throws Exception {
    String source = "//line comment\n";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.LINE_COMMENT, lexer.nextToken());
}
public void testExtendOnlySelector() throws Exception {
    String source = "body%my";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.IDENT, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.SASS_EXTEND_ONLY_SELECTOR, lexer.nextToken());
}
public void testLineCommentAtTheFileEnd() throws Exception {
    String source = "//comment";
    Lexer lexer = createLexer(source);

    assertANTLRToken("//comment", Css3Lexer.LINE_COMMENT, lexer.nextToken());
}
public void testLESS_JS_STRING() throws Exception {
    String source = "`\"hello\".toUpperCase() + '!'`;";
    Lexer lexer = createLexer(source);

    assertANTLRToken("`\"hello\".toUpperCase() + '!'`", Css3Lexer.LESS_JS_STRING, lexer.nextToken());
    assertANTLRToken(";", Css3Lexer.SEMI, lexer.nextToken());
}
public void testIssue236649() throws Exception {
    String source = "url(http://fonts.googleapis.com/css?family=Josefin+Sans|Sigmar+One|Maven+Pro)";
    Lexer lexer = createLexer(source);

    assertANTLRToken("url(http://fonts.googleapis.com/css?family=Josefin+Sans|Sigmar+One|Maven+Pro)",
            Css3Lexer.URI, lexer.nextToken());
}
public Lexer[] getDelegates() { return new Lexer[] {}; }
public String getLexerErrorMessage(Token invalidToken) {
    // prefer the Xtext lexer's own error message when it produced the token
    if (tokenSource instanceof org.eclipse.xtext.parser.antlr.Lexer) {
        return ((org.eclipse.xtext.parser.antlr.Lexer) tokenSource).getErrorMessage(invalidToken);
    }
    // otherwise fall back to a generic message for invalid tokens only
    return (invalidToken.getType() == Token.INVALID_TOKEN_TYPE)
            ? "Invalid token " + invalidToken.getText()
            : null;
}
public static <T extends Lexer> LexerProvider<T> create(Class<T> clazz) { return new LexerProvider<T>(clazz); }
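A minimal usage sketch for the static factory above, reusing InternalN4JSLexer from the get() override earlier in this section; how the provider constructs the lexer is an assumption:

// Hedged sketch; whether get() instantiates the lexer with a null CharStream
// (as in the override shown above) is an assumption.
LexerProvider<InternalN4JSLexer> provider = LexerProvider.create(InternalN4JSLexer.class);
Lexer lexer = provider.get();
lexer.setCharStream(new ANTLRStringStream("var x = 1;"));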
public MyLexerErrorReporter(Lexer lexer, LinkedList<RecognitionExceptionHolder> exceptions, String parsedLine) { super(exceptions, parsedLine); this.lexer = lexer; }
public void testRemUnit() throws Exception {
    String source = "10rad 20rem ";
    Lexer lexer = createLexer(source);

    assertANTLRToken(null, Css3Lexer.ANGLE, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.REM, lexer.nextToken());
    assertANTLRToken(null, Css3Lexer.WS, lexer.nextToken());
}