/**
 * First pass. Use the tokens that have been computed from the production parser's result and
 * collect the follow elements from those.
 */
private CustomInternalN4JSParser collectFollowElements(TokenSource tokens, boolean strict,
        Set<FollowElement> result) {
    CustomInternalN4JSParser parser = createParser();
    parser.setStrict(strict);
    try {
        ObservableXtextTokenStream tokenStream = new ObservableXtextTokenStream(tokens, parser);
        result.addAll(doGetFollowElements(parser, tokenStream));
    } catch (InfiniteRecursion infinite) {
        // Guards against erroneous infinite recovery loops in Antlr. This is grammar
        // dependent and not expected for N4JS, but the guard is used in the base class
        // and is therefore kept here as well.
        result.addAll(parser.getFollowElements());
    }
    return parser;
}
@Override
protected TokenSource createTokenSource(String string) {
    List<Token> tokens = highlightingParser.getTokens(string);
    Iterator<Token> iter = tokens.iterator();
    return new TokenSource() {
        @Override
        public Token nextToken() {
            if (iter.hasNext()) {
                return iter.next();
            }
            return Token.EOF_TOKEN;
        }

        @Override
        public String getSourceName() {
            return "Text: " + string;
        }
    };
}
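// For illustration only: a minimal sketch (assuming the ANTLR 3 runtime on the
// classpath) of how a TokenSource like the one returned above can be drained for
// inspection. The helper class and method names are hypothetical.
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenSource;

final class TokenSourceDump {
    static void dump(TokenSource source) {
        // Token.EOF (-1) marks the end of the stream regardless of which
        // concrete EOF token instance the source returns.
        Token token = source.nextToken();
        while (token.getType() != Token.EOF) {
            System.out.println(token.getType() + "\t" + token.getText());
            token = source.nextToken();
        }
    }
}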
protected IParseResult doParse(String ruleName, CharStream in, NodeModelBuilder nodeModelBuilder,
        int initialLookAhead) {
    TokenSource tokenSource = createLexer(in);
    XtextTokenStream tokenStream = createTokenStream(tokenSource);
    tokenStream.initCurrentLookAhead(initialLookAhead);
    setInitialHiddenTokens(tokenStream);
    AbstractInternalAntlrParser parser = createParser(tokenStream);
    parser.setTokenTypeMap(getTokenDefProvider().getTokenDefMap());
    parser.setSyntaxErrorProvider(getSyntaxErrorProvider());
    parser.setNodeModelBuilder(nodeModelBuilder);
    parser.setSemanticModelBuilder(getElementFactory());
    IUnorderedGroupHelper helper = getUnorderedGroupHelper().get();
    parser.setUnorderedGroupHelper(helper);
    helper.initializeWith(parser);
    try {
        if (ruleName != null)
            return parser.parse(ruleName);
        return parser.parse();
    } catch (Exception re) {
        throw new ParseException(re.getMessage(), re);
    }
}
public Collection<FE> getFollowElements(String input, boolean strict) {
    TokenSource tokenSource = createTokenSource(input);
    InternalParser parser = createParser();
    parser.setStrict(strict);
    ObservableXtextTokenStream tokens = new ObservableXtextTokenStream(tokenSource, parser);
    tokens.setInitialHiddenTokens(getInitialHiddenTokens());
    parser.setTokenStream(tokens);
    IUnorderedGroupHelper helper = createUnorderedGroupHelper();
    parser.setUnorderedGroupHelper(helper);
    helper.initializeWith(parser);
    tokens.setListener(parser);
    try {
        return Lists.newArrayList(getFollowElements(parser));
    } catch (InfiniteRecursion infinite) {
        return Lists.newArrayList(parser.getFollowElements());
    }
}
public Collection<FollowElement> getFollowElements(String input, boolean strict) {
    CharStream stream = new ANTLRStringStream(input);
    TokenSource tokenSource = createLexer(stream);
    AbstractInternalContentAssistParser parser = createParser();
    parser.setStrict(strict);
    ObservableXtextTokenStream tokens = new ObservableXtextTokenStream(tokenSource, parser);
    tokens.setInitialHiddenTokens(getInitialHiddenTokens());
    parser.setTokenStream(tokens);
    IUnorderedGroupHelper helper = getUnorderedGroupHelper().get();
    parser.setUnorderedGroupHelper(helper);
    helper.initializeWith(parser);
    tokens.setListener(parser);
    try {
        return Lists.newArrayList(getFollowElements(parser));
    } catch (InfiniteRecursion infinite) {
        return Lists.newArrayList(parser.getFollowElements());
    }
}
/**
 * @since 2.4
 */
@Override
protected RepairEntryData getRepairEntryData(DocumentEvent e) throws Exception {
    int tokenStartsAt = 0;
    int tokenInfoIdx = 0;
    TokenSource source = createTokenSource(e.fDocument.get());
    CommonToken token = (CommonToken) source.nextToken();
    // find start idx
    while (true) {
        if (token == Token.EOF_TOKEN) {
            break;
        }
        if (tokenInfoIdx >= getInternalModifyableTokenInfos().size())
            break;
        TokenInfo tokenInfo = getInternalModifyableTokenInfos().get(tokenInfoIdx);
        if (tokenInfo.getAntlrTokenType() != token.getType()
                || token.getStopIndex() - token.getStartIndex() + 1 != tokenInfo.getLength())
            break;
        if (tokenStartsAt + tokenInfo.getLength() > e.fOffset)
            break;
        tokenStartsAt += tokenInfo.getLength();
        tokenInfoIdx++;
        token = (CommonToken) source.nextToken();
    }
    return new RepairEntryData(tokenStartsAt, tokenInfoIdx, token, source);
}
protected void assertTokens(String value, TokenSource tokenSource, String escapedString) {
    if (tokenSource == null)
        return;
    Token token = tokenSource.nextToken();
    if (!escapedString.equals(token.getText())) {
        throw createTokenContentMismatchException(value, escapedString, token);
    }
    String rule1 = getRuleName().toUpperCase();
    String rule2 = getRuleName(token);
    // workaround for the ambiguity between the NAME and ID rules:
    if ("NAME".equals(rule1) && "ID".equals(rule2)) {
        rule2 = "NAME";
    } else if ("ID".equals(rule1) && "NAME".equals(rule2)) {
        rule1 = "NAME";
    }
    if (!rule1.equals(rule2)) {
        throw createTokenTypeMismatchException(value, escapedString, token);
    }
    String reparsedValue = toValue(token.getText(), null);
    if (value != reparsedValue && !value.equals(reparsedValue)) {
        throw createTokenContentMismatchException(value, escapedString, token);
    }
}
/**
 * @param node
 *            the root node of the model to parse
 * @param startOffset
 *            the start offset to consider
 * @param endOffset
 *            the exclusive end offset
 * @param strict
 *            if true, the parser will not use error recovery on the very last token of the input.
 * @return a collection of follow elements.
 */
public Collection<FollowElement> getFollowElements(INode node, int startOffset, int endOffset, boolean strict) {
    Set<FollowElement> result = Sets.newLinkedHashSet();
    TokenSource tokenSource = tokenSourceFactory.toTokenSource(node, startOffset, endOffset);
    CustomInternalN4JSParser parser = collectFollowElements(tokenSource, strict, result);
    adjustASIAndCollectFollowElements(parser, strict, result);
    // Lists are easier to debug
    return Lists.newArrayList(result);
}
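// A hedged sketch of a possible call site for the method above, assuming an
// already parsed XtextResource. The owning field and its type are assumptions,
// not taken from the original code, and the FollowElement package varies by
// Xtext version.
import java.util.Collection;
import org.eclipse.xtext.ide.editor.contentassist.antlr.FollowElement;
import org.eclipse.xtext.nodemodel.ICompositeNode;
import org.eclipse.xtext.resource.XtextResource;

class FollowElementDemo {
    private N4JSFollowElementComputer computer; // hypothetical owner of getFollowElements(...)

    Collection<FollowElement> followAtCursor(XtextResource resource, int cursorOffset) {
        // assumes the resource was parsed successfully
        ICompositeNode root = resource.getParseResult().getRootNode();
        // consider everything from the start of the document up to the cursor;
        // strict == false permits error recovery on the last token
        return computer.getFollowElements(root, 0, cursorOffset, false);
    }
}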
protected boolean isSameTokenSequence(TokenSource originalSource, TokenSource newSource, int expectedLength) {
    Token token = originalSource.nextToken();
    int newLength = 0;
    while (Token.EOF != token.getType()) {
        Token newToken = newSource.nextToken();
        if (token.getType() != newToken.getType()) {
            return false;
        }
        newLength += TokenTool.getLength(newToken);
        token = originalSource.nextToken();
    }
    return newLength == expectedLength;
}
public XtextTokenStream(TokenSource tokenSource, ITokenDefProvider tokenDefProvider) {
    super(tokenSource);
    tokens = new TokenList(500);
    rulenameToTokenType = new HashMap<String, Integer>(tokenDefProvider.getTokenDefMap().size());
    for (Map.Entry<Integer, String> entry : tokenDefProvider.getTokenDefMap().entrySet()) {
        rulenameToTokenType.put(entry.getValue(), entry.getKey());
    }
}
protected void assertTokens(T value, TokenSource tokenSource, String escapedString) {
    if (tokenSource == null)
        return;
    Token token = tokenSource.nextToken();
    if (!escapedString.equals(token.getText())) {
        throw createTokenContentMismatchException(value, escapedString, token);
    }
    if (!getRuleName().toUpperCase().equals(getRuleName(token))) {
        throw createTokenTypeMismatchException(value, escapedString, token);
    }
    T reparsedValue = toValue(token.getText(), null);
    if (value != reparsedValue && !value.equals(reparsedValue)) {
        throw createTokenContentMismatchException(value, escapedString, token);
    }
}
protected TokenSource getTokenSource(String escapedValue) {
    Lexer result = getLexer();
    if (result == null)
        return null;
    result.setCharStream(new ANTLRStringStream(escapedValue));
    return result;
}
@Override
public TokenSource createTokenSource(final CharStream stream) {
    if (this.parser instanceof AbstractAntlrParser) {
        return ((AbstractAntlrParser) this.parser).createLexer(stream);
    }
    throw new IllegalStateException(this.parser.getClass().getName()
            + " should be a subclass of " + AbstractAntlrParser.class.getName());
}
protected boolean equalTokenSequence(TokenSource first, TokenSource second) {
    Token token = null;
    while (!(token = first.nextToken()).equals(Token.EOF_TOKEN)) {
        Token otherToken = second.nextToken();
        if (otherToken.equals(Token.EOF_TOKEN)) {
            return false;
        }
        if (!token.getText().equals(otherToken.getText())) {
            return false;
        }
    }
    return true;
}
public RepairEntryData(int offset, int index, CommonToken newToken, TokenSource lexer) {
    super();
    this.offset = offset;
    this.index = index;
    this.newToken = newToken;
    this.tokenSource = lexer;
}
protected List<TokenInfo> createTokenInfos(String string) {
    List<TokenInfo> result = Lists.newArrayListWithExpectedSize(string.length() / 3);
    TokenSource source = createTokenSource(string);
    CommonToken token = (CommonToken) source.nextToken();
    while (token != Token.EOF_TOKEN) {
        TokenInfo info = createTokenInfo(token);
        result.add(info);
        token = (CommonToken) source.nextToken();
    }
    return result;
}
/**
 * @since 2.4
 */
protected RepairEntryData getRepairEntryData(DocumentEvent e) throws Exception {
    int tokenStartsAt = 0;
    int tokenInfoIdx = 0;
    for (tokenInfoIdx = 0; tokenInfoIdx < getInternalModifyableTokenInfos().size(); ++tokenInfoIdx) {
        TokenInfo oldToken = getInternalModifyableTokenInfos().get(tokenInfoIdx);
        if (tokenStartsAt <= e.getOffset() && tokenStartsAt + oldToken.getLength() >= e.getOffset())
            break;
        tokenStartsAt += oldToken.getLength();
    }
    final TokenSource delegate = createTokenSource(
            e.fDocument.get(tokenStartsAt, e.fDocument.getLength() - tokenStartsAt));
    final int offset = tokenStartsAt;
    // Wrap the delegate so that token offsets are shifted from the relexed
    // substring back to absolute document coordinates.
    TokenSource source = new TokenSource() {
        public Token nextToken() {
            CommonToken commonToken = (CommonToken) delegate.nextToken();
            // force the lazily computed token text before the indices are moved
            commonToken.setText(commonToken.getText());
            commonToken.setStartIndex(commonToken.getStartIndex() + offset);
            commonToken.setStopIndex(commonToken.getStopIndex() + offset);
            return commonToken;
        }

        public String getSourceName() {
            return delegate.getSourceName();
        }
    };
    final CommonToken token = (CommonToken) source.nextToken();
    return new RepairEntryData(offset, tokenInfoIdx, token, source);
}
public Object execParser(String ruleName, String input, int scriptLine) throws Exception {
    ANTLRStringStream is = new ANTLRStringStream(input);
    Class<? extends TokenSource> lexerClass = Class.forName(lexerClassName).asSubclass(TokenSource.class);
    Constructor<? extends TokenSource> lexConstructor = lexerClass.getConstructor(CharStream.class);
    TokenSource lexer = lexConstructor.newInstance(is);
    is.setLine(scriptLine);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    Class<? extends Parser> parserClass = Class.forName(parserClassName).asSubclass(Parser.class);
    Constructor<? extends Parser> parConstructor = parserClass.getConstructor(TokenStream.class);
    Parser parser = parConstructor.newInstance(tokens);
    // set up a customized tree adaptor if necessary
    if (adaptorClassName != null) {
        Method m = parserClass.getMethod("setTreeAdaptor", TreeAdaptor.class);
        Class<? extends TreeAdaptor> adaptorClass = Class.forName(adaptorClassName).asSubclass(TreeAdaptor.class);
        m.invoke(parser, adaptorClass.newInstance());
    }
    Method ruleMethod = parserClass.getMethod(ruleName);
    // invoke the start rule
    return ruleMethod.invoke(parser);
}
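// A hypothetical call site for the reflective driver above; the grammar and
// rule names are assumptions. With lexerClassName = "MyLexer" and
// parserClassName = "MyParser", this invokes MyParser.program() on the token
// stream produced by MyLexer:
Object result = execParser("program", "print 42;", /* scriptLine */ 1);
System.out.println(result);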
static void printTokenStream(CommonTokenStream tokens, Map tokenToName) {
    TokenSource source = tokens.getTokenSource();
    Token curr;
    do {
        curr = source.nextToken();
        System.out.println(curr.getText() + " [" + tokenToName.get(String.valueOf(curr.getType())) + "]");
    } while (-1 != curr.getType()); // -1 == EOF
}
/**
 * Create a new lexer for the given input.
 */
protected TokenSource createLexer(CharStream stream) {
    Lexer lexer = lexerProvider.get();
    lexer.setCharStream(stream);
    return lexer;
}
/**
 * Create a new stream with the given source.
 */
public LazyTokenStream(TokenSource tokenSource, ITokenDefProvider tokenDefProvider) {
    super(tokenSource, tokenDefProvider);
    tokens = new JSTokenList();
}
/**
 * Creates a custom {@link XtextTokenStream} which does not fill its buffer eagerly but pauses at
 * occurrences of {@code '/'}.
 */
@Override
protected LazyTokenStream createTokenStream(TokenSource tokenSource) {
    return new LazyTokenStream(tokenSource, getTokenDefProvider());
}
public static byte[] compile(List<String> smaliTexts, int apiLevel)
        throws RecognitionException, IOException {
    DexBuilder dexBuilder = new DexBuilder(Opcodes.forApi(apiLevel));
    for (String smaliText : smaliTexts) {
        Reader reader = new StringReader(smaliText);
        LexerErrorInterface lexer = new smaliFlexLexer(reader);
        CommonTokenStream tokens = new CommonTokenStream((TokenSource) lexer);
        smaliParser parser = new smaliParser(tokens);
        parser.setVerboseErrors(true);
        parser.setAllowOdex(false);
        parser.setApiLevel(apiLevel);
        smaliParser.smali_file_return result = parser.smali_file();
        if (parser.getNumberOfSyntaxErrors() > 0 || lexer.getNumberOfSyntaxErrors() > 0) {
            throw new RuntimeException(
                    "Error occurred while compiling text:\n" + StringUtils.join(smaliTexts, "\n"));
        }
        CommonTree t = result.getTree();
        CommonTreeNodeStream treeStream = new CommonTreeNodeStream(t);
        treeStream.setTokenStream(tokens);
        smaliTreeWalker dexGen = new smaliTreeWalker(treeStream);
        dexGen.setApiLevel(apiLevel);
        dexGen.setVerboseErrors(true);
        dexGen.setDexBuilder(dexBuilder);
        dexGen.smali_file();
        if (dexGen.getNumberOfSyntaxErrors() > 0) {
            throw new RuntimeException("Error occurred while compiling text");
        }
    }
    MemoryDataStore dataStore = new MemoryDataStore();
    dexBuilder.writeTo(dataStore);
    // TODO(sgjesse): This returns the full backing store from MemoryDataStore, which by default
    // is 1024k bytes. Our dex file reader does not complain though.
    return dataStore.getData();
}
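// Hypothetical usage of the smali assembler above; "SmaliCompiler" stands in for
// whatever class hosts compile(...), and the smali source text is illustrative only.
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;

public final class SmaliCompileDemo {
    public static void main(String[] args) throws Exception {
        // a minimal smali class with no members
        List<String> sources = Collections.singletonList(
                ".class public LHello;\n.super Ljava/lang/Object;\n");
        // assemble against API level 26 and write the resulting dex to disk
        byte[] dex = SmaliCompiler.compile(sources, 26);
        Files.write(Paths.get("classes.dex"), dex);
    }
}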
protected TokenSource createLexer(CharStream stream) {
    Lexer lexer = lexerProvider.get();
    lexer.setCharStream(stream);
    return lexer;
}
protected XtextTokenStream createTokenStream(TokenSource tokenSource) {
    return new XtextTokenStream(tokenSource, getTokenDefProvider());
}
public XtextTokenStream(TokenSource tokenSource, int channel) {
    super(tokenSource, channel);
    tokens = new TokenList(500);
}
protected AbstractIndentationTokenSource(TokenSource delegate) {
    setDelegate(delegate);
}
public void setDelegate(TokenSource delegate) {
    this.delegate = delegate;
}
public TokenSource getDelegate() {
    return delegate;
}
/**
 * @since 2.7
 */
protected void assertTokens(T value, String result) {
    TokenSource tokenSource = getTokenSource(result);
    assertTokens(value, tokenSource, result);
}
@Override
public TokenSource createTokenSource(final CharSequence text) {
    return this.createTokenSource(this.getReader(text));
}
@Override
public TokenSource createTokenSource(final Reader reader) {
    return this.createTokenSource(this.getCharStream(reader));
}
public IndentationAwareUiTestLanguageTokenSource(TokenSource delegate) {
    super(delegate);
}
@Override
protected TokenSource createLexer(CharStream stream) {
    return new IndentationAwareUiTestLanguageTokenSource(super.createLexer(stream));
}
@Override
protected TokenSource createLexer(CharStream stream) {
    return new IndentationAwareTestLanguageTokenSource(super.createLexer(stream));
}
public IndentationAwareTestLanguageTokenSource(TokenSource delegate) {
    super(delegate);
}
@Override
protected TokenSource createLexer(CharStream stream) {
    IndentTokenSource tokenSource = new IndentTokenSource();
    tokenSource.setDelegate(super.createLexer(stream));
    return tokenSource;
}