@Override
public ATNState getReachableTarget(ATNConfig source, Transition trans, int ttype) {
    // Do not follow transitions into rules that are suppressed for the current start index.
    if (trans instanceof RuleTransition) {
        IntervalSet suppressed = getSuppressedSet(startIndex);
        if (suppressed.contains(((RuleTransition)trans).ruleIndex)) {
            return null;
        }
    }

    return super.getReachableTarget(source, trans, ttype);
}
/** Get the set of all alts mentioned by all ATN configurations in this
 *  DFA state.
 */
public Set<Integer> getAltSet() {
    Set<Integer> alts = new HashSet<Integer>();
    if ( configs!=null ) {
        for (ATNConfig c : configs) {
            alts.add(c.alt);
        }
    }

    if ( alts.isEmpty() ) return null;
    return alts;
}
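// Usage sketch (illustrative, not from the source): getAltSet() returns null rather than an
// empty set when there are no configurations, so callers need an explicit null check.
// Assumes the method above belongs to org.antlr.v4.runtime.dfa.DFAState, as its javadoc
// suggests; substitute the actual declaring class if it differs. AltSetExample is hypothetical.
import java.util.Set;

import org.antlr.v4.runtime.dfa.DFAState;

final class AltSetExample {
    static void printAlts(DFAState state) {
        Set<Integer> alts = state.getAltSet();
        if (alts == null) {
            return; // no ATN configurations, hence no alternatives
        }

        for (int alt : alts) {
            System.out.println("alt " + alt);
        }
    }
}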
/**
 * Computes the set of conflicting or ambiguous alternatives from a
 * configuration set, if that information was not already provided by the
 * parser.
 *
 * @param reportedAlts The set of conflicting or ambiguous alternatives, as
 * reported by the parser.
 * @param configs The conflicting or ambiguous configuration set.
 * @return Returns {@code reportedAlts} if it is not {@code null}, otherwise
 * returns the set of alternatives represented in {@code configs}.
 */
@NotNull
protected BitSet getConflictingAlts(@Nullable BitSet reportedAlts, @NotNull ATNConfigSet configs) {
    if (reportedAlts != null) {
        return reportedAlts;
    }

    BitSet result = new BitSet();
    for (ATNConfig config : configs) {
        result.set(config.alt);
    }

    return result;
}
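// A minimal sketch (not from the source) of how the fall-back contract above is typically
// consumed, assuming getConflictingAlts lives in a DiagnosticErrorListener-style class; the
// subclass name and message format below are illustrative only.
import java.util.BitSet;

import org.antlr.v4.runtime.DiagnosticErrorListener;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.Interval;

public class AmbiguityLoggingListener extends DiagnosticErrorListener {
    @Override
    public void reportAmbiguity(Parser recognizer, DFA dfa, int startIndex, int stopIndex,
                                boolean exact, BitSet ambigAlts, ATNConfigSet configs) {
        // Use the parser-reported alternatives when available; otherwise derive them
        // from the alternatives mentioned in the conflicting configuration set.
        BitSet conflictingAlts = getConflictingAlts(ambigAlts, configs);
        String text = recognizer.getTokenStream().getText(Interval.of(startIndex, stopIndex));
        recognizer.notifyErrorListeners(
                String.format("ambiguity over alts %s at input '%s'", conflictingAlts, text));
    }
}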
@RuleDependencies({
    @RuleDependency(recognizer=GrammarParser.class, rule=GrammarParser.RULE_lexerCommandName, version=0, dependents=Dependents.SELF),
    @RuleDependency(recognizer=GrammarParser.class, rule=GrammarParser.RULE_id, version=6, dependents=Dependents.PARENTS),
})
private void analyzeKeywords(Map<RuleContext, CaretReachedException> parseTrees, Map<String, CompletionItem> intermediateResults) {
    boolean maybeLexerCommand = false;
    IntervalSet remainingKeywords = new IntervalSet(KeywordCompletionItem.KEYWORD_TYPES);
    for (Map.Entry<RuleContext, CaretReachedException> entry : parseTrees.entrySet()) {
        CaretReachedException caretReachedException = entry.getValue();
        if (caretReachedException == null || caretReachedException.getTransitions() == null) {
            continue;
        }

        // A caret that stopped inside an id whose parent is a lexerCommandName suggests
        // the user is typing a lexer command rather than a keyword.
        RuleContext finalContext = caretReachedException.getFinalContext();
        if (finalContext.getRuleIndex() == GrammarParser.RULE_id) {
            RuleContext parent = finalContext.getParent();
            if (parent != null && parent.getRuleIndex() == GrammarParser.RULE_lexerCommandName) {
                maybeLexerCommand = true;
            }

            continue;
        }

        // Every keyword token type that appears on a transition taken at the caret
        // becomes a keyword completion item; each keyword is added at most once.
        Map<ATNConfig, List<Transition>> transitions = caretReachedException.getTransitions();
        for (List<Transition> transitionList : transitions.values()) {
            for (Transition transition : transitionList) {
                if (transition.isEpsilon() || transition instanceof WildcardTransition || transition instanceof NotSetTransition) {
                    continue;
                }

                IntervalSet label = transition.label();
                if (label == null) {
                    continue;
                }

                for (int keyword : remainingKeywords.toArray()) {
                    if (label.contains(keyword)) {
                        remainingKeywords.remove(keyword);
                        KeywordCompletionItem item = KeywordCompletionItem.KEYWORD_ITEMS.get(keyword);
                        intermediateResults.put(item.getInsertPrefix().toString(), item);
                    }
                }
            }
        }
    }

    if (maybeLexerCommand) {
        addLexerCommands(intermediateResults);
    }
}
public Map<ATNConfig, List<Transition>> getCaretTransitions() {
    return caretTransitions;
}
@Override
public ATNState getReachableTarget(ATNConfig source, Transition trans, int ttype) {
    if (ttype == CaretToken.CARET_TOKEN_TYPE) {
        // At the caret, a transition is considered viable if its label could match a
        // word-like token type; every transition examined here is also recorded so the
        // completion analysis can inspect it later.
        ATNState target = null;
        if (trans instanceof AtomTransition) {
            AtomTransition at = (AtomTransition)trans;
            if (getWordlikeTokenTypes().contains(at.label)) {
                target = at.target;
            }
        } else if (trans instanceof SetTransition) {
            SetTransition st = (SetTransition)trans;
            boolean not = trans instanceof NotSetTransition;
            // TODO: this could probably be done with an intersects method?
            for (int t : getWordlikeTokenTypes().toArray()) {
                if (!not && st.set.contains(t) || not && !st.set.contains(t)) {
                    target = st.target;
                    break;
                }
            }
        } else if (trans instanceof RangeTransition) {
            RangeTransition rt = (RangeTransition)trans;
            // TODO: there must be a better algorithm here :)
            int[] wordlikeTokenTypes = getWordlikeTokenTypes().toArray();
            int lb = Arrays.binarySearch(wordlikeTokenTypes, rt.from);
            int ub = Arrays.binarySearch(wordlikeTokenTypes, rt.to);
            if (lb >= 0 || ub >= 0 || lb != ub) {
                target = rt.target;
            }
        } else if (trans instanceof WildcardTransition) {
            target = trans.target;
        }

        // Record the transition for this configuration even if no target was found.
        if (caretTransitions == null) {
            caretTransitions = new LinkedHashMap<>();
        }

        List<Transition> configTransitions = caretTransitions.get(source);
        if (configTransitions == null) {
            configTransitions = new ArrayList<>();
            caretTransitions.put(source, configTransitions);
        }

        configTransitions.add(trans);
        return target;
    }

    return super.getReachableTarget(source, trans, ttype);
}
public CaretReachedException(RuleContext finalContext, CaretToken caretToken, Map<ATNConfig, List<Transition>> transitions, RecognitionException cause) {
    super(cause);
    this.finalContext = finalContext;
    this.caretToken = caretToken;
    this.transitions = transitions;
}
public Map<ATNConfig, List<Transition>> getTransitions() {
    return transitions;
}