/**
 * Converts zero-indexed, [closed, open) line ranges in the given source file to character ranges.
 */
public static RangeSet<Integer> lineRangesToCharRanges(String input, RangeSet<Integer> lineRanges) {
  List<Integer> lines = new ArrayList<>();
  Iterators.addAll(lines, Newlines.lineOffsetIterator(input));
  lines.add(input.length() + 1);

  final RangeSet<Integer> characterRanges = TreeRangeSet.create();
  for (Range<Integer> lineRange :
      lineRanges.subRangeSet(Range.closedOpen(0, lines.size() - 1)).asRanges()) {
    int lineStart = lines.get(lineRange.lowerEndpoint());
    // Exclude the trailing newline. This isn't strictly necessary, but handling blank lines
    // as empty ranges is convenient.
    int lineEnd = lines.get(lineRange.upperEndpoint()) - 1;
    Range<Integer> range = Range.closedOpen(lineStart, lineEnd);
    characterRanges.add(range);
  }
  return characterRanges;
}
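A minimal usage sketch for the helper above; the source string and line range are invented for illustration, and it assumes google-java-format's Formatter class (where this helper lives, as the test snippet later in this listing also shows) is on the classpath.

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import com.google.googlejavaformat.java.Formatter;

public class LineRangeDemo {
    public static void main(String[] args) {
        // Illustrative two-line source; we ask for the second line (zero-indexed).
        String src = "class A {}\nclass B {}\n";
        RangeSet<Integer> lineRange = TreeRangeSet.create();
        lineRange.add(Range.closedOpen(1, 2));
        RangeSet<Integer> charRanges = Formatter.lineRangesToCharRanges(src, lineRange);
        // Expected: a single character range covering "class B {}", with the trailing newline excluded.
        System.out.println(charRanges.asRanges());
    }
}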
private static FrontendClientMetadata createFrontedClientMetadata(final long num) {
    final String index = String.valueOf(num);
    final String indexName = "test_" + index;
    final FrontendIdentifier frontendIdentifier =
            FrontendIdentifier.create(MemberName.forName(indexName), FrontendType.forName(index));
    final ClientIdentifier clientIdentifier = ClientIdentifier.create(frontendIdentifier, num);

    final RangeSet<UnsignedLong> purgedHistories = TreeRangeSet.create();
    purgedHistories.add(Range.closed(UnsignedLong.ZERO, UnsignedLong.ONE));

    final Collection<FrontendHistoryMetadata> currentHistories = Collections.singleton(
            new FrontendHistoryMetadata(num, num, true,
                    ImmutableMap.of(UnsignedLong.ZERO, Boolean.TRUE), purgedHistories));

    return new FrontendClientMetadata(clientIdentifier, purgedHistories, currentHistories);
}
@Override
public void process(JCas jcas) throws AnalysisEngineProcessException {
    Collection<Concept> concepts = TypeUtil.getConcepts(jcas);
    List<QueryConcept> qconcepts = ConceptAbstractQueryGenerator
            .createQueryConceptsFromConceptMentions(jcas, concepts, useType, useWeight);
    // filter tokens that are covered by concept mentions
    RangeSet<Integer> cmentionRanges = TreeRangeSet.create();
    concepts.stream().map(TypeUtil::getConceptMentions).flatMap(Collection::stream)
            .map(cmention -> Range.closedOpen(cmention.getBegin(), cmention.getEnd()))
            .forEach(cmentionRanges::add);
    // create an aquery using all tokens with POS in posTags set
    List<Token> tokens = TypeUtil.getOrderedTokens(jcas).stream()
            .filter(token -> !cmentionRanges
                    .encloses(Range.closedOpen(token.getBegin(), token.getEnd())))
            .collect(toList());
    List<QueryConcept> qconceptTokens = TokenSelectionAbstractQueryGenerator
            .createQueryConceptsFromTokens(jcas, tokens, posTags, stoplist);
    qconceptTokens.addAll(qconcepts);
    AbstractQuery aquery = TypeFactory.createAbstractQuery(jcas, qconceptTokens);
    aquery.addToIndexes();
    // create a backup aquery using only nouns
    List<QueryConcept> qconceptNouns = TokenSelectionAbstractQueryGenerator
            .createQueryConceptsFromTokens(jcas, tokens, nounTags, stoplist);
    qconceptNouns.addAll(qconcepts);
    AbstractQuery aqueryNoun = TypeFactory.createAbstractQuery(jcas, qconceptNouns);
    aqueryNoun.addToIndexes();
}
@Test
public void testThatFilterAndTruncateViolatorsFiltersIPsInDoNotBlockRangeSet() {
    config.setRateLimitViolationBlacklistPeriodInMinutes(1);

    RangeSet<Integer> doNotAutoBlockIpRangeSet = TreeRangeSet.create();
    SubnetUtils subnetUtils = new SubnetUtils("50.39.100.193/32");
    subnetUtils.setInclusiveHostCount(true);
    SubnetUtils.SubnetInfo subnetInfo = subnetUtils.getInfo();
    Integer lowIpAsInt = subnetInfo.asInteger(subnetInfo.getLowAddress());
    Integer highIpAsInt = subnetInfo.asInteger(subnetInfo.getHighAddress());
    doNotAutoBlockIpRangeSet.add(Range.closed(lowIpAsInt, highIpAsInt));

    Map<String, ViolationMetaData> violators = new HashMap<>();
    violators.put("50.39.100.193", new ViolationMetaData(new Date(), 2));

    Map<String, ViolationMetaData> actual =
            processor.filterAndTruncateViolators(config, doNotAutoBlockIpRangeSet, violators);

    assertTrue("The violators map should be empty after filtering", actual.size() == 0);
}
@Test
public void testThatFilterAndTruncateViolatorsTruncatesTheLowestOffenders() {
    final int cidrLimit = 2;
    processor.setCidrLimitForIpSetOverride(cidrLimit);
    config.setRateLimitViolationBlacklistPeriodInMinutes(10);
    RangeSet<Integer> rangeSet = TreeRangeSet.create();

    Map<String, ViolationMetaData> violators = new HashMap<>();
    violators.put("50.39.100.193", new ViolationMetaData(new Date(), 3));
    violators.put("50.39.100.191", new ViolationMetaData(new Date(), 1));
    violators.put("50.39.100.192", new ViolationMetaData(new Date(), 2));
    violators.put("50.39.100.194", new ViolationMetaData(new Date(), 4));

    Map<String, ViolationMetaData> actual =
            processor.filterAndTruncateViolators(config, rangeSet, violators);

    assertTrue("The violators map should be the size of the cidr limit",
            actual.size() == cidrLimit);
    assertTrue("violators should contain 193 and 194 the highest offenders",
            actual.containsKey("50.39.100.193") && actual.containsKey("50.39.100.194"));
}
/**
 * Add another protein sequence coverage object's ranges to this one's.
 *
 * @param coverageToAdd The coverage object whose ranges are merged into this one
 */
public void addSequenceCoverageObject( ProteinSequenceCoverage coverageToAdd ) throws Exception {
    if( this.ranges == null )
        this.ranges = TreeRangeSet.create();

    if( this.getProtein().getProteinSequenceVersionId()
            != coverageToAdd.getProtein().getProteinSequenceVersionId() )
        throw new ProxlWebappInternalErrorException(
                "Attempted to add two coverage objects that do not describe the same protein." );

    if( coverageToAdd.getRanges() == null )
        return;

    for( Range<Integer> r : coverageToAdd.getRanges() ) {
        this.ranges.add( r );
    }
}
/**
 * Generates a list of {@link Interval}s equivalent to a given
 * expression. Assumes that all the predicates in the input
 * reference a single column: the timestamp column.
 */
public static List<Interval> createInterval(RexNode e, String timeZone) {
  final List<Range<TimestampString>> ranges =
      extractRanges(e, TimeZone.getTimeZone(timeZone), false);
  if (ranges == null) {
    // We did not succeed, bail out
    return null;
  }
  final TreeRangeSet condensedRanges = TreeRangeSet.create();
  for (Range r : ranges) {
    condensedRanges.add(r);
  }
  if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("Inferred ranges on interval : " + condensedRanges);
  }
  return toInterval(ImmutableList.<Range>copyOf(condensedRanges.asRanges()));
}
private RexNode compareFloorCeil(SqlKind comparison, RexNode operand,
    RexLiteral timeLiteral, TimeUnitRange timeUnit, boolean floor) {
  RangeSet<Calendar> rangeSet = operandRanges.get(operand.toString());
  if (rangeSet == null) {
    rangeSet = ImmutableRangeSet.<Calendar>of().complement();
  }
  final RangeSet<Calendar> s2 = TreeRangeSet.create();
  final Calendar c = timestampValue(timeLiteral);
  final Range<Calendar> range = floor
      ? floorRange(timeUnit, comparison, c)
      : ceilRange(timeUnit, comparison, c);
  s2.add(range);
  // Intersect old range set with new.
  s2.removeAll(rangeSet.complement());
  operandRanges.put(operand.toString(), ImmutableRangeSet.copyOf(s2));
  if (range.isEmpty()) {
    return rexBuilder.makeLiteral(false);
  }
  return toRex(operand, range);
}
public P2PBlobRangeSet constrainMaximumSpan(int maximumSpanSize) {
    int currentSize = 0;
    RangeSet<Integer> constrainedRange = TreeRangeSet.create();
    for (Range<Integer> r : ranges.asRanges()) {
        int sizeOfRange = r.upperEndpoint() - r.lowerEndpoint() + 1;
        if (currentSize + sizeOfRange <= maximumSpanSize) {
            currentSize += sizeOfRange;
            constrainedRange.add(r);
        } else {
            sizeOfRange = maximumSpanSize - currentSize - 1;
            constrainedRange.add(Range.closed(r.lowerEndpoint(), r.lowerEndpoint() + sizeOfRange));
            break;
        }
    }
    return new P2PBlobRangeSet(constrainedRange);
}
@Before
public void setUp() throws Exception {
    ranges = new ArrayList<Range<Long>>();
    ranges.add(Range.closedOpen(0L, 0L));
    ranges.add(Range.closedOpen(0L, 1L));
    ranges.add(Range.closedOpen(0L, 43L));
    ranges.add(Range.closedOpen(1L, 43L));

    rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closedOpen(0L, 0L));
    rangeSet.add(Range.closedOpen(0L, 1L));
    rangeSet.add(Range.closedOpen(0L, 43L));
    rangeSet.add(Range.closedOpen(1L, 43L));

    subject = DNATools.createGappedDNASequence("aaaattttaaaattttaaaa", "subject");
    query = DNATools.createGappedDNASequence("aaaaccccaaaa----aaaa", "query");
    substitutionMatrix = SubstitutionMatrix.getNuc4_4();
    alignmentPair = new AlignmentPair(query, subject, substitutionMatrix);
}
private String context(final String originalDocText, final Response response) {
  // [1,3], [2,5], [8,10] => [1,5], [8,10]
  final List<CharOffsetSpan> charSpans = justificationSpans(response);
  final List<CharOffsetSpan> unitedSpans = Lists.newArrayList();
  // use RangeSet to do this
  final RangeSet<Integer> disconnected = TreeRangeSet.create();
  for (CharOffsetSpan charSpan : charSpans) {
    int startInclusive = charSpan.startInclusive();
    int endInclusive = charSpan.endInclusive();
    startInclusive = (startInclusive - 100) >= 0 ? startInclusive - 100 : 0;
    endInclusive =
        (endInclusive + 100) < originalDocText.length() ? endInclusive + 100 : endInclusive;
    disconnected.add(Range.closed(startInclusive, endInclusive));
  }
  for (Range<Integer> range : disconnected.asRanges()) {
    unitedSpans.add(CharOffsetSpan.fromOffsetsOnly(range.lowerEndpoint(), range.upperEndpoint()));
  }
  Collections.sort(unitedSpans);

  String justificationsString = "";
  if (unitedSpans.get(0).startInclusive() != 0) {
    justificationsString += "[.....]";
  }
  for (CharOffsetSpan span : unitedSpans) {
    justificationsString +=
        originalDocText.substring(span.startInclusive(), span.endInclusive() + 1);
    justificationsString += "[.....]";
  }
  return justificationsString;
}
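The "[1,3], [2,5], [8,10] => [1,5], [8,10]" merging noted in the comment above is exactly the coalescing a TreeRangeSet performs as connected ranges are added; a minimal, self-contained sketch of just that behavior (class name is illustrative, pure Guava):

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class CoalesceDemo {
    public static void main(String[] args) {
        // Overlapping closed ranges are merged as they are added:
        // [1,3], [2,5], [8,10] => [1,5], [8,10]
        RangeSet<Integer> spans = TreeRangeSet.create();
        spans.add(Range.closed(1, 3));
        spans.add(Range.closed(2, 5));
        spans.add(Range.closed(8, 10));
        System.out.println(spans.asRanges()); // [[1..5], [8..10]]
    }
}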
/**
 * Runs the Google Java formatter on the given source, with only the given ranges specified.
 */
public List<Replacement> format(
    SnippetKind kind,
    String source,
    List<Range<Integer>> ranges,
    int initialIndent,
    boolean includeComments)
    throws FormatterException {
  RangeSet<Integer> rangeSet = TreeRangeSet.create();
  for (Range<Integer> range : ranges) {
    rangeSet.add(range);
  }
  if (includeComments) {
    if (kind != SnippetKind.COMPILATION_UNIT) {
      throw new IllegalArgumentException(
          "comment formatting is only supported for compilation units");
    }
    return formatter.getFormatReplacements(source, ranges);
  }
  SnippetWrapper wrapper = snippetWrapper(kind, source, initialIndent);
  ranges = offsetRanges(ranges, wrapper.offset);

  String replacement = formatter.formatSource(wrapper.contents.toString(), ranges);
  replacement =
      replacement.substring(
          wrapper.offset,
          replacement.length() - (wrapper.contents.length() - wrapper.offset - source.length()));

  List<Replacement> replacements = toReplacements(source, replacement);
  List<Replacement> filtered = new ArrayList<>();
  for (Replacement r : replacements) {
    if (rangeSet.encloses(r.getReplaceRange())) {
      filtered.add(r);
    }
  }
  return filtered;
}
public RangeSet<Integer> characterRangesToTokenRanges(Collection<Range<Integer>> characterRanges)
    throws FormatterException {
  RangeSet<Integer> tokenRangeSet = TreeRangeSet.create();
  for (Range<Integer> characterRange0 : characterRanges) {
    Range<Integer> characterRange = characterRange0.canonical(DiscreteDomain.integers());
    tokenRangeSet.add(
        characterRangeToTokenRange(
            characterRange.lowerEndpoint(),
            characterRange.upperEndpoint() - characterRange.lowerEndpoint()));
  }
  return tokenRangeSet;
}
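The canonical(DiscreteDomain.integers()) call is what lets this method treat every incoming range uniformly as [lower, upper): a closed range such as [5, 9] is rewritten to [5, 10) before its length is computed. A quick standalone illustration of that normalization (pure Guava, independent of the formatter):

import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.Range;

public class CanonicalDemo {
    public static void main(String[] args) {
        Range<Integer> closed = Range.closed(5, 9);                              // [5..9]
        Range<Integer> canonical = closed.canonical(DiscreteDomain.integers());  // [5..10)
        int start = canonical.lowerEndpoint();                                   // 5
        int length = canonical.upperEndpoint() - canonical.lowerEndpoint();      // 5 characters
        System.out.println(canonical + " start=" + start + " length=" + length);
    }
}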
static EncodedDiscreteResources of(Set<DiscreteResource> resources, DiscreteResourceCodec codec) {
    RangeSet<Integer> rangeSet = TreeRangeSet.create();
    resources.stream()
            .map(x -> x.valueAs(Object.class))
            .flatMap(Tools::stream)
            .map(x -> codec.encode(x))
            .map(Range::singleton)
            .map(x -> x.canonical(DiscreteDomain.integers()))
            .forEach(rangeSet::add);
    return new EncodedDiscreteResources(rangeSet, codec);
}
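Mapping each encoded value to Range.singleton(v).canonical(...) is what allows consecutive values to collapse into a single range inside the TreeRangeSet. A minimal sketch of that effect using plain integers instead of the ONOS resource types (class name is illustrative):

import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import java.util.stream.IntStream;

public class SingletonCoalesceDemo {
    public static void main(String[] args) {
        RangeSet<Integer> rangeSet = TreeRangeSet.create();
        IntStream.of(1, 2, 3, 7)
                .boxed()
                .map(Range::singleton)                               // [1..1], [2..2], ...
                .map(r -> r.canonical(DiscreteDomain.integers()))    // [1..2), [2..3), ...
                .forEach(rangeSet::add);
        // Adjacent canonical ranges coalesce: [1..4) and [7..8)
        System.out.println(rangeSet.asRanges());
    }
}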
@Override
public EncodedDiscreteResources read(Kryo kryo, Input input, Class<EncodedDiscreteResources> cls) {
    @SuppressWarnings("unchecked")
    List<ClosedOpenRange> ranges = kryo.readObject(input, ArrayList.class);
    DiscreteResourceCodec codec = (DiscreteResourceCodec) kryo.readClassAndObject(input);

    RangeSet<Integer> rangeSet = TreeRangeSet.create();
    ranges.stream()
            .map(x -> Range.closedOpen(x.lowerBound(), x.upperBound()))
            .forEach(rangeSet::add);
    return new EncodedDiscreteResources(rangeSet, codec);
}
public static FrontendHistoryMetadata readFrom(final DataInput in) throws IOException {
    byte header = WritableObjects.readLongHeader(in);
    final long historyId = WritableObjects.readFirstLong(in, header);
    final long cookie = WritableObjects.readSecondLong(in, header);
    final boolean closed = in.readBoolean();

    header = WritableObjects.readLongHeader(in);
    long ls = WritableObjects.readFirstLong(in, header);
    Verify.verify(ls >= 0 && ls <= Integer.MAX_VALUE);
    final int csize = (int) ls;
    ls = WritableObjects.readSecondLong(in, header);
    Verify.verify(ls >= 0 && ls <= Integer.MAX_VALUE);
    final int psize = (int) ls;

    final Map<UnsignedLong, Boolean> closedTransactions = new HashMap<>(csize);
    for (int i = 0; i < csize; ++i) {
        final UnsignedLong key = UnsignedLong.fromLongBits(WritableObjects.readLong(in));
        final Boolean value = Boolean.valueOf(in.readBoolean());
        closedTransactions.put(key, value);
    }

    final RangeSet<UnsignedLong> purgedTransactions = TreeRangeSet.create();
    for (int i = 0; i < psize; ++i) {
        final byte h = WritableObjects.readLongHeader(in);
        final UnsignedLong l = UnsignedLong.fromLongBits(WritableObjects.readFirstLong(in, h));
        final UnsignedLong u = UnsignedLong.fromLongBits(WritableObjects.readSecondLong(in, h));
        purgedTransactions.add(Range.closed(l, u));
    }

    return new FrontendHistoryMetadata(historyId, cookie, closed, closedTransactions,
            purgedTransactions);
}
private RangeSet<Integer> parseMinute() {
    RangeSet<Integer> minutes = TreeRangeSet.create();
    for (String component : getComponents(rawMinute)) {
        minutes.addAll(parseComponent(MINUTE, component));
    }
    return ImmutableRangeSet.copyOf(minutes);
}
private RangeSet<Integer> parseHour() {
    RangeSet<Integer> hours = TreeRangeSet.create();
    for (String component : getComponents(rawHour)) {
        hours.addAll(parseComponent(HOUR, component));
    }
    return ImmutableRangeSet.copyOf(hours);
}
private RangeSet<Integer> parseDayOfWeek() {
    RangeSet<Integer> daysOfWeek = TreeRangeSet.create();
    for (String component : getComponents(rawDayOfWeek)) {
        daysOfWeek.addAll(parseComponent(DAY_OF_WEEK, replaceNameAliases(component, DAY_NAMES)));
    }
    return ImmutableRangeSet.copyOf(daysOfWeek);
}
private RangeSet<Integer> parseMonth() {
    RangeSet<Integer> months = TreeRangeSet.create();
    for (String component : getComponents(rawMonth)) {
        months.addAll(parseComponent(MONTH, replaceNameAliases(component, MONTH_NAMES)));
    }
    return ImmutableRangeSet.copyOf(months);
}
private RangeSet<Integer> parseDayOfMonth() {
    RangeSet<Integer> daysOfMonth = TreeRangeSet.create();
    for (String component : getComponents(rawDayOfMonth)) {
        daysOfMonth.addAll(parseComponent(DAY_OF_MONTH, component));
    }
    return ImmutableRangeSet.copyOf(daysOfMonth);
}
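All five cron-field parsers above follow the same pattern: accumulate the ranges parsed from each comma-separated component into a mutable TreeRangeSet (which merges overlaps automatically), then freeze the result as an ImmutableRangeSet. A simplified standalone sketch of that accumulation, with parseComponent replaced by hard-coded ranges since its implementation is not shown in this listing:

import com.google.common.collect.ImmutableRangeSet;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class CronFieldDemo {
    public static void main(String[] args) {
        // Stand-ins for what parseComponent might return for a field like "0-15,10-20,45":
        RangeSet<Integer> minutes = TreeRangeSet.create();
        minutes.addAll(ImmutableRangeSet.of(Range.closed(0, 15)));
        minutes.addAll(ImmutableRangeSet.of(Range.closed(10, 20)));  // overlaps and merges with [0..15]
        minutes.addAll(ImmutableRangeSet.of(Range.singleton(45)));

        RangeSet<Integer> frozen = ImmutableRangeSet.copyOf(minutes);
        System.out.println(frozen.asRanges());   // [[0..20], [45..45]]
        System.out.println(frozen.contains(17)); // true
    }
}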
public OopFinder(Hotspot hotspot) {
    this.hotspot = hotspot;
    this.oop = hotspot.getStructs().staticStruct(oopDesc.class);
    this.klass = hotspot.getStructs().staticStruct(Klass.class);
    this.largeObjects = TreeRangeSet.create();
    this.heapWordSize = hotspot.getConstants().getHeapWordSize();
}
@Test
public void testThatFilterAndTruncateViolatorsRemoveExpiredBlocks() {
    config.setRateLimitViolationBlacklistPeriodInMinutes(1);
    RangeSet<Integer> rangeSet = TreeRangeSet.create();

    Map<String, ViolationMetaData> violators = new HashMap<>();
    violators.put("50.39.100.194",
            new ViolationMetaData(new Date(new Date().getTime() - 120000), 2));

    Map<String, ViolationMetaData> actual =
            processor.filterAndTruncateViolators(config, rangeSet, violators);

    assertTrue("The violators map should be empty after filtering", actual.isEmpty());
}
@Test
public void testThatFilterAndTruncateViolatorsDoesNotRemoveNonExpiredBlocks() {
    config.setRateLimitViolationBlacklistPeriodInMinutes(1);
    RangeSet<Integer> rangeSet = TreeRangeSet.create();

    Map<String, ViolationMetaData> violators = new HashMap<>();
    violators.put("50.39.100.193", new ViolationMetaData(new Date(), 2));

    Map<String, ViolationMetaData> actual =
            processor.filterAndTruncateViolators(config, rangeSet, violators);

    assertTrue("The violators map should still have one entry after filtering", actual.size() == 1);
}
/**
 * Build the range set of intervals for the entries.
 *
 * @param entries Entries to build the intervals for
 * @param interval Interval to add to each of the entries
 * @param container Map into which to build the interval sets
 */
private static void buildRangeSet(
        List<String> entries,
        Interval interval,
        Map<String, RangeSet<DateTime>> container
) {
    entries.stream()
            .map(entry -> container.computeIfAbsent(entry, ignored -> TreeRangeSet.create()))
            .forEach(set -> set.add(Range.closedOpen(interval.getStart(), interval.getEnd())));
}
public static CodePointSet range(RangeRep... ranges) {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    for ( RangeRep rangeRep : ranges ) {
        rangeSet.addAll( rangeRep.getRangeSet() );
    }
    return new CodePointSet( null, RangeRep.of( rangeSet ) );
}
public static CodePointSet positiveFn(Set<RangeRep> ranges) {
    RangeSet<Integer> rangeSet = TreeRangeSet.create();
    for ( RangeRep rangeRep : ranges ) {
        rangeSet.addAll( rangeRep.getRangeSet() );
    }
    return CodePointSet.range( RangeRep.of( rangeSet ) );
}
@Test
public void testAllFields() {
    assertEquals(ID, base.getId());
    assertEquals(TEXT, base.getText());
    assertEquals(TYPE, base.getType());
    assertEquals(SPAN_1.lowerEndpoint().intValue(), base.beginPosition());
    assertEquals(SPAN_2.upperEndpoint().intValue(), base.endPosition());

    RangeSet<Integer> actual = TreeRangeSet.create();
    actual.add(SPAN_1);
    actual.add(SPAN_2);
    assertEquals(actual, base.getSpans());
    assertEquals(actual.span(), base.totalSpan());
}
/**
 * Add the supplied start and end coordinates as a sequence coverage range.
 *
 * @param start Start coordinate of the covered range (inclusive)
 * @param end End coordinate of the covered range (inclusive)
 */
public void addStartEndBoundary( int start, int end ) {
    if( this.ranges == null )
        this.ranges = TreeRangeSet.create();

    Range<Integer> r = Range.closed( start, end );
    this.ranges.add( r );
}
/**
 * Get the ranges of this protein's sequence that are covered by the
 * peptides that have been added.
 *
 * @return The set of covered ranges, never null
 */
public Set<Range<Integer>> getRanges() {
    if( this.ranges == null )
        this.ranges = TreeRangeSet.create();

    return ranges.asRanges();
}
/**
 * Get the sequence coverage of this protein given the peptides that have
 * been added.
 *
 * @return The fraction of the protein sequence that is covered
 */
public Double getSequenceCoverage() throws Exception {
    int totalResidues = 0;

    if( this.ranges == null )
        this.ranges = TreeRangeSet.create();

    for( Range<Integer> r : this.ranges.asRanges() ) {
        totalResidues += r.upperEndpoint() - r.lowerEndpoint() + 1;
    }

    return (double)totalResidues
            / (double)this.getProtein().getProteinSequenceObject().getSequence().length();
}
/**
 * Sets the dynamism levels.
 *
 * @param levels At least one level must be given. The default level is
 *          <code>.5</code>.
 * @return This, as per the builder pattern.
 */
public Builder setDynamismLevels(Iterable<Double> levels) {
  checkArgument(Iterables.size(levels) > 0);
  final RangeSet<Double> rangeSet = TreeRangeSet.create();
  final Set<Range<Double>> dynamismLevelsB = new LinkedHashSet<>();
  final RangeMap<Double, Double> map = TreeRangeMap.create();
  for (final Double d : levels) {
    checkArgument(d >= 0d && d <= 1d);
    final Range<Double> newRange = createDynRange(d);
    checkArgument(
        rangeSet.subRangeSet(newRange).isEmpty(),
        "Can not add dynamism level %s, it is too close to another level.", d);
    rangeSet.add(newRange);
    dynamismLevelsB.add(newRange);
    map.put(newRange, d);
  }

  final SetMultimap<TimeSeriesType, Range<Double>> timeSeriesTypes =
      LinkedHashMultimap.<TimeSeriesType, Range<Double>>create();
  for (final Range<Double> r : dynamismLevelsB) {
    checkArgument(DYNAMISM_MAP.get(r.lowerEndpoint()) != null);
    checkArgument(DYNAMISM_MAP.get(r.lowerEndpoint()) == DYNAMISM_MAP.get(r.upperEndpoint()));
    timeSeriesTypes.put(DYNAMISM_MAP.get(r.lowerEndpoint()), r);
  }
  dynamismLevels = ImmutableSetMultimap.copyOf(timeSeriesTypes);
  dynamismRangeMap = ImmutableRangeMap.copyOf(map);
  return this;
}
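The overlap guard in the builder above relies on RangeSet.subRangeSet(range).isEmpty(), which is true only when the new range does not intersect anything already in the set. A small standalone illustration of that check, with createDynRange replaced by a hypothetical fixed-width window:

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class OverlapCheckDemo {
    // Hypothetical stand-in for createDynRange: a +/- 0.05 window around a level.
    static Range<Double> window(double d) {
        return Range.closed(d - 0.05, d + 0.05);
    }

    public static void main(String[] args) {
        RangeSet<Double> taken = TreeRangeSet.create();
        taken.add(window(0.5));                                        // roughly [0.45..0.55]
        System.out.println(taken.subRangeSet(window(0.7)).isEmpty());  // true: no overlap, OK to add
        System.out.println(taken.subRangeSet(window(0.52)).isEmpty()); // false: too close to 0.5
    }
}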
@Test
public void whenUsingRangeSet_thenCorrect() {
    RangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 10));
    rangeSet.add(Range.closed(12, 15));

    assertEquals(2, rangeSet.asRanges().size());

    rangeSet.add(Range.closed(10, 12));
    assertTrue(rangeSet.encloses(Range.closed(1, 15)));
    assertEquals(1, rangeSet.asRanges().size());
}
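A short follow-on sketch in the same spirit as the test above, showing two other views a TreeRangeSet exposes, complement() and subRangeSet() (class name is illustrative):

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class RangeSetViewsDemo {
    public static void main(String[] args) {
        RangeSet<Integer> rangeSet = TreeRangeSet.create();
        rangeSet.add(Range.closed(1, 10));
        rangeSet.add(Range.closed(12, 15));

        // complement(): everything not covered, including the gap between 10 and 12
        System.out.println(rangeSet.complement().contains(11)); // true

        // subRangeSet(): a view restricted to a window
        System.out.println(rangeSet.subRangeSet(Range.closed(8, 13)).asRanges()); // [[8..10], [12..13]]
    }
}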
/** Applies the replacements to the given source, and re-formats any edited javadoc. */
private static String applyReplacements(String source, RangeMap<Integer, String> replacements) {
  // save non-empty fixed ranges for reformatting after fixes are applied
  RangeSet<Integer> fixedRanges = TreeRangeSet.create();

  // Apply the fixes in increasing order, adjusting ranges to account for
  // earlier fixes that change the length of the source. The output ranges are
  // needed so we can reformat fixed regions, otherwise the fixes could just
  // be applied in descending order without adjusting offsets.
  StringBuilder sb = new StringBuilder(source);
  int offset = 0;
  for (Map.Entry<Range<Integer>, String> replacement : replacements.asMapOfRanges().entrySet()) {
    Range<Integer> range = replacement.getKey();
    String replaceWith = replacement.getValue();
    int start = offset + range.lowerEndpoint();
    int end = offset + range.upperEndpoint();
    sb.replace(start, end, replaceWith);
    if (!replaceWith.isEmpty()) {
      fixedRanges.add(Range.closedOpen(start, end));
    }
    offset += replaceWith.length() - (range.upperEndpoint() - range.lowerEndpoint());
  }
  String result = sb.toString();

  // If there were any non-empty replaced ranges (e.g. javadoc), reformat the fixed regions.
  // We could avoid formatting twice in --fix-imports=also mode, but that is not the default
  // and removing imports won't usually affect javadoc.
  if (!fixedRanges.isEmpty()) {
    try {
      result = new Formatter().formatSource(result, fixedRanges.asRanges());
    } catch (FormatterException e) {
      // javadoc reformatting is best-effort
    }
  }
  return result;
}
@SafeVarargs
final Set<Range<Integer>> getCharRanges(String input, Range<Integer>... ranges) {
  RangeSet<Integer> rangeSet = TreeRangeSet.create();
  for (Range<Integer> range : ranges) {
    rangeSet.add(range);
  }
  return Formatter.lineRangesToCharRanges(input, rangeSet).asRanges();
}
public RangeSet<Integer> getTouchedRanges() {
    if (touchedRanges == null) {
        touchedRanges = TreeRangeSet.create();
        for (NodeCoverageModel nodeModel : getTouchedExpressionNodes()) {
            touchedRanges.add(Range.closed(nodeModel.getStart(), nodeModel.getEnd() + 1));
        }
        // Now remove untouched ranges
        for (Range<Integer> untouchedRange : getUntouchedRanges().asRanges()) {
            touchedRanges.remove(untouchedRange);
        }
    }
    return touchedRanges;
}
public RangeSet<Integer> getUntouchedRanges() {
    if (untouchedRanges == null) {
        untouchedRanges = TreeRangeSet.create();
        for (NodeCoverageModel nodeModel : getUntouchedExpressionNodes()) {
            untouchedRanges.add(Range.closed(nodeModel.getStart(), nodeModel.getEnd() + 1));
        }
    }
    return untouchedRanges;
}
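The "touched minus untouched" subtraction in getTouchedRanges() is plain RangeSet.remove(); a compact standalone sketch of the same idea with illustrative offsets (not taken from the coverage model above):

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class RangeSubtractionDemo {
    public static void main(String[] args) {
        RangeSet<Integer> touched = TreeRangeSet.create();
        touched.add(Range.closed(0, 100));   // a node spanning offsets 0..100 was executed

        RangeSet<Integer> untouched = TreeRangeSet.create();
        untouched.add(Range.closed(40, 60)); // but this inner node was never executed

        // Subtract every untouched range from the touched set, as getTouchedRanges() does.
        untouched.asRanges().forEach(touched::remove);
        System.out.println(touched.asRanges()); // [[0..40), (60..100]]
    }
}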