Java class com.intellij.util.indexing.IdDataConsumer: usage examples

Project: intellij-ce-playground    File: PlainTextIndexer.java
@Override
@NotNull
public Map<IdIndexEntry, Integer> map(@NotNull final FileContent inputData) {
  final IdDataConsumer consumer = new IdDataConsumer();
  final CharSequence chars = inputData.getContentAsText();
  IdTableBuilding.scanWords(new IdTableBuilding.ScanWordProcessor() {
    @Override
    public void run(final CharSequence chars11, @Nullable char[] charsArray, final int start, final int end) {
      if (charsArray != null) {
        consumer.addOccurrence(charsArray, start, end, (int)UsageSearchContext.IN_PLAIN_TEXT);
      }
      else {
        consumer.addOccurrence(chars11, start, end, (int)UsageSearchContext.IN_PLAIN_TEXT);
      }
    }
  }, chars, 0, chars.length());
  return consumer.getResult();
}
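
The snippet above shows the core IdDataConsumer pattern: feed each word's start/end offsets plus a UsageSearchContext mask into addOccurrence, then collect the Map<IdIndexEntry, Integer> via getResult. A minimal sketch of the same calls outside an indexer (the hard-coded text and word offsets are for illustration only, not taken from the projects above):

static Map<IdIndexEntry, Integer> indexPlainWords() {
  final IdDataConsumer consumer = new IdDataConsumer();
  final CharSequence chars = "foo bar foo";
  // offsets delimit each word: [0,3) "foo", [4,7) "bar", [8,11) "foo"
  consumer.addOccurrence(chars, 0, 3, (int)UsageSearchContext.IN_PLAIN_TEXT);
  consumer.addOccurrence(chars, 4, 7, (int)UsageSearchContext.IN_PLAIN_TEXT);
  consumer.addOccurrence(chars, 8, 11, (int)UsageSearchContext.IN_PLAIN_TEXT);
  return consumer.getResult(); // one IdIndexEntry per distinct word
}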

Project: tools-idea    File: IdTableBuilding.java
@Override
@NotNull
public Map<IdIndexEntry, Integer> map(final FileContent inputData) {
  final IdDataConsumer consumer = new IdDataConsumer();
  final CharSequence chars = inputData.getContentAsText();
  scanWords(new ScanWordProcessor() {
    @Override
    public void run(final CharSequence chars11, @Nullable char[] charsArray, final int start, final int end) {
      if (charsArray != null) {
        consumer.addOccurrence(charsArray, start, end, (int)UsageSearchContext.IN_PLAIN_TEXT);
      } else {
        consumer.addOccurrence(chars11, start, end, (int)UsageSearchContext.IN_PLAIN_TEXT);
      }
    }
  }, chars, 0, chars.length());
  return consumer.getResult();
}

Project: consulo    File: PlainTextIndexer.java
@Override
@Nonnull
public Map<IdIndexEntry, Integer> map(@Nonnull final FileContent inputData) {
  final IdDataConsumer consumer = new IdDataConsumer();
  final CharSequence chars = inputData.getContentAsText();
  IdTableBuilding.scanWords(new IdTableBuilding.ScanWordProcessor() {
    @Override
    public void run(final CharSequence chars11, @Nullable char[] charsArray, final int start, final int end) {
      if (charsArray != null) {
        consumer.addOccurrence(charsArray, start, end, (int)UsageSearchContext.IN_PLAIN_TEXT);
      }
      else {
        consumer.addOccurrence(chars11, start, end, (int)UsageSearchContext.IN_PLAIN_TEXT);
      }
    }
  }, chars, 0, chars.length());
  return consumer.getResult();
}

Project: intellij-ce-playground    File: BaseFilterLexerUtil.java
public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
  ScanContent data = content.getUserData(scanContentKey);
  if (data != null) {
    content.putUserData(scanContentKey, null);
    return data;
  }

  final boolean needTodo = content.getFile().isInLocalFileSystem(); // same as TodoIndex.getFilter().isAcceptable
  final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

  final IdDataConsumer consumer = needIdIndex? new IdDataConsumer():null;
  final OccurrenceConsumer todoOccurrenceConsumer = new OccurrenceConsumer(consumer, needTodo);
  final Lexer filterLexer = indexer.createLexer(todoOccurrenceConsumer);
  filterLexer.start(content.getContentAsText());

  // advance the filter lexer through the whole text; it reports id and todo occurrences to the consumers as it goes
  while (filterLexer.getTokenType() != null) filterLexer.advance();

  Map<TodoIndexEntry,Integer> todoMap = null;
  if (needTodo) {
    for (IndexPattern indexPattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = todoOccurrenceConsumer.getOccurrenceCount(indexPattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<TodoIndexEntry, Integer>();
        todoMap.put(new TodoIndexEntry(indexPattern.getPatternString(), indexPattern.isCaseSensitive()), count);
      }
    }
  }

  data = new ScanContent(
    consumer != null? consumer.getResult():Collections.<IdIndexEntry, Integer>emptyMap(),
    todoMap != null ? todoMap: Collections.<TodoIndexEntry,Integer>emptyMap()
  );
  if (needIdIndex && needTodo) content.putUserData(scanContentKey, data);
  return data;
}
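
For context, the LexerBasedIdIndexer checked for above via instanceof typically only supplies the filter lexer and leaves the scanning to this helper, which is presumably called from its map implementation. A hedged sketch of such an indexer; MyFilterLexer is a hypothetical lexer, and only the createLexer(OccurrenceConsumer) signature is taken from the excerpt:

public class MyIdIndexer extends LexerBasedIdIndexer {
  @Override
  public Lexer createLexer(final OccurrenceConsumer consumer) {
    // hypothetical: wraps the language lexer and forwards word occurrences to
    // `consumer`, which in turn feeds the IdDataConsumer created in scanContent
    return new MyFilterLexer(consumer);
  }
}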

Project: intellij-ce-playground    File: IdTableBuilding.java
@Override
@NotNull
public Map<IdIndexEntry, Integer> map(@NotNull final FileContent inputData) {
  final CharSequence chars = inputData.getContentAsText();
  final char[] charsArray = CharArrayUtil.fromSequenceWithoutCopying(chars);
  final IdDataConsumer consumer = new IdDataConsumer();
  myScanner.processWords(chars, new Processor<WordOccurrence>() {
    @Override
    public boolean process(final WordOccurrence t) {
      if (charsArray != null && t.getBaseText() == chars) {
        consumer.addOccurrence(charsArray, t.getStart(), t.getEnd(), convertToMask(t.getKind()));
      }
      else {
        consumer.addOccurrence(t.getBaseText(), t.getStart(), t.getEnd(), convertToMask(t.getKind()));
      }
      return true;
    }

    private int convertToMask(final WordOccurrence.Kind kind) {
      if (kind == null) {
        return UsageSearchContext.ANY;
      }
      if (kind == WordOccurrence.Kind.CODE) return UsageSearchContext.IN_CODE;
      if (kind == WordOccurrence.Kind.COMMENTS) return UsageSearchContext.IN_COMMENTS;
      if (kind == WordOccurrence.Kind.LITERALS) return UsageSearchContext.IN_STRINGS;
      if (kind == WordOccurrence.Kind.FOREIGN_LANGUAGE) return UsageSearchContext.IN_FOREIGN_LANGUAGES;
      return 0;
    }
  });
  return consumer.getResult();
}
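
This variant is driven by a words scanner (processWords with a Processor<WordOccurrence>) rather than a filter lexer, and translates each occurrence's Kind into a UsageSearchContext mask before calling addOccurrence. A hedged sketch of how the enclosing adapter might hold that scanner; the class name and constructor below are assumptions for illustration, only the myScanner field and processWords call appear in the excerpt:

class WordsScannerIdIndexerAdapter {
  private final WordsScanner myScanner;

  WordsScannerIdIndexerAdapter(final WordsScanner scanner) {
    myScanner = scanner;
  }

  // map(inputData) as in the excerpt above: myScanner.processWords(...) feeds
  // each WordOccurrence into an IdDataConsumer with the converted context mask
}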

Project: tools-idea    File: BaseFilterLexerUtil.java
public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
  ScanContent data = content.getUserData(scanContentKey);
  if (data != null) {
    content.putUserData(scanContentKey, null);
    return data;
  }

  final boolean needTodo = content.getFile().getFileSystem().getProtocol().equals(StandardFileSystems.FILE_PROTOCOL);
  final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

  final IdDataConsumer consumer = needIdIndex? new IdDataConsumer():null;
  final OccurrenceConsumer todoOccurrenceConsumer = new OccurrenceConsumer(consumer, needTodo);
  final Lexer filterLexer = indexer.createLexer(todoOccurrenceConsumer);
  filterLexer.start(content.getContentAsText());

  while (filterLexer.getTokenType() != null) filterLexer.advance();

  Map<TodoIndexEntry,Integer> todoMap = null;
  if (needTodo) {
    for (IndexPattern indexPattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = todoOccurrenceConsumer.getOccurrenceCount(indexPattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<TodoIndexEntry, Integer>();
        todoMap.put(new TodoIndexEntry(indexPattern.getPatternString(), indexPattern.isCaseSensitive()), count);
      }
    }
  }

  data = new ScanContent(
    consumer != null? consumer.getResult():Collections.<IdIndexEntry, Integer>emptyMap(),
    todoMap != null ? todoMap: Collections.<TodoIndexEntry,Integer>emptyMap()
  );
  if (needIdIndex && needTodo) content.putUserData(scanContentKey, data);
  return data;
}

Project: tools-idea    File: IdTableBuilding.java
@Override
@NotNull
public Map<IdIndexEntry, Integer> map(final FileContent inputData) {
  final CharSequence chars = inputData.getContentAsText();
  final char[] charsArray = CharArrayUtil.fromSequenceWithoutCopying(chars);
  final IdDataConsumer consumer = new IdDataConsumer();
  myScanner.processWords(chars, new Processor<WordOccurrence>() {
    @Override
    public boolean process(final WordOccurrence t) {
      if (charsArray != null && t.getBaseText() == chars) {
        consumer.addOccurrence(charsArray, t.getStart(), t.getEnd(), convertToMask(t.getKind()));
      }
      else {
        consumer.addOccurrence(t.getBaseText(), t.getStart(), t.getEnd(), convertToMask(t.getKind()));
      }
      return true;
    }

    private int convertToMask(final WordOccurrence.Kind kind) {
      if (kind == null) return UsageSearchContext.ANY;
      if (kind == WordOccurrence.Kind.CODE) return UsageSearchContext.IN_CODE;
      if (kind == WordOccurrence.Kind.COMMENTS) return UsageSearchContext.IN_COMMENTS;
      if (kind == WordOccurrence.Kind.LITERALS) return UsageSearchContext.IN_STRINGS;
      if (kind == WordOccurrence.Kind.FOREIGN_LANGUAGE) return UsageSearchContext.IN_FOREIGN_LANGUAGES;
      return 0;
    }
  });
  return consumer.getResult();
}

Project: consulo    File: IdTableBuilding.java
@Override
@Nonnull
public Map<IdIndexEntry, Integer> map(final FileContent inputData) {
  final CharSequence chars = inputData.getContentAsText();
  final char[] charsArray = CharArrayUtil.fromSequenceWithoutCopying(chars);
  final IdDataConsumer consumer = new IdDataConsumer();
  myScanner.processWords(chars, new Processor<WordOccurrence>() {
    @Override
    public boolean process(final WordOccurrence t) {
      if (charsArray != null && t.getBaseText() == chars) {
        consumer.addOccurrence(charsArray, t.getStart(), t.getEnd(), convertToMask(t.getKind()));
      }
      else {
        consumer.addOccurrence(t.getBaseText(), t.getStart(), t.getEnd(), convertToMask(t.getKind()));
      }
      return true;
    }

    private int convertToMask(final WordOccurrence.Kind kind) {
      if (kind == null) return UsageSearchContext.ANY;
      if (kind == WordOccurrence.Kind.CODE) return UsageSearchContext.IN_CODE;
      if (kind == WordOccurrence.Kind.COMMENTS) return UsageSearchContext.IN_COMMENTS;
      if (kind == WordOccurrence.Kind.LITERALS) return UsageSearchContext.IN_STRINGS;
      if (kind == WordOccurrence.Kind.FOREIGN_LANGUAGE) return UsageSearchContext.IN_FOREIGN_LANGUAGES;
      return 0;
    }
  });
  return consumer.getResult();
}

Project: consulo    File: BaseFilterLexerUtil.java
public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
  ScanContent data = content.getUserData(scanContentKey);
  if (data != null) {
    content.putUserData(scanContentKey, null);
    return data;
  }

  final boolean needTodo = content.getFile().getFileSystem() instanceof LocalFileSystem;
  final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

  final IdDataConsumer consumer = needIdIndex? new IdDataConsumer():null;
  final OccurrenceConsumer todoOccurrenceConsumer = new OccurrenceConsumer(consumer, needTodo);
  final Lexer filterLexer = indexer.createLexer(todoOccurrenceConsumer);
  filterLexer.start(content.getContentAsText());

  while (filterLexer.getTokenType() != null) filterLexer.advance();

  Map<TodoIndexEntry,Integer> todoMap = null;
  if (needTodo) {
    for (IndexPattern indexPattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = todoOccurrenceConsumer.getOccurrenceCount(indexPattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<TodoIndexEntry, Integer>();
        todoMap.put(new TodoIndexEntry(indexPattern.getPatternString(), indexPattern.isCaseSensitive()), count);
      }
    }
  }

  data = new ScanContent(
    consumer != null? consumer.getResult():Collections.<IdIndexEntry, Integer>emptyMap(),
    todoMap != null ? todoMap: Collections.<TodoIndexEntry,Integer>emptyMap()
  );
  if (needIdIndex && needTodo) content.putUserData(scanContentKey, data);
  return data;
}

Project: intellij-ce-playground    File: OccurrenceConsumer.java
public OccurrenceConsumer(final IdDataConsumer indexDataConsumer, boolean needToDo) {
  myIndexDataConsumer = indexDataConsumer;
  myNeedToDo = needToDo;
}

Project: tools-idea    File: OccurrenceConsumer.java
public OccurrenceConsumer(final IdDataConsumer indexDataConsumer, boolean needToDo) {
  myIndexDataConsumer = indexDataConsumer;
  myNeedToDo = needToDo;
}

Project: consulo    File: OccurrenceConsumer.java
public OccurrenceConsumer(final IdDataConsumer indexDataConsumer, boolean needToDo) {
  myIndexDataConsumer = indexDataConsumer;
  myNeedToDo = needToDo;
}