Java class org.apache.lucene.analysis.NumericTokenStream code examples
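The snippets below, collected from several open-source projects, show how org.apache.lucene.analysis.NumericTokenStream is constructed, cached per thread, and consumed.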

Project: Elasticsearch    File: NumericTokenizer.java
protected NumericTokenizer(NumericTokenStream numericTokenStream, char[] buffer, Object extra) throws IOException {
    super(delegatingAttributeFactory(numericTokenStream));
    this.numericTokenStream = numericTokenStream;
    // Add attributes from the numeric token stream; this works because the attribute factory delegates to numericTokenStream
    for (Iterator<Class<? extends Attribute>> it = numericTokenStream.getAttributeClassesIterator(); it.hasNext();) {
        addAttribute(it.next());
    }
    this.extra = extra;
    this.buffer = buffer;
    started = true;
}
Project: Elasticsearch    File: NumberFieldMapper.java
protected NumericTokenStream getCachedStream() {
    if (fieldType().numericPrecisionStep() == 4) {
        return tokenStream4.get();
    } else if (fieldType().numericPrecisionStep() == 8) {
        return tokenStream8.get();
    } else if (fieldType().numericPrecisionStep() == 16) {
        return tokenStream16.get();
    } else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) {
        return tokenStreamMax.get();
    }
    return tokenStream.get();
}
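The tokenStream4 / tokenStream8 / tokenStream16 / tokenStreamMax fields read by getCachedStream() are per-thread caches; the initialValue() overrides from the same NumberFieldMapper further down in this listing suggest they are ThreadLocal instances. A minimal sketch of one such declaration, assuming a plain ThreadLocal and a hypothetical enclosing class (the real field modifiers are not shown in the snippet):

import org.apache.lucene.analysis.NumericTokenStream;

// Sketch only: NumericTokenStream is stateful and not thread-safe, so each
// indexing thread keeps its own reusable instance per precision step.
class CachedNumericStreams {
    static final ThreadLocal<NumericTokenStream> tokenStream4 = new ThreadLocal<NumericTokenStream>() {
        @Override
        protected NumericTokenStream initialValue() {
            return new NumericTokenStream(4); // matches the precisionStep == 4 branch above
        }
    };
}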
Project: search    File: TestTypeTokenFilterFactory.java
public void testCreationWithBlackList() throws Exception {
  TokenFilterFactory factory = tokenFilterFactory("Type",
      "types", "stoptypes-1.txt, stoptypes-2.txt",
      "enablePositionIncrements", "true");
  NumericTokenStream input = new NumericTokenStream();
  input.setIntValue(123);
  factory.create(input);
}
Project: search    File: TestTypeTokenFilterFactory.java
public void testCreationWithWhiteList() throws Exception {
  TokenFilterFactory factory = tokenFilterFactory("Type",
      "types", "stoptypes-1.txt, stoptypes-2.txt",
      "enablePositionIncrements", "true",
      "useWhitelist", "true");
  NumericTokenStream input = new NumericTokenStream();
  input.setIntValue(123);
  factory.create(input);
}
Project: search    File: TestFieldReuse.java
private void assertNumericContents(int value, TokenStream ts) throws IOException {
  assertTrue(ts instanceof NumericTokenStream);
  NumericTermAttribute numericAtt = ts.getAttribute(NumericTermAttribute.class);
  ts.reset();
  boolean seen = false;
  while (ts.incrementToken()) {
    if (numericAtt.getShift() == 0) {
      assertEquals(value, numericAtt.getRawValue());
      seen = true;
    }
  }
  ts.end();
  ts.close();
  assertTrue(seen);
}
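For reference, a self-contained sketch of producing and consuming a NumericTokenStream outside a test, assuming a Lucene 4.x/5.x classpath (the value and precision step are arbitrary):

import java.io.IOException;

import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute;

public class NumericTokenStreamDemo {
    public static void main(String[] args) throws IOException {
        NumericTokenStream stream = new NumericTokenStream(8); // precision step 8
        stream.setLongValue(1234567890L);
        NumericTermAttribute numericAtt = stream.getAttribute(NumericTermAttribute.class);
        stream.reset();
        while (stream.incrementToken()) {
            // shift == 0 is the full-precision token; higher shifts are coarser trie prefixes
            System.out.println("shift=" + numericAtt.getShift() + " raw=" + numericAtt.getRawValue());
        }
        stream.end();
        stream.close();
    }
}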
Project: NYBC    File: TestTypeTokenFilterFactory.java
@Test
public void testCreationWithBlackList() throws Exception {
  TypeTokenFilterFactory typeTokenFilterFactory = new TypeTokenFilterFactory();
  Map<String, String> args = new HashMap<String, String>();
  args.put("types", "stoptypes-1.txt, stoptypes-2.txt");
  args.put("enablePositionIncrements", "false");
  typeTokenFilterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
  typeTokenFilterFactory.init(args);
  NumericTokenStream input = new NumericTokenStream();
  input.setIntValue(123);
  typeTokenFilterFactory.create(input);
}
Project: NYBC    File: TestTypeTokenFilterFactory.java
@Test
public void testCreationWithWhiteList() throws Exception {
  TypeTokenFilterFactory typeTokenFilterFactory = new TypeTokenFilterFactory();
  Map<String, String> args = new HashMap<String, String>();
  args.put("types", "stoptypes-1.txt, stoptypes-2.txt");
  args.put("enablePositionIncrements", "false");
  args.put("useWhitelist","true");
  typeTokenFilterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
  typeTokenFilterFactory.init(args);
  NumericTokenStream input = new NumericTokenStream();
  input.setIntValue(123);
  typeTokenFilterFactory.create(input);
}
Project: NYBC    File: TrieTokenizerFactory.java
public TrieTokenizer(Reader input, TrieTypes type, NumericTokenStream ts) {
  // must share the attribute source with the NumericTokenStream we delegate to
  super(ts, input);
  this.type = type;
  this.ts = ts;
  // dates tend to be longer, especially when math is involved
  termAtt.resizeBuffer( type == TrieTypes.DATE ? 128 : 32 );
}
Project: search-core    File: TrieTokenizerFactory.java
public TrieTokenizer(Reader input, TrieTypes type, NumericTokenStream ts) {
  // must share the attribute source with the NumericTokenStream we delegate to
  super(ts, input);
  this.type = type;
  this.ts = ts;
  // dates tend to be longer, especially when math is involved
  termAtt.resizeBuffer( type == TrieTypes.DATE ? 128 : 32 );
}
Project: Maskana-Gestor-de-Conocimiento    File: TestTypeTokenFilterFactory.java
public void testCreationWithBlackList() throws Exception {
  TokenFilterFactory factory = tokenFilterFactory("Type",
      "types", "stoptypes-1.txt, stoptypes-2.txt",
      "enablePositionIncrements", "true");
  NumericTokenStream input = new NumericTokenStream();
  input.setIntValue(123);
  factory.create(input);
}
Project: Maskana-Gestor-de-Conocimiento    File: TestTypeTokenFilterFactory.java
public void testCreationWithWhiteList() throws Exception {
  TokenFilterFactory factory = tokenFilterFactory("Type",
      "types", "stoptypes-1.txt, stoptypes-2.txt",
      "enablePositionIncrements", "true",
      "useWhitelist", "true");
  NumericTokenStream input = new NumericTokenStream();
  input.setIntValue(123);
  factory.create(input);
}
Project: Elasticsearch    File: NumericLongTokenizer.java
public NumericLongTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}
Project: Elasticsearch    File: NumericLongTokenizer.java
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setLongValue(Long.parseLong(value));
}
Project: Elasticsearch    File: NumericDateTokenizer.java
public NumericDateTokenizer(int precisionStep, char[] buffer, DateTimeFormatter dateTimeFormatter) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, dateTimeFormatter);
}
Project: Elasticsearch    File: NumericDateTokenizer.java
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setLongValue(((DateTimeFormatter) extra).parseMillis(value));
}
Project: Elasticsearch    File: NumericIntegerTokenizer.java
public NumericIntegerTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}
Project: Elasticsearch    File: NumericIntegerTokenizer.java
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setIntValue(Integer.parseInt(value));
}
Project: Elasticsearch    File: NumericDoubleTokenizer.java
public NumericDoubleTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}
Project: Elasticsearch    File: NumericDoubleTokenizer.java
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setDoubleValue(Double.parseDouble(value));
}
Project: Elasticsearch    File: NumericFloatTokenizer.java
public NumericFloatTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}
Project: Elasticsearch    File: NumericFloatTokenizer.java
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setFloatValue(Float.parseFloat(value));
}
Project: Elasticsearch    File: IpFieldMapper.java
public NumericIpTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}
Project: Elasticsearch    File: IpFieldMapper.java
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setLongValue(ipToLong(value));
}
Project: Elasticsearch    File: NumberFieldMapper.java
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(fieldType().numericPrecisionStep());
}
Project: Elasticsearch    File: NumberFieldMapper.java
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(4);
}
Project: Elasticsearch    File: NumberFieldMapper.java
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(8);
}
Project: Elasticsearch    File: NumberFieldMapper.java
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(16);
}
Project: Elasticsearch    File: NumberFieldMapper.java
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(Integer.MAX_VALUE);
}
Project: NYBC    File: TrieTokenizerFactory.java
static NumericTokenStream getNumericTokenStream(int precisionStep) {
  return new NumericTokenStream(precisionStep);
}
Project: search-core    File: TrieTokenizerFactory.java
static NumericTokenStream getNumericTokenStream(int precisionStep) {
  return new NumericTokenStream(precisionStep);
}
Project: Elasticsearch    File: NumericTokenizer.java
protected abstract void setValue(NumericTokenStream tokenStream, String value);