Java 类 org.apache.lucene.analysis.CannedBinaryTokenStream 实例源码

项目:search    文件:TestTerms.java   
/**
 * Indexes many documents containing random binary tokens, tracking the
 * lexicographically smallest and largest token generated, then verifies that
 * {@code Terms.getMin()} and {@code Terms.getMax()} report exactly those terms.
 */
public void testTermMinMaxRandom() throws Exception {
  // Deliberately not using @SuppressCodecs("Lucene3x") at the class level:
  // the other test methods should still exercise Terms.getMin/getMax against
  // older (Lucene3x) indices; only this method requires binary terms.
  assumeFalse("test writes binary terms", Codec.getDefault() instanceof Lucene3xCodec);
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  int docCount = atLeast(100);
  BytesRef smallest = null;
  BytesRef largest = null;
  for (int doc = 0; doc < docCount; doc++) {
    Document document = new Document();
    Field field = new TextField("field", "", Field.Store.NO);
    document.add(field);
    int tokenCount = atLeast(10);
    CannedBinaryTokenStream.BinaryToken[] tokens = new CannedBinaryTokenStream.BinaryToken[tokenCount];
    for (int t = 0; t < tokenCount; t++) {
      // Each token is 1-20 random bytes:
      byte[] raw = new byte[TestUtil.nextInt(random(), 1, 20)];
      random().nextBytes(raw);
      BytesRef term = new BytesRef(raw);
      // Track the global min/max term across every token of every document:
      if (smallest == null || term.compareTo(smallest) < 0) {
        smallest = term;
      }
      if (largest == null || term.compareTo(largest) > 0) {
        largest = term;
      }
      tokens[t] = new CannedBinaryTokenStream.BinaryToken(term);
    }
    field.setTokenStream(new CannedBinaryTokenStream(tokens));
    writer.addDocument(document);
  }

  IndexReader reader = writer.getReader();
  Terms terms = MultiFields.getTerms(reader, "field");
  assertEquals(smallest, terms.getMin());
  assertEquals(largest, terms.getMax());

  reader.close();
  writer.close();
  dir.close();
}