protected NumericTokenizer(NumericTokenStream numericTokenStream, char[] buffer, Object extra) throws IOException {
    super(delegatingAttributeFactory(numericTokenStream));
    this.numericTokenStream = numericTokenStream;
    // Add attributes from the numeric token stream, this works fine because the attribute factory delegates to numericTokenStream
    for (Iterator<Class<? extends Attribute>> it = numericTokenStream.getAttributeClassesIterator(); it.hasNext();) {
        addAttribute(it.next());
    }
    this.extra = extra;
    this.buffer = buffer;
    started = true;
}
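The delegatingAttributeFactory helper referenced above is not shown; the following is a minimal sketch of such a factory, assuming the Lucene 4.x AttributeSource.AttributeFactory API (an illustration, not the project's verbatim code).

// Sketch (assumption, not verbatim source): every attribute the tokenizer requests is
// created by adding it to the wrapped NumericTokenStream, so tokenizer and stream
// share the same attribute instances.
// Requires org.apache.lucene.util.Attribute, AttributeImpl, AttributeSource.
private static AttributeSource.AttributeFactory delegatingAttributeFactory(final AttributeSource source) {
    return new AttributeSource.AttributeFactory() {
        @Override
        public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
            return (AttributeImpl) source.addAttribute(attClass);
        }
    };
}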
protected NumericTokenStream getCachedStream() {
    // Pick the cached NumericTokenStream matching the field's precision step.
    if (fieldType().numericPrecisionStep() == 4) {
        return tokenStream4.get();
    } else if (fieldType().numericPrecisionStep() == 8) {
        return tokenStream8.get();
    } else if (fieldType().numericPrecisionStep() == 16) {
        return tokenStream16.get();
    } else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) {
        return tokenStreamMax.get();
    }
    return tokenStream.get();
}
public void testCreationWithBlackList() throws Exception {
    TokenFilterFactory factory = tokenFilterFactory("Type",
        "types", "stoptypes-1.txt, stoptypes-2.txt",
        "enablePositionIncrements", "true");
    NumericTokenStream input = new NumericTokenStream();
    input.setIntValue(123);
    factory.create(input);
}
public void testCreationWithWhiteList() throws Exception {
    TokenFilterFactory factory = tokenFilterFactory("Type",
        "types", "stoptypes-1.txt, stoptypes-2.txt",
        "enablePositionIncrements", "true",
        "useWhitelist", "true");
    NumericTokenStream input = new NumericTokenStream();
    input.setIntValue(123);
    factory.create(input);
}
private void assertNumericContents(int value, TokenStream ts) throws IOException {
    assertTrue(ts instanceof NumericTokenStream);
    NumericTermAttribute numericAtt = ts.getAttribute(NumericTermAttribute.class);
    ts.reset();
    boolean seen = false;
    while (ts.incrementToken()) {
        if (numericAtt.getShift() == 0) {
            assertEquals(value, numericAtt.getRawValue());
            seen = true;
        }
    }
    ts.end();
    ts.close();
    assertTrue(seen);
}
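For reference, a self-contained sketch of how a NumericTokenStream is consumed outside of the assertion helper above, assuming the Lucene 4.x org.apache.lucene.analysis.NumericTokenStream API (the demo class name is illustrative): the stream emits one token per precision-step shift, and the shift-0 token carries the full value checked above.

import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute;

public class NumericTokenStreamDemo {
    public static void main(String[] args) throws Exception {
        NumericTokenStream ts = new NumericTokenStream(4); // precisionStep = 4
        ts.setIntValue(123);
        // NumericTokenStream registers NumericTermAttribute itself, so getAttribute works here
        NumericTermAttribute numericAtt = ts.getAttribute(NumericTermAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            // one token per shift: 0, 4, 8, ... below the value size (32 bits for an int)
            System.out.println("shift=" + numericAtt.getShift() + " rawValue=" + numericAtt.getRawValue());
        }
        ts.end();
        ts.close();
    }
}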
@Test
public void testCreationWithBlackList() throws Exception {
    TypeTokenFilterFactory typeTokenFilterFactory = new TypeTokenFilterFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put("types", "stoptypes-1.txt, stoptypes-2.txt");
    args.put("enablePositionIncrements", "false");
    typeTokenFilterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    typeTokenFilterFactory.init(args);
    NumericTokenStream input = new NumericTokenStream();
    input.setIntValue(123);
    typeTokenFilterFactory.create(input);
}
@Test
public void testCreationWithWhiteList() throws Exception {
    TypeTokenFilterFactory typeTokenFilterFactory = new TypeTokenFilterFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put("types", "stoptypes-1.txt, stoptypes-2.txt");
    args.put("enablePositionIncrements", "false");
    args.put("useWhitelist", "true");
    typeTokenFilterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    typeTokenFilterFactory.init(args);
    NumericTokenStream input = new NumericTokenStream();
    input.setIntValue(123);
    typeTokenFilterFactory.create(input);
}
public TrieTokenizer(Reader input, TrieTypes type, NumericTokenStream ts) {
    // must share the attribute source with the NumericTokenStream we delegate to
    super(ts, input);
    this.type = type;
    this.ts = ts;
    // dates tend to be longer, especially when math is involved
    termAtt.resizeBuffer(type == TrieTypes.DATE ? 128 : 32);
}
public NumericLongTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}

@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setLongValue(Long.parseLong(value));
}
public NumericDateTokenizer(int precisionStep, char[] buffer, DateTimeFormatter dateTimeFormatter) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, dateTimeFormatter);
}

@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setLongValue(((DateTimeFormatter) extra).parseMillis(value));
}
public NumericIntegerTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}

@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setIntValue(Integer.parseInt(value));
}
public NumericDoubleTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}

@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setDoubleValue(Double.parseDouble(value));
}
public NumericFloatTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}

@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setFloatValue(Float.parseFloat(value));
}
public NumericIpTokenizer(int precisionStep, char[] buffer) throws IOException {
    super(new NumericTokenStream(precisionStep), buffer, null);
}

@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
    tokenStream.setLongValue(ipToLong(value));
}
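The ipToLong helper used above is not shown; the following is a hypothetical stand-in (an assumption, not the project's actual implementation) that packs a dotted-quad IPv4 address into a long, most significant octet first.

// Hypothetical stand-in for ipToLong(String): packs the four IPv4 octets into one long.
static long ipToLong(String ip) {
    String[] octets = ip.split("\\.");
    if (octets.length != 4) {
        throw new IllegalArgumentException("not an IPv4 address: " + ip);
    }
    long value = 0;
    for (String octet : octets) {
        int part = Integer.parseInt(octet);
        if (part < 0 || part > 255) {
            throw new IllegalArgumentException("not an IPv4 address: " + ip);
        }
        value = (value << 8) | part;
    }
    return value;
}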
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(fieldType().numericPrecisionStep());
}
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(4);
}
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(8);
}
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(16);
}
@Override
protected NumericTokenStream initialValue() {
    return new NumericTokenStream(Integer.MAX_VALUE);
}
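The initialValue() overrides above read like the bodies of per-precision-step thread-local caches, matching the tokenStream4/8/16/Max fields read by getCachedStream() earlier; below is a sketch of how such fields might be declared (an assumption about the surrounding class, not verbatim source).

// Assumed surrounding declarations: one cached NumericTokenStream per thread and per
// precision step, so getCachedStream() can reuse streams instead of allocating new ones.
protected static final ThreadLocal<NumericTokenStream> tokenStream4 = new ThreadLocal<NumericTokenStream>() {
    @Override
    protected NumericTokenStream initialValue() {
        return new NumericTokenStream(4);
    }
};

protected static final ThreadLocal<NumericTokenStream> tokenStreamMax = new ThreadLocal<NumericTokenStream>() {
    @Override
    protected NumericTokenStream initialValue() {
        return new NumericTokenStream(Integer.MAX_VALUE);
    }
};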
static NumericTokenStream getNumericTokenStream(int precisionStep) {
    return new NumericTokenStream(precisionStep);
}
/** Parses {@code value} and sets it on the shared {@code tokenStream} as this tokenizer's numeric type. */
protected abstract void setValue(NumericTokenStream tokenStream, String value);