/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @param reader the HFile reader, retained for later loading of Bloom chunks
 * @throws IOException if the metadata cannot be read from {@code meta}
 * @throws IllegalArgumentException if the serialized hash type is unknown
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  // Fields must be read in exactly the order CompoundBloomFilterWriter
  // wrote them; do not reorder these reads.
  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  // Comparator class name is stored as a length-prefixed byte array.
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  // Single-level (depth 1) block index mapping keys to Bloom chunks;
  // its root entries follow the fixed fields in the same stream.
  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
@Override protected void finishInit(final Configuration conf) { if (null != fsBlockWriter) throw new IllegalStateException("finishInit called twice"); fsBlockWriter = new PFileBlockWriter(blockEncoder, hFileContext); boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite(); dataBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter(fsBlockWriter, cacheIndexesOnWrite ? cacheConf.getBlockCache(): null, cacheIndexesOnWrite ? name : null); dataBlockIndexWriter.setMaxChunkSize( HFileBlockIndex.getMaxChunkSize(conf)); inlineBlockWriters.add(dataBlockIndexWriter); // Meta data block index writer metaBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter(); if (LOG.isTraceEnabled()) LOG.trace("Initialized with " + cacheConf); }
/**
 * Walks one store file's data block index and appends candidate split keys
 * ("cutpoints") to {@code cutpoints}, aiming for roughly one cutpoint per
 * {@code splitBlockSize} bytes of file data, limited to keys inside
 * {@code range}.
 *
 * @param cutpoints output list to which qualifying row keys are appended
 * @param fileReader open reader for the store file being sampled
 * @param storeFileInBytes total on-disk size of the store file; divided by
 *        the index entry count to estimate bytes per index entry
 * @param carry running byte count left over from previously processed data;
 *        threaded through {@code addIndexCutpoints} for multi-level indexes
 * @param range (start, end) row range; only keys within it become cutpoints
 * @param splitBlockSize target byte distance between consecutive cutpoints
 * @return the byte count to carry into the next store file
 * @throws IOException if an index block cannot be read
 */
private static int addStoreFileCutpoints(List<byte[]> cutpoints, HFile.Reader fileReader, long storeFileInBytes, int carry, Pair<byte[], byte[]> range, int splitBlockSize) throws IOException {
  HFileBlockIndex.BlockIndexReader indexReader = fileReader.getDataBlockIndexReader();
  int size = indexReader.getRootBlockCount();
  int levels = fileReader.getTrailer().getNumDataIndexLevels();
  if (levels == 1) {
    // Single-level index: root entries point directly at data blocks.
    // Estimate the bytes represented by each entry and emit a cutpoint
    // each time the accumulated estimate crosses splitBlockSize.
    int incrementalSize = (int) (size > 0 ? storeFileInBytes / (float) size : storeFileInBytes);
    // NOTE(review): the incoming `carry` is not folded into sizeCounter
    // here (it starts at 0), unlike the multi-level branch which threads
    // carry through recursion — confirm whether leftover bytes from the
    // previous file should seed this counter.
    int sizeCounter = 0;
    for (int i = 0; i < size; ++i) {
      if (sizeCounter >= splitBlockSize) {
        sizeCounter = 0;
        KeyValue tentative = KeyValue.createKeyValueFromKey(indexReader.getRootBlockKey(i));
        // Only emit keys that fall inside the requested row range.
        if (CellUtils.isKeyValueInRange(tentative, range)) {
          cutpoints.add(tentative.getRow());
        }
      }
      sizeCounter += incrementalSize;
    }
    // Leftover estimate becomes the carry for the next store file.
    return sizeCounter;
  } else {
    // Multi-level index: read each child index block (leaf when only one
    // level remains below the root, intermediate otherwise) and recurse,
    // apportioning the file size evenly across root entries.
    for (int i = 0; i < size; ++i) {
      HFileBlock block = fileReader.readBlock(
          indexReader.getRootBlockOffset(i),
          indexReader.getRootBlockDataSize(i),
          true, true, false, true,
          levels == 2 ? BlockType.LEAF_INDEX : BlockType.INTERMEDIATE_INDEX,
          fileReader.getDataBlockEncoding());
      carry = addIndexCutpoints(fileReader, block.getBufferWithoutHeader(), levels - 1, cutpoints, storeFileInBytes / size, carry, range, splitBlockSize);
    }
    return carry;
  }
}