static void writeEncodedBlock(DataBlockEncoding encoding,
    DataOutputStream dos, final List<Integer> encodedSizes,
    final List<ByteBuffer> encodedBlocks, int blockId,
    boolean includesMemstoreTS) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DoubleOutputStream doubleOutputStream =
      new DoubleOutputStream(dos, baos);

  final int rawBlockSize = writeTestKeyValues(doubleOutputStream,
      blockId, includesMemstoreTS);

  ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
  rawBuf.rewind();

  final int encodedSize;
  final ByteBuffer encodedBuf;
  if (encoding == DataBlockEncoding.NONE) {
    encodedSize = rawBlockSize;
    encodedBuf = rawBuf;
  } else {
    ByteArrayOutputStream encodedOut = new ByteArrayOutputStream();
    encoding.getEncoder().compressKeyValues(
        new DataOutputStream(encodedOut),
        rawBuf.duplicate(), includesMemstoreTS);
    // We need to account for the two-byte encoding algorithm ID that
    // comes after the 24-byte block header but before encoded KVs.
    encodedSize = encodedOut.size() + DataBlockEncoding.ID_SIZE;
    encodedBuf = ByteBuffer.wrap(encodedOut.toByteArray());
  }

  encodedSizes.add(encodedSize);
  encodedBlocks.add(encodedBuf);
}
static void writeEncodedBlock(Algorithm algo, DataBlockEncoding encoding,
    DataOutputStream dos, final List<Integer> encodedSizes,
    final List<ByteBuffer> encodedBlocks, int blockId,
    boolean includesMemstoreTS, byte[] dummyHeader, boolean useTag)
    throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DoubleOutputStream doubleOutputStream = new DoubleOutputStream(dos, baos);
  writeTestKeyValues(doubleOutputStream, blockId, includesMemstoreTS, useTag);
  ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
  rawBuf.rewind();

  DataBlockEncoder encoder = encoding.getEncoder();
  int headerLen = dummyHeader.length;
  byte[] encodedResultWithHeader = null;
  HFileContext meta = new HFileContextBuilder()
      .withCompression(algo)
      .withIncludesMvcc(includesMemstoreTS)
      .withIncludesTags(useTag)
      .build();
  if (encoder != null) {
    HFileBlockEncodingContext encodingCtx =
        encoder.newDataBlockEncodingContext(encoding, dummyHeader, meta);
    encoder.encodeKeyValues(rawBuf, encodingCtx);
    encodedResultWithHeader = encodingCtx.getUncompressedBytesWithHeader();
  } else {
    HFileBlockDefaultEncodingContext defaultEncodingCtx =
        new HFileBlockDefaultEncodingContext(encoding, dummyHeader, meta);
    byte[] rawBufWithHeader = new byte[rawBuf.array().length + headerLen];
    System.arraycopy(rawBuf.array(), 0, rawBufWithHeader, headerLen,
        rawBuf.array().length);
    defaultEncodingCtx.compressAfterEncodingWithBlockType(rawBufWithHeader,
        BlockType.DATA);
    encodedResultWithHeader = defaultEncodingCtx.getUncompressedBytesWithHeader();
  }
  final int encodedSize = encodedResultWithHeader.length - headerLen;
  if (encoder != null) {
    // We need to account for the two-byte encoding algorithm ID that
    // comes after the 24-byte block header but before encoded KVs.
    headerLen += DataBlockEncoding.ID_SIZE;
  }
  byte[] encodedDataSection =
      new byte[encodedResultWithHeader.length - headerLen];
  System.arraycopy(encodedResultWithHeader, headerLen, encodedDataSection,
      0, encodedDataSection.length);
  final ByteBuffer encodedBuf = ByteBuffer.wrap(encodedDataSection);
  encodedSizes.add(encodedSize);
  encodedBlocks.add(encodedBuf);
}
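// A minimal usage sketch of the variant above, assuming it sits in a test
// class that also provides the output stream. The helper name, the chosen
// compression/encoding, and HConstants.HFILEBLOCK_DUMMY_HEADER as the dummy
// header are illustrative assumptions; only writeEncodedBlock itself comes
// from the code above.
static void writeBlocksForEncoding(DataOutputStream dos, int numBlocks,
    boolean includesMemstoreTS, boolean useTag,
    List<Integer> encodedSizes, List<ByteBuffer> encodedBlocks)
    throws IOException {
  for (int blockId = 0; blockId < numBlocks; ++blockId) {
    writeEncodedBlock(Algorithm.GZ, DataBlockEncoding.FAST_DIFF, dos,
        encodedSizes, encodedBlocks, blockId, includesMemstoreTS,
        HConstants.HFILEBLOCK_DUMMY_HEADER, useTag);
  }
  // Each call appends one expected size and one encoded data section, so
  // the two lists stay index-aligned with the blocks written to 'dos'.
}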
/**
 * Starts writing into the block. The previous block's data is discarded.
 *
 * @return the stream the user can write their data into
 * @throws IOException
 */
public DataOutputStream startWriting(BlockType newBlockType,
    boolean cacheOnWrite) throws IOException {
  if (state == State.BLOCK_READY && startOffset != -1) {
    // We had a previous block that was written to a stream at a specific
    // offset. Save that offset as the last offset of a block of that type.
    prevOffsetByType[blockType.ordinal()] = startOffset;
  }

  this.cacheOnWrite = cacheOnWrite;

  startOffset = -1;
  blockType = newBlockType;

  baosOnDisk.reset();
  baosOnDisk.write(DUMMY_HEADER);

  state = State.WRITING;
  if (compressAlgo == NONE) {
    // We do not need a compression stream or a second uncompressed stream
    // for cache-on-write.
    userDataStream = new DataOutputStream(baosOnDisk);
  } else {
    OutputStream compressingOutputStream =
        compressAlgo.createCompressionStream(baosOnDisk, compressor, 0);

    if (cacheOnWrite) {
      // We save uncompressed data in a cache-on-write mode.
      if (baosInMemory == null)
        baosInMemory = new ByteArrayOutputStream();
      baosInMemory.reset();
      baosInMemory.write(DUMMY_HEADER);
      userDataStream = new DataOutputStream(new DoubleOutputStream(
          compressingOutputStream, baosInMemory));
    } else {
      userDataStream = new DataOutputStream(compressingOutputStream);
    }
  }

  return userDataStream;
}
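// A hedged sketch of how a caller drives startWriting(), assuming the method
// lives on an HFileBlock.Writer (as its fields suggest). 'hbw' and 'kvs' are
// made-up stand-ins; finishing and flushing the block is left to the writer's
// own code path and is outside this snippet.
static void writeDataBlock(HFileBlock.Writer hbw, List<KeyValue> kvs,
    boolean cacheOnWrite) throws IOException {
  DataOutputStream out = hbw.startWriting(BlockType.DATA, cacheOnWrite);
  for (KeyValue kv : kvs) {
    // The writer only sees raw bytes between startWriting() and the block
    // flush; callers serialize their own payload into the returned stream.
    out.write(kv.getBuffer(), kv.getOffset(), kv.getLength());
  }
}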
static void writeEncodedBlock(Algorithm algo, DataBlockEncoding encoding,
    DataOutputStream dos, final List<Integer> encodedSizes,
    final List<ByteBuffer> encodedBlocks, int blockId,
    boolean includesMemstoreTS, byte[] dummyHeader) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DoubleOutputStream doubleOutputStream = new DoubleOutputStream(dos, baos);
  writeTestKeyValues(doubleOutputStream, blockId, includesMemstoreTS);
  ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
  rawBuf.rewind();

  DataBlockEncoder encoder = encoding.getEncoder();
  int headerLen = dummyHeader.length;
  byte[] encodedResultWithHeader = null;
  if (encoder != null) {
    HFileBlockEncodingContext encodingCtx =
        encoder.newDataBlockEncodingContext(algo, encoding, dummyHeader);
    encoder.encodeKeyValues(rawBuf, includesMemstoreTS, encodingCtx);
    encodedResultWithHeader = encodingCtx.getUncompressedBytesWithHeader();
  } else {
    HFileBlockDefaultEncodingContext defaultEncodingCtx =
        new HFileBlockDefaultEncodingContext(algo, encoding, dummyHeader);
    byte[] rawBufWithHeader = new byte[rawBuf.array().length + headerLen];
    System.arraycopy(rawBuf.array(), 0, rawBufWithHeader, headerLen,
        rawBuf.array().length);
    defaultEncodingCtx.compressAfterEncodingWithBlockType(rawBufWithHeader,
        BlockType.DATA);
    encodedResultWithHeader = defaultEncodingCtx.getUncompressedBytesWithHeader();
  }
  final int encodedSize = encodedResultWithHeader.length - headerLen;
  if (encoder != null) {
    // We need to account for the two-byte encoding algorithm ID that
    // comes after the 24-byte block header but before encoded KVs.
    headerLen += DataBlockEncoding.ID_SIZE;
  }
  byte[] encodedDataSection =
      new byte[encodedResultWithHeader.length - headerLen];
  System.arraycopy(encodedResultWithHeader, headerLen, encodedDataSection,
      0, encodedDataSection.length);
  final ByteBuffer encodedBuf = ByteBuffer.wrap(encodedDataSection);
  encodedSizes.add(encodedSize);
  encodedBlocks.add(encodedBuf);
}
static void writeEncodedBlock(Algorithm algo, DataBlockEncoding encoding,
    DataOutputStream dos, final List<Integer> encodedSizes,
    final List<ByteBuffer> encodedBlocks, int blockId,
    boolean includesMemstoreTS, byte[] dummyHeader) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DoubleOutputStream doubleOutputStream = new DoubleOutputStream(dos, baos);
  writeTestKeyValues(doubleOutputStream, blockId, includesMemstoreTS);
  ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
  rawBuf.rewind();

  DataBlockEncoder encoder = encoding.getEncoder();
  int headerLen = dummyHeader.length;
  byte[] encodedResultWithHeader = null;
  if (encoder != null) {
    HFileBlockEncodingContext encodingCtx =
        encoder.newDataBlockEncodingContext(algo, encoding, dummyHeader);
    encoder.encodeKeyValues(rawBuf, includesMemstoreTS, encodingCtx);
    encodedResultWithHeader = encodingCtx.getUncompressedBytesWithHeader();
  } else {
    HFileBlockDefaultEncodingContext defaultEncodingCtx =
        new HFileBlockDefaultEncodingContext(algo, encoding, dummyHeader);
    byte[] rawBufWithHeader = new byte[rawBuf.array().length + headerLen];
    System.arraycopy(rawBuf.array(), 0, rawBufWithHeader, headerLen,
        rawBuf.array().length);
    defaultEncodingCtx.compressAfterEncoding(rawBufWithHeader, BlockType.DATA);
    encodedResultWithHeader = defaultEncodingCtx.getUncompressedBytesWithHeader();
  }
  final int encodedSize = encodedResultWithHeader.length - headerLen;
  if (encoder != null) {
    // We need to account for the two-byte encoding algorithm ID that
    // comes after the 24-byte block header but before encoded KVs.
    headerLen += DataBlockEncoding.ID_SIZE;
  }
  byte[] encodedDataSection =
      new byte[encodedResultWithHeader.length - headerLen];
  System.arraycopy(encodedResultWithHeader, headerLen, encodedDataSection,
      0, encodedDataSection.length);
  final ByteBuffer encodedBuf = ByteBuffer.wrap(encodedDataSection);
  encodedSizes.add(encodedSize);
  encodedBlocks.add(encodedBuf);
}