Example source code for the Java class org.apache.hadoop.io.erasurecode.ECChunk

Project: hadoop-oss    File: TestRawCoderBase.java
/**
 * Tests that the input buffer's position is moved to the end after
 * encode/decode.
 */
protected void testInputPosition(boolean usingDirectBuffer) {
  this.usingDirectBuffer = usingDirectBuffer;
  prepareCoders();
  prepareBufferAllocator(false);

  // verify encode
  ECChunk[] dataChunks = prepareDataChunksForEncoding();
  ECChunk[] parityChunks = prepareParityChunksForEncoding();
  ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks);
  encoder.encode(dataChunks, parityChunks);
  verifyBufferPositionAtEnd(dataChunks);

  // verify decode
  backupAndEraseChunks(clonedDataChunks, parityChunks);
  ECChunk[] inputChunks = prepareInputChunksForDecoding(
      clonedDataChunks, parityChunks);
  ensureOnlyLeastRequiredChunks(inputChunks);
  ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
  decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
  verifyBufferPositionAtEnd(inputChunks);
}
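The assertion behind verifyBufferPositionAtEnd (shown later on this page) leans on standard java.nio semantics: consuming a buffer advances its position toward its limit, so a fully read buffer reports remaining() == 0. A minimal, self-contained sketch of just that behavior:

import java.nio.ByteBuffer;

public class BufferPositionDemo {
  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.wrap(new byte[]{1, 2, 3, 4});
    System.out.println(buf.remaining()); // 4: position 0, limit 4

    buf.get(new byte[4]);                // consume all four bytes
    System.out.println(buf.remaining()); // 0: position reached the limit,
                                         // which is what the test asserts
  }
}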
Project: hadoop-oss    File: TestDummyRawCoder.java
@Override
protected void testCoding(boolean usingDirectBuffer) {
  this.usingDirectBuffer = usingDirectBuffer;
  prepareCoders();

  prepareBufferAllocator(true);
  setAllowChangeInputs(false);

  // Generate data and encode
  ECChunk[] dataChunks = prepareDataChunksForEncoding();
  markChunks(dataChunks);
  ECChunk[] parityChunks = prepareParityChunksForEncoding();
  encoder.encode(dataChunks, parityChunks);
  compareAndVerify(parityChunks, getEmptyChunks(parityChunks.length));

  // Decode
  restoreChunksFromMark(dataChunks);
  backupAndEraseChunks(dataChunks, parityChunks);
  ECChunk[] inputChunks = prepareInputChunksForDecoding(
      dataChunks, parityChunks);
  ensureOnlyLeastRequiredChunks(inputChunks);
  ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
  decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
  compareAndVerify(recoveredChunks, getEmptyChunks(recoveredChunks.length));
}
Project: aliyun-oss-hadoop-fs    File: TestDummyRawCoder.java
@Override
protected void testCoding(boolean usingDirectBuffer) {
  this.usingDirectBuffer = usingDirectBuffer;
  prepareCoders();

  prepareBufferAllocator(true);
  setAllowChangeInputs(false);

  // Generate data and encode
  ECChunk[] dataChunks = prepareDataChunksForEncoding();
  markChunks(dataChunks);
  ECChunk[] parityChunks = prepareParityChunksForEncoding();
  encoder.encode(dataChunks, parityChunks);
  compareAndVerify(parityChunks, getEmptyChunks(parityChunks.length));

  // Decode
  restoreChunksFromMark(dataChunks);
  backupAndEraseChunks(dataChunks, parityChunks);
  ECChunk[] inputChunks = prepareInputChunksForDecoding(
      dataChunks, parityChunks);
  ensureOnlyLeastRequiredChunks(inputChunks);
  ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
  decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
  compareAndVerify(recoveredChunks, getEmptyChunks(recoveredChunks.length));
}
Project: hops    File: CoderUtil.java
/**
 * Convert an array of chunks into an array of ByteBuffers.
 * @param chunks chunks to convert into buffers
 * @return an array of ByteBuffers
 */
static ByteBuffer[] toBuffers(ECChunk[] chunks) {
  ByteBuffer[] buffers = new ByteBuffer[chunks.length];

  ECChunk chunk;
  for (int i = 0; i < chunks.length; i++) {
    chunk = chunks[i];
    if (chunk == null) {
      buffers[i] = null;
    } else {
      buffers[i] = chunk.getBuffer();
      if (chunk.isAllZero()) {
        CoderUtil.resetBuffer(buffers[i], buffers[i].remaining());
      }
    }
  }

  return buffers;
}
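For orientation, a minimal sketch of the conversion in isolation. It assumes only the ECChunk API visible in these snippets (the ByteBuffer constructor, getBuffer(), and the static ECChunk.toBuffers()); ECChunk.toBuffers behaves like the CoderUtil variant above minus the all-zero reset, and null entries, which stand for erased units, pass through as null:

import java.nio.ByteBuffer;
import org.apache.hadoop.io.erasurecode.ECChunk;

public class ToBuffersDemo {
  public static void main(String[] args) {
    ECChunk[] chunks = new ECChunk[2];
    chunks[0] = new ECChunk(ByteBuffer.wrap(new byte[]{1, 2, 3}));
    chunks[1] = null;                           // an erased unit stays null

    ByteBuffer[] buffers = ECChunk.toBuffers(chunks);
    System.out.println(buffers[0].remaining()); // 3
    System.out.println(buffers[1]);             // null
  }
}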
Project: hops    File: TestRawCoderBase.java
/**
 * Tests that the input buffer's position is moved to the end after
 * encode/decode.
 */
protected void testInputPosition(boolean usingDirectBuffer) {
  this.usingDirectBuffer = usingDirectBuffer;
  prepareCoders(true);
  prepareBufferAllocator(false);

  // verify encode
  ECChunk[] dataChunks = prepareDataChunksForEncoding();
  ECChunk[] parityChunks = prepareParityChunksForEncoding();
  ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks);
  encoder.encode(dataChunks, parityChunks);
  verifyBufferPositionAtEnd(dataChunks);

  // verify decode
  backupAndEraseChunks(clonedDataChunks, parityChunks);
  ECChunk[] inputChunks = prepareInputChunksForDecoding(
      clonedDataChunks, parityChunks);
  ensureOnlyLeastRequiredChunks(inputChunks);
  ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
  decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
  verifyBufferPositionAtEnd(inputChunks);
}
Project: hops    File: TestDummyRawCoder.java
@Override
protected void testCoding(boolean usingDirectBuffer) {
  this.usingDirectBuffer = usingDirectBuffer;
  prepareCoders(true);

  prepareBufferAllocator(true);
  setAllowChangeInputs(false);

  // Generate data and encode
  ECChunk[] dataChunks = prepareDataChunksForEncoding();
  markChunks(dataChunks);
  ECChunk[] parityChunks = prepareParityChunksForEncoding();
  encoder.encode(dataChunks, parityChunks);
  compareAndVerify(parityChunks, getEmptyChunks(parityChunks.length));

  // Decode
  restoreChunksFromMark(dataChunks);
  backupAndEraseChunks(dataChunks, parityChunks);
  ECChunk[] inputChunks = prepareInputChunksForDecoding(
      dataChunks, parityChunks);
  ensureOnlyLeastRequiredChunks(inputChunks);
  ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
  decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
  compareAndVerify(recoveredChunks, getEmptyChunks(recoveredChunks.length));
}
Project: hadoop-oss    File: HHXORErasureDecodingStep.java
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
  if (erasedIndexes.length == 0) {
    return;
  }

  ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputChunks);
  ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputChunks);
  performCoding(inputBuffers, outputBuffers);
}
Project: hadoop-oss    File: DumpUtil.java
/**
 * Print data in hex format for an array of chunks.
 * @param header a header line to print before the chunks
 * @param chunks the chunks to dump
 */
public static void dumpChunks(String header, ECChunk[] chunks) {
  System.out.println();
  System.out.println(header);
  for (int i = 0; i < chunks.length; i++) {
    dumpChunk(chunks[i]);
  }
  System.out.println();
}
Project: hadoop-oss    File: DumpUtil.java
/**
 * Print data in hex format for a chunk.
 * @param chunk the chunk to dump; may be null
 */
public static void dumpChunk(ECChunk chunk) {
  String str;
  if (chunk == null) {
    str = "<EMPTY>";
  } else {
    byte[] bytes = chunk.toBytesArray();
    str = DumpUtil.bytesToHex(bytes, 16);
  }
  System.out.println(str);
}
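A hedged usage sketch for the two dump helpers above; the package path for DumpUtil is an assumption (it matches the rawcoder test utilities in recent Hadoop trees) and may differ between the projects listed here:

import java.nio.ByteBuffer;
import org.apache.hadoop.io.erasurecode.ECChunk;
// Assumed location of DumpUtil; adjust to your Hadoop source tree.
import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;

public class DumpDemo {
  public static void main(String[] args) {
    ECChunk[] chunks = {
        new ECChunk(ByteBuffer.wrap(new byte[]{0x0a, 0x0b})),
        null                                    // printed as <EMPTY>
    };
    DumpUtil.dumpChunks("Chunks under test", chunks);
  }
}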
Project: hadoop-oss    File: AbstractRawErasureDecoder.java
@Override
public void decode(ECChunk[] inputs, int[] erasedIndexes,
                   ECChunk[] outputs) {
  ByteBuffer[] newInputs = ECChunk.toBuffers(inputs);
  ByteBuffer[] newOutputs = ECChunk.toBuffers(outputs);
  decode(newInputs, erasedIndexes, newOutputs);
}
Project: hadoop-oss    File: TestHHErasureCoderBase.java
@Override
protected void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length * subPacketSize];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length * subPacketSize];

  for (int i = 0; i < numChunksInBlock; i += subPacketSize) {
    // Pretend that we're reading input chunks from input blocks.
    for (int k = 0; k < subPacketSize; ++k) {
      for (int j = 0; j < inputBlocks.length; ++j) {
        inputChunks[k * inputBlocks.length + j] = ((TestBlock)
                inputBlocks[j]).chunks[i + k];
      }

      // Pretend that we allocate and will write output results to the blocks.
      for (int j = 0; j < outputBlocks.length; ++j) {
        outputChunks[k * outputBlocks.length + j] = allocateOutputChunk();
        ((TestBlock) outputBlocks[j]).chunks[i + k] =
                outputChunks[k * outputBlocks.length + j];
      }
    }

    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }

  codingStep.finish();
}
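To make the interleaved indexing concrete: with subPacketSize = 2 and three input blocks, one pass fills inputChunks as [b0.chunk[i], b1.chunk[i], b2.chunk[i], b0.chunk[i+1], b1.chunk[i+1], b2.chunk[i+1]]; in general, chunk i + k of block j lands at index k * inputBlocks.length + j.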
Project: hadoop-oss    File: TestErasureCoderBase.java
/**
 * This is typically how a coding step should be performed.
 * @param codingStep the coding step to perform
 */
protected void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length];

  for (int i = 0; i < numChunksInBlock; ++i) {
    // Pretend that we're reading input chunks from input blocks.
    for (int j = 0; j < inputBlocks.length; ++j) {
      inputChunks[j] = ((TestBlock) inputBlocks[j]).chunks[i];
    }

    // Pretend that we allocate and will write output results to the blocks.
    for (int j = 0; j < outputBlocks.length; ++j) {
      outputChunks[j] = allocateOutputChunk();
      ((TestBlock) outputBlocks[j]).chunks[i] = outputChunks[j];
    }

    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }

  codingStep.finish();
}
Project: hadoop-oss    File: TestErasureCoderBase.java
/**
 * Generate random data and return a data block.
 * @return a data block filled with random chunks
 */
protected ECBlock generateDataBlock() {
  ECChunk[] chunks = new ECChunk[numChunksInBlock];

  for (int i = 0; i < numChunksInBlock; ++i) {
    chunks[i] = generateDataChunk();
  }

  return new TestBlock(chunks);
}
Project: hadoop-oss    File: TestRawCoderBase.java
protected void ensureOnlyLeastRequiredChunks(ECChunk[] inputChunks) {
  int leastRequiredNum = numDataUnits;
  int erasedNum = erasedDataIndexes.length + erasedParityIndexes.length;
  int goodNum = inputChunks.length - erasedNum;
  int redundantNum = goodNum - leastRequiredNum;

  for (int i = 0; i < inputChunks.length && redundantNum > 0; i++) {
    if (inputChunks[i] != null) {
      inputChunks[i] = null; // Set to null; this chunk isn't actually needed
      redundantNum--;
    }
  }
}
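A worked example with assumed RS(6, 3) parameters: numDataUnits = 6 and inputChunks.length = 9, so with two erased units goodNum = 9 - 2 = 7 and redundantNum = 7 - 6 = 1; exactly one surviving chunk is nulled out, and the decoder sees only the minimum six inputs it needs.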
Project: hadoop-oss    File: TestRawCoderBase.java
private void verifyBufferPositionAtEnd(ECChunk[] inputChunks) {
  for (ECChunk chunk : inputChunks) {
    if (chunk != null) {
      Assert.assertEquals(0, chunk.getBuffer().remaining());
    }
  }
}
Project: hadoop-oss    File: TestDummyRawCoder.java
private ECChunk[] getEmptyChunks(int num) {
  ECChunk[] chunks = new ECChunk[num];
  for (int i = 0; i < chunks.length; i++) {
    chunks[i] = new ECChunk(ByteBuffer.wrap(getZeroChunkBytes()));
  }
  return chunks;
}
Project: aliyun-oss-hadoop-fs    File: DumpUtil.java
/**
 * Print data in hex format for an array of chunks.
 * @param header a header line to print before the chunks
 * @param chunks the chunks to dump
 */
public static void dumpChunks(String header, ECChunk[] chunks) {
  System.out.println();
  System.out.println(header);
  for (int i = 0; i < chunks.length; i++) {
    dumpChunk(chunks[i]);
  }
  System.out.println();
}
Project: aliyun-oss-hadoop-fs    File: DumpUtil.java
/**
 * Print data in hex format for a chunk.
 * @param chunk the chunk to dump; may be null
 */
public static void dumpChunk(ECChunk chunk) {
  String str;
  if (chunk == null) {
    str = "<EMPTY>";
  } else {
    byte[] bytes = chunk.toBytesArray();
    str = DumpUtil.bytesToHex(bytes, 16);
  }
  System.out.println(str);
}
Project: aliyun-oss-hadoop-fs    File: AbstractRawErasureDecoder.java
@Override
public void decode(ECChunk[] inputs, int[] erasedIndexes,
                   ECChunk[] outputs) {
  ByteBuffer[] newInputs = ECChunk.toBuffers(inputs);
  ByteBuffer[] newOutputs = ECChunk.toBuffers(outputs);
  decode(newInputs, erasedIndexes, newOutputs);
}
Project: aliyun-oss-hadoop-fs    File: TestErasureCoderBase.java
/**
 * This is typically how a coding step should be performed.
 * @param codingStep the coding step to perform
 */
private void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length];

  for (int i = 0; i < numChunksInBlock; ++i) {
    // Pretend that we're reading input chunks from input blocks.
    for (int j = 0; j < inputBlocks.length; ++j) {
      inputChunks[j] = ((TestBlock) inputBlocks[j]).chunks[i];
    }

    // Pretend that we allocate and will write output results to the blocks.
    for (int j = 0; j < outputBlocks.length; ++j) {
      outputChunks[j] = allocateOutputChunk();
      ((TestBlock) outputBlocks[j]).chunks[i] = outputChunks[j];
    }

    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }

  codingStep.finish();
}
Project: aliyun-oss-hadoop-fs    File: TestErasureCoderBase.java
/**
 * Generate random data and return a data block.
 * @return a data block filled with random chunks
 */
protected ECBlock generateDataBlock() {
  ECChunk[] chunks = new ECChunk[numChunksInBlock];

  for (int i = 0; i < numChunksInBlock; ++i) {
    chunks[i] = generateDataChunk();
  }

  return new TestBlock(chunks);
}
Project: aliyun-oss-hadoop-fs    File: TestRawCoderBase.java
protected void ensureOnlyLeastRequiredChunks(ECChunk[] inputChunks) {
  int leastRequiredNum = numDataUnits;
  int erasedNum = erasedDataIndexes.length + erasedParityIndexes.length;
  int goodNum = inputChunks.length - erasedNum;
  int redundantNum = goodNum - leastRequiredNum;

  for (int i = 0; i < inputChunks.length && redundantNum > 0; i++) {
    if (inputChunks[i] != null) {
      inputChunks[i] = null; // Set to null; this chunk isn't actually needed
      redundantNum--;
    }
  }
}
Project: aliyun-oss-hadoop-fs    File: TestDummyRawCoder.java
private ECChunk[] getEmptyChunks(int num) {
  ECChunk[] chunks = new ECChunk[num];
  for (int i = 0; i < chunks.length; i++) {
    chunks[i] = new ECChunk(ByteBuffer.wrap(getZeroChunkBytes()));
  }
  return chunks;
}
Project: hops    File: HHXORErasureDecodingStep.java
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
  if (erasedIndexes.length == 0) {
    return;
  }

  ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputChunks);
  ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputChunks);
  performCoding(inputBuffers, outputBuffers);
}
Project: hops    File: DumpUtil.java
/**
 * Print data in hex format for an array of chunks.
 * @param header a header line to print before the chunks
 * @param chunks the chunks to dump
 */
public static void dumpChunks(String header, ECChunk[] chunks) {
  System.out.println();
  System.out.println(header);
  for (int i = 0; i < chunks.length; i++) {
    dumpChunk(chunks[i]);
  }
  System.out.println();
}
Project: hops    File: DumpUtil.java
/**
 * Print data in hex format for a chunk.
 * @param chunk the chunk to dump; may be null
 */
public static void dumpChunk(ECChunk chunk) {
  String str;
  if (chunk == null) {
    str = "<EMPTY>";
  } else {
    byte[] bytes = chunk.toBytesArray();
    str = DumpUtil.bytesToHex(bytes, 16);
  }
  System.out.println(str);
}
Project: hops    File: TestHHErasureCoderBase.java
@Override
protected void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length * subPacketSize];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length * subPacketSize];

  for (int i = 0; i < numChunksInBlock; i += subPacketSize) {
    // Pretend that we're reading input chunks from input blocks.
    for (int k = 0; k < subPacketSize; ++k) {
      for (int j = 0; j < inputBlocks.length; ++j) {
        inputChunks[k * inputBlocks.length + j] = ((TestBlock)
                inputBlocks[j]).chunks[i + k];
      }

      // Pretend that we allocate and will write output results to the blocks.
      for (int j = 0; j < outputBlocks.length; ++j) {
        outputChunks[k * outputBlocks.length + j] = allocateOutputChunk();
        ((TestBlock) outputBlocks[j]).chunks[i + k] =
                outputChunks[k * outputBlocks.length + j];
      }
    }

    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }

  codingStep.finish();
}
Project: hops    File: TestErasureCoderBase.java
/**
 * This is typically how a coding step should be performed.
 * @param codingStep the coding step to perform
 */
protected void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length];

  for (int i = 0; i < numChunksInBlock; ++i) {
    // Pretend that we're reading input chunks from input blocks.
    for (int j = 0; j < inputBlocks.length; ++j) {
      inputChunks[j] = ((TestBlock) inputBlocks[j]).chunks[i];
    }

    // Pretend that we allocate and will write output results to the blocks.
    for (int j = 0; j < outputBlocks.length; ++j) {
      outputChunks[j] = allocateOutputChunk();
      ((TestBlock) outputBlocks[j]).chunks[i] = outputChunks[j];
    }

    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }

  codingStep.finish();
}
Project: hops    File: TestErasureCoderBase.java
/**
 * Generate random data and return a data block.
 * @return a data block filled with random chunks
 */
protected ECBlock generateDataBlock() {
  ECChunk[] chunks = new ECChunk[numChunksInBlock];

  for (int i = 0; i < numChunksInBlock; ++i) {
    chunks[i] = generateDataChunk();
  }

  return new TestBlock(chunks);
}
Project: hops    File: TestRawCoderBase.java
protected void ensureOnlyLeastRequiredChunks(ECChunk[] inputChunks) {
  int leastRequiredNum = numDataUnits;
  int erasedNum = erasedDataIndexes.length + erasedParityIndexes.length;
  int goodNum = inputChunks.length - erasedNum;
  int redundantNum = goodNum - leastRequiredNum;

  for (int i = 0; i < inputChunks.length && redundantNum > 0; i++) {
    if (inputChunks[i] != null) {
      inputChunks[i] = null; // Set to null; this chunk isn't actually needed
      redundantNum--;
    }
  }
}
Project: hops    File: TestRawCoderBase.java
private void verifyBufferPositionAtEnd(ECChunk[] inputChunks) {
  for (ECChunk chunk : inputChunks) {
    if (chunk != null) {
      Assert.assertEquals(0, chunk.getBuffer().remaining());
    }
  }
}
Project: hops    File: TestDummyRawCoder.java
private ECChunk[] getEmptyChunks(int num) {
  ECChunk[] chunks = new ECChunk[num];
  for (int i = 0; i < chunks.length; i++) {
    chunks[i] = new ECChunk(ByteBuffer.wrap(getZeroChunkBytes()));
  }
  return chunks;
}
Project: hadoop-oss    File: ErasureEncodingStep.java
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
  rawEncoder.encode(inputChunks, outputChunks);
}
Project: hadoop-oss    File: HHXORErasureEncodingStep.java
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
  ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputChunks);
  ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputChunks);
  performCoding(inputBuffers, outputBuffers);
}
Project: hadoop-oss    File: ErasureDecodingStep.java
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
  rawDecoder.decode(inputChunks, erasedIndexes, outputChunks);
}
Project: hadoop-oss    File: AbstractRawErasureEncoder.java
@Override
public void encode(ECChunk[] inputs, ECChunk[] outputs) {
  ByteBuffer[] newInputs = ECChunk.toBuffers(inputs);
  ByteBuffer[] newOutputs = ECChunk.toBuffers(outputs);
  encode(newInputs, newOutputs);
}
Project: hadoop-oss    File: TestErasureCoderBase.java
public TestBlock(ECChunk[] chunks) {
  this.chunks = chunks;
}
Project: hadoop-oss    File: TestErasureCoderBase.java
/**
 * Allocate an output block. Note the chunk buffers will be allocated by the
 * caller further up the stack when performing the coding step.
 * @return an output block whose chunks are not yet allocated
 */
protected TestBlock allocateOutputBlock() {
  ECChunk[] chunks = new ECChunk[numChunksInBlock];

  return new TestBlock(chunks);
}
Project: hadoop-oss    File: TestErasureCoderBase.java
/**
 * Clone a block exactly, without affecting the original block.
 * @param block the block to clone
 * @return a new block with the same data
 */
protected TestBlock cloneBlockWithData(TestBlock block) {
  ECChunk[] newChunks = cloneChunksWithData(block.chunks);

  return new TestBlock(newChunks);
}
Project: hadoop-oss    File: TestRawCoderBase.java
private void performTestCoding(int chunkSize, boolean usingSlicedBuffer,
                               boolean useBadInput, boolean useBadOutput,
                               boolean allowChangeInputs) {
  setChunkSize(chunkSize);
  prepareBufferAllocator(usingSlicedBuffer);
  setAllowChangeInputs(allowChangeInputs);

  dumpSetting();

  // Generate data and encode
  ECChunk[] dataChunks = prepareDataChunksForEncoding();
  if (useBadInput) {
    corruptSomeChunk(dataChunks);
  }
  dumpChunks("Testing data chunks", dataChunks);

  ECChunk[] parityChunks = prepareParityChunksForEncoding();

  // Back up all the source chunks for later recovery, because some coders
  // may modify the source data.
  ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks);
  markChunks(dataChunks);

  encoder.encode(dataChunks, parityChunks);
  dumpChunks("Encoded parity chunks", parityChunks);

  if (!allowChangeInputs) {
    restoreChunksFromMark(dataChunks);
    compareAndVerify(clonedDataChunks, dataChunks);
  }

  // Backup and erase some chunks
  ECChunk[] backupChunks = backupAndEraseChunks(clonedDataChunks, parityChunks);

  // Decode
  ECChunk[] inputChunks = prepareInputChunksForDecoding(
      clonedDataChunks, parityChunks);

  // Remove redundant chunks so that only the least required chunks are read.
  ensureOnlyLeastRequiredChunks(inputChunks);

  ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
  if (useBadOutput) {
    corruptSomeChunk(recoveredChunks);
  }

  ECChunk[] clonedInputChunks = null;
  if (!allowChangeInputs) {
    markChunks(inputChunks);
    clonedInputChunks = cloneChunksWithData(inputChunks);
  }

  dumpChunks("Decoding input chunks", inputChunks);
  decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
  dumpChunks("Decoded/recovered chunks", recoveredChunks);

  if (!allowChangeInputs) {
    restoreChunksFromMark(inputChunks);
    compareAndVerify(clonedInputChunks, inputChunks);
  }

  // Compare
  compareAndVerify(backupChunks, recoveredChunks);
}
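Stripping away the dumping, marking, and corruption plumbing, the flow above reduces to the sketch below. It reuses the test class's own helpers and fields (encoder, decoder), so it is an outline of the control flow inside TestRawCoderBase rather than a standalone program:

// Condensed encode -> erase -> decode round trip.
ECChunk[] dataChunks = prepareDataChunksForEncoding();     // random source data
ECChunk[] parityChunks = prepareParityChunksForEncoding(); // empty parity buffers
encoder.encode(dataChunks, parityChunks);

ECChunk[] cloned = cloneChunksWithData(dataChunks);        // coders may touch inputs
ECChunk[] backup = backupAndEraseChunks(cloned, parityChunks);

ECChunk[] inputs = prepareInputChunksForDecoding(cloned, parityChunks);
ensureOnlyLeastRequiredChunks(inputs);                     // feed only what's required
ECChunk[] outputs = prepareOutputChunksForDecoding();
decoder.decode(inputs, getErasedIndexesForDecoding(), outputs);

compareAndVerify(backup, outputs);                         // recovered == erased originals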