Java class org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil — example source code

Project: hadoop-oss    File: RSRawEncoder.java
public RSRawEncoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);

  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }

  encodeMatrix = new byte[getNumAllUnits() * numDataUnits];
  RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), numDataUnits);
  if (isAllowingVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, getNumAllUnits());
  }
  gfTables = new byte[getNumAllUnits() * numDataUnits * 32];
  RSUtil.initTables(numDataUnits, numParityUnits, encodeMatrix,
      numDataUnits * numDataUnits, gfTables);
  if (isAllowingVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
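
The constructor above enforces the GF(2^8) limit: every data or parity unit needs its own field element, so the total unit count must stay below the field size. A minimal standalone sketch of that guard, assuming RSUtil.GF is GF(2^8) so getFieldSize() would return 256 (the constant and the 6+3 layout are assumptions for illustration, not taken from the snippet):

public class FieldSizeCheckDemo {
  public static void main(String[] args) {
    int fieldSize = 256;      // assumed field size of GF(2^8)
    int numDataUnits = 6;
    int numParityUnits = 3;   // RS(6,3), a common HDFS policy
    // Mirrors the constructor's guard: total units must fit in the field.
    boolean valid = numDataUnits + numParityUnits < fieldSize;
    System.out.println("RS(6,3) valid: " + valid);  // prints: RS(6,3) valid: true
  }
}
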
Project: hadoop-oss    File: RSRawDecoder.java
private void processErasures(int[] erasedIndexes) {
  this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];

  this.erasureFlags = new boolean[getNumAllUnits()];
  this.numErasedDataUnits = 0;

  for (int i = 0; i < erasedIndexes.length; i++) {
    int index = erasedIndexes[i];
    erasureFlags[index] = true;
    if (index < getNumDataUnits()) {
      numErasedDataUnits++;
    }
  }

  generateDecodeMatrix(erasedIndexes);

  RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
      decodeMatrix, 0, gfTables);
  if (isAllowingVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
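
To make the bookkeeping above concrete, here is a hypothetical standalone run of the same loop: indexes below numDataUnits count as erased data units, everything above as erased parity units (the 6+3 layout and the erased indexes are invented for illustration):

public class ErasureFlagsDemo {
  public static void main(String[] args) {
    int numDataUnits = 6;
    int numAllUnits = 9;            // 6 data + 3 parity, assumed
    int[] erasedIndexes = {2, 7};   // one data unit, one parity unit
    boolean[] erasureFlags = new boolean[numAllUnits];
    int numErasedDataUnits = 0;
    for (int index : erasedIndexes) {
      erasureFlags[index] = true;
      if (index < numDataUnits) {
        numErasedDataUnits++;
      }
    }
    System.out.println(numErasedDataUnits);  // 1: only index 2 is a data unit
  }
}
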
Project: hadoop-oss    File: RSRawEncoderLegacy.java
public RSRawEncoderLegacy(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);

  assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());

  int[] primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
  // compute generating polynomial
  int[] gen = {1};
  int[] poly = new int[2];
  for (int i = 0; i < numParityUnits; i++) {
    poly[0] = primitivePower[i];
    poly[1] = 1;
    gen = RSUtil.GF.multiply(gen, poly);
  }
  // generating polynomial has all generating roots
  generatingPolynomial = gen;
}
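
The loop above builds the generator polynomial as the product (x + r_0)(x + r_1)...(x + r_{m-1}) over the Galois field, growing one degree per parity unit. As a sketch of how the coefficient array grows, here is the same loop shape over ordinary integer polynomials (deliberately not GF(2^8) arithmetic; the roots are made-up stand-ins for primitivePower[i]):

import java.util.Arrays;

public class GenPolyDemo {
  // Plain integer polynomial product; index i holds the coefficient of x^i.
  static int[] multiply(int[] a, int[] b) {
    int[] out = new int[a.length + b.length - 1];
    for (int i = 0; i < a.length; i++) {
      for (int j = 0; j < b.length; j++) {
        out[i + j] += a[i] * b[j];
      }
    }
    return out;
  }

  public static void main(String[] args) {
    int[] roots = {1, 2, 4};                 // stand-ins for primitivePower
    int[] gen = {1};
    for (int r : roots) {
      gen = multiply(gen, new int[]{r, 1});  // poly = r + x, as above
    }
    // (x+1)(x+2)(x+4) = 8 + 14x + 7x^2 + x^3
    System.out.println(Arrays.toString(gen));  // [8, 14, 7, 1]
  }
}
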
Project: aliyun-oss-hadoop-fs    File: RSRawEncoder.java
public RSRawEncoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);

  assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());

  int[] primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
  // compute generating polynomial
  int[] gen = {1};
  int[] poly = new int[2];
  for (int i = 0; i < numParityUnits; i++) {
    poly[0] = primitivePower[i];
    poly[1] = 1;
    gen = RSUtil.GF.multiply(gen, poly);
  }
  // generating polynomial has all generating roots
  generatingPolynomial = gen;
}
Project: hops    File: RSRawEncoder.java
public RSRawEncoder(ErasureCoderOptions coderOptions) {
  super(coderOptions);

  if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }

  encodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), getNumDataUnits());
  if (allowVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), getNumAllUnits());
  }
  gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
  RSUtil.initTables(getNumDataUnits(), getNumParityUnits(), encodeMatrix,
      getNumDataUnits() * getNumDataUnits(), gfTables);
  if (allowVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
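
For context, a hedged usage sketch of this hops-style constructor, assuming the Hadoop 3.x raw coder API shape (ErasureCoderOptions taking the data/parity counts, and the encoder exposing encode(ByteBuffer[], ByteBuffer[])); the buffer sizes are arbitrary:

import java.nio.ByteBuffer;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;

public class RsEncodeUsage {
  public static void main(String[] args) throws Exception {
    RSRawEncoder encoder = new RSRawEncoder(new ErasureCoderOptions(6, 3));

    ByteBuffer[] inputs = new ByteBuffer[6];   // data cells
    for (int i = 0; i < inputs.length; i++) {
      inputs[i] = ByteBuffer.allocate(1024);
    }
    ByteBuffer[] outputs = new ByteBuffer[3];  // parity cells to fill
    for (int i = 0; i < outputs.length; i++) {
      outputs[i] = ByteBuffer.allocate(1024);
    }
    encoder.encode(inputs, outputs);           // computes the three parities
  }
}
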
Project: hops    File: RSRawDecoder.java
@Override
protected void doDecode(ByteArrayDecodingState decodingState) {
  int dataLen = decodingState.decodeLength;
  CoderUtil.resetOutputBuffers(decodingState.outputs,
      decodingState.outputOffsets, dataLen);
  prepareDecoding(decodingState.inputs, decodingState.erasedIndexes);

  byte[][] realInputs = new byte[getNumDataUnits()][];
  int[] realInputOffsets = new int[getNumDataUnits()];
  for (int i = 0; i < getNumDataUnits(); i++) {
    realInputs[i] = decodingState.inputs[validIndexes[i]];
    realInputOffsets[i] = decodingState.inputOffsets[validIndexes[i]];
  }
  RSUtil.encodeData(gfTables, dataLen, realInputs, realInputOffsets,
      decodingState.outputs, decodingState.outputOffsets);
}
Project: hops    File: RSRawDecoder.java
private void processErasures(int[] erasedIndexes) {
  this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];

  this.erasureFlags = new boolean[getNumAllUnits()];
  this.numErasedDataUnits = 0;

  for (int i = 0; i < erasedIndexes.length; i++) {
    int index = erasedIndexes[i];
    erasureFlags[index] = true;
    if (index < getNumDataUnits()) {
      numErasedDataUnits++;
    }
  }

  generateDecodeMatrix(erasedIndexes);

  RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
      decodeMatrix, 0, gfTables);
  if (allowVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
Project: hops    File: RSRawEncoderLegacy.java
public RSRawEncoderLegacy(ErasureCoderOptions coderOptions) {
  super(coderOptions);

  assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());

  int[] primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
      getNumParityUnits());
  // compute generating polynomial
  int[] gen = {1};
  int[] poly = new int[2];
  for (int i = 0; i < getNumParityUnits(); i++) {
    poly[0] = primitivePower[i];
    poly[1] = 1;
    gen = RSUtil.GF.multiply(gen, poly);
  }
  // generating polynomial has all generating roots
  generatingPolynomial = gen;
}
Project: hadoop-oss    File: RSRawDecoderLegacy.java
public RSRawDecoderLegacy(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
            "Invalid numDataUnits and numParityUnits");
  }

  this.errSignature = new int[numParityUnits];
  this.primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
}
Project: hadoop-oss    File: RSRawDecoderLegacy.java
private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
                        ByteBuffer[] outputs) {
  ByteBuffer valid = CoderUtil.findFirstValidInput(inputs);
  int dataLen = valid.remaining();
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
  }

  RSUtil.GF.solveVandermondeSystem(errSignature,
      outputs, erasedIndexes.length);
}
Project: hadoop-oss    File: RSRawDecoderLegacy.java
private void doDecodeImpl(byte[][] inputs, int[] inputOffsets,
                        int dataLen, int[] erasedIndexes,
                        byte[][] outputs, int[] outputOffsets) {
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, inputOffsets, dataLen, outputs[i],
        outputOffsets[i], primitivePower[i]);
  }

  RSUtil.GF.solveVandermondeSystem(errSignature, outputs, outputOffsets,
      erasedIndexes.length, dataLen);
}
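
Both doDecodeImpl variants recover the erased values by solving a Vandermonde linear system built from the primitive powers. A toy 2x2 version over doubles (the real coder solves it in GF(2^8); the numbers here are invented):

public class VandermondeToy {
  public static void main(String[] args) {
    double a0 = 2, a1 = 3;   // stand-ins for errSignature entries
    double y0 = 5, y1 = 13;  // syndromes: s0 + s1 = y0, a0*s0 + a1*s1 = y1
    double s1 = (y1 - a0 * y0) / (a1 - a0);
    double s0 = y0 - s1;
    System.out.println(s0 + " " + s1);  // 2.0 3.0 satisfies both equations
  }
}
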
Project: hadoop-oss    File: RSRawDecoder.java
public RSRawDecoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
            "Invalid getNumDataUnits() and numParityUnits");
  }

  int numAllUnits = getNumDataUnits() + numParityUnits;
  encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
  if (isAllowingVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
  }
}
Project: hadoop-oss    File: RSRawDecoder.java
@Override
protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
                        ByteBuffer[] outputs) {
  prepareDecoding(inputs, erasedIndexes);

  ByteBuffer[] realInputs = new ByteBuffer[getNumDataUnits()];
  for (int i = 0; i < getNumDataUnits(); i++) {
    realInputs[i] = inputs[validIndexes[i]];
  }
  RSUtil.encodeData(gfTables, realInputs, outputs);
}
Project: hadoop-oss    File: RSRawDecoder.java
@Override
protected void doDecode(byte[][] inputs, int[] inputOffsets,
                        int dataLen, int[] erasedIndexes,
                        byte[][] outputs, int[] outputOffsets) {
  prepareDecoding(inputs, erasedIndexes);

  byte[][] realInputs = new byte[getNumDataUnits()][];
  int[] realInputOffsets = new int[getNumDataUnits()];
  for (int i = 0; i < getNumDataUnits(); i++) {
    realInputs[i] = inputs[validIndexes[i]];
    realInputOffsets[i] = inputOffsets[validIndexes[i]];
  }
  RSUtil.encodeData(gfTables, dataLen, realInputs, realInputOffsets,
          outputs, outputOffsets);
}
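
The gather step in both doDecode variants simply selects the first numDataUnits surviving cells via validIndexes, which prepareDecoding is expected to have filled. A hypothetical standalone illustration, using string labels instead of buffers:

import java.util.Arrays;

public class GatherDemo {
  public static void main(String[] args) {
    // 6 data + 3 parity cells; index 1 was erased (null).
    String[] inputs = {"d0", null, "d2", "d3", "d4", "d5", "p0", "p1", "p2"};
    int[] validIndexes = {0, 2, 3, 4, 5, 6};  // first 6 surviving indexes
    String[] realInputs = new String[6];
    for (int i = 0; i < realInputs.length; i++) {
      realInputs[i] = inputs[validIndexes[i]];
    }
    System.out.println(Arrays.toString(realInputs));
    // [d0, d2, d3, d4, d5, p0] -- enough cells to decode from
  }
}
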
Project: hadoop-oss    File: RSRawEncoderLegacy.java
@Override
protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
  // parity units + data units
  ByteBuffer[] all = new ByteBuffer[outputs.length + inputs.length];

  if (isAllowingChangeInputs()) {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(inputs, 0, all, outputs.length, inputs.length);
  } else {
    System.arraycopy(outputs, 0, all, 0, outputs.length);

    /*
     * Note: if this coder were ever used in a production system (it rarely
     * is), this could be optimized to cache and reuse the allocated buffers
     * instead of reallocating them on every call.
     */
    ByteBuffer tmp;
    for (int i = 0; i < inputs.length; i++) {
      tmp = ByteBuffer.allocate(inputs[i].remaining());
      tmp.put(inputs[i]);
      tmp.flip();
      all[outputs.length + i] = tmp;
    }
  }

  // Compute the remainder
  RSUtil.GF.remainder(all, generatingPolynomial);
}
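
The else branch above copies each input because GF.remainder works in place on the combined parity+data array, which is why the coder otherwise needs permission to change the inputs. A minimal sketch of that defensive copy (note that put() advances the source buffer's position; the copy's backing array is then safe to mutate):

import java.nio.ByteBuffer;

public class DefensiveCopyDemo {
  public static void main(String[] args) {
    ByteBuffer input = ByteBuffer.wrap("data".getBytes());
    ByteBuffer tmp = ByteBuffer.allocate(input.remaining());
    tmp.put(input);          // copies the bytes, consuming input's position
    tmp.flip();              // make the copy readable from the start
    tmp.put(0, (byte) 'X');  // mutate the copy only
    System.out.println((char) input.get(0));  // still 'd'
  }
}
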
Project: hadoop-oss    File: RSRawEncoderLegacy.java
@Override
protected void doEncode(byte[][] inputs, int[] inputOffsets,
                        int dataLen, byte[][] outputs,
                        int[] outputOffsets) {
  // parity units + data units
  byte[][] all = new byte[outputs.length + inputs.length][];
  int[] allOffsets = new int[outputOffsets.length + inputOffsets.length];

  if (isAllowingChangeInputs()) {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(inputs, 0, all, outputs.length, inputs.length);

    System.arraycopy(outputOffsets, 0, allOffsets, 0, outputOffsets.length);
    System.arraycopy(inputOffsets, 0, allOffsets,
        outputOffsets.length, inputOffsets.length);
  } else {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(outputOffsets, 0, allOffsets, 0, outputOffsets.length);

    for (int i = 0; i < inputs.length; i++) {
      all[outputs.length + i] = Arrays.copyOfRange(inputs[i],
          inputOffsets[i], inputOffsets[i] + dataLen);
    }
  }

  // Compute the remainder
  RSUtil.GF.remainder(all, allOffsets, dataLen, generatingPolynomial);
}
Project: aliyun-oss-hadoop-fs    File: RSRawEncoder.java
@Override
protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
  // parity units + data units
  ByteBuffer[] all = new ByteBuffer[outputs.length + inputs.length];

  if (isAllowingChangeInputs()) {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(inputs, 0, all, outputs.length, inputs.length);
  } else {
    System.arraycopy(outputs, 0, all, 0, outputs.length);

    /*
     * Note: if this coder were ever used in a production system (it rarely
     * is), this could be optimized to cache and reuse the allocated buffers
     * instead of reallocating them on every call.
     */
    ByteBuffer tmp;
    for (int i = 0; i < inputs.length; i++) {
      tmp = ByteBuffer.allocate(inputs[i].remaining());
      tmp.put(inputs[i]);
      tmp.flip();
      all[outputs.length + i] = tmp;
    }
  }

  // Compute the remainder
  RSUtil.GF.remainder(all, generatingPolynomial);
}
Project: aliyun-oss-hadoop-fs    File: RSRawEncoder.java
@Override
protected void doEncode(byte[][] inputs, int[] inputOffsets,
                        int dataLen, byte[][] outputs,
                        int[] outputOffsets) {
  // parity units + data units
  byte[][] all = new byte[outputs.length + inputs.length][];
  int[] allOffsets = new int[outputOffsets.length + inputOffsets.length];

  if (isAllowingChangeInputs()) {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(inputs, 0, all, outputs.length, inputs.length);

    System.arraycopy(outputOffsets, 0, allOffsets, 0, outputOffsets.length);
    System.arraycopy(inputOffsets, 0, allOffsets,
        outputOffsets.length, inputOffsets.length);
  } else {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(outputOffsets, 0, allOffsets, 0, outputOffsets.length);

    for (int i = 0; i < inputs.length; i++) {
      all[outputs.length + i] = Arrays.copyOfRange(inputs[i],
          inputOffsets[i], inputOffsets[i] + dataLen);
    }
  }

  // Compute the remainder
  RSUtil.GF.remainder(all, allOffsets, dataLen, generatingPolynomial);
}
Project: aliyun-oss-hadoop-fs    File: RSRawDecoder.java
public RSRawDecoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
            "Invalid numDataUnits and numParityUnits");
  }

  this.errSignature = new int[numParityUnits];
  this.primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
}
Project: aliyun-oss-hadoop-fs    File: RSRawDecoder.java
private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
                        ByteBuffer[] outputs) {
  ByteBuffer valid = findFirstValidInput(inputs);
  int dataLen = valid.remaining();
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
  }

  RSUtil.GF.solveVandermondeSystem(errSignature,
      outputs, erasedIndexes.length);
}
Project: aliyun-oss-hadoop-fs    File: RSRawDecoder.java
private void doDecodeImpl(byte[][] inputs, int[] inputOffsets,
                        int dataLen, int[] erasedIndexes,
                        byte[][] outputs, int[] outputOffsets) {
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, inputOffsets, dataLen, outputs[i],
        outputOffsets[i], primitivePower[i]);
  }

  RSUtil.GF.solveVandermondeSystem(errSignature, outputs, outputOffsets,
      erasedIndexes.length, dataLen);
}
Project: hops    File: RSRawEncoder.java
@Override
protected void doEncode(ByteArrayEncodingState encodingState) {
  CoderUtil.resetOutputBuffers(encodingState.outputs,
      encodingState.outputOffsets,
      encodingState.encodeLength);
  RSUtil.encodeData(gfTables, encodingState.encodeLength,
      encodingState.inputs,
      encodingState.inputOffsets, encodingState.outputs,
      encodingState.outputOffsets);
}
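
resetOutputBuffers clears the output regions first, since the raw coder contract expects zeroed outputs before the parity bytes are written into them. A hedged mini-version of what it presumably does for the byte-array case (the real CoderUtil implementation may differ):

import java.util.Arrays;

public class ResetOutputsSketch {
  // Zero dataLen bytes of each output, starting at its own offset.
  static void resetOutputBuffers(byte[][] outputs, int[] offsets, int dataLen) {
    for (int i = 0; i < outputs.length; i++) {
      Arrays.fill(outputs[i], offsets[i], offsets[i] + dataLen, (byte) 0);
    }
  }

  public static void main(String[] args) {
    byte[][] outputs = {{9, 9, 9, 9}};
    resetOutputBuffers(outputs, new int[]{1}, 2);
    System.out.println(Arrays.toString(outputs[0]));  // [9, 0, 0, 9]
  }
}
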
Project: hops    File: RSRawDecoderLegacy.java
public RSRawDecoderLegacy(ErasureCoderOptions coderOptions) {
  super(coderOptions);
  if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
            "Invalid numDataUnits and numParityUnits");
  }

  this.errSignature = new int[getNumParityUnits()];
  this.primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
      getNumParityUnits());
}
Project: hops    File: RSRawDecoderLegacy.java
private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
                        ByteBuffer[] outputs) {
  ByteBuffer valid = CoderUtil.findFirstValidInput(inputs);
  int dataLen = valid.remaining();
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
  }

  RSUtil.GF.solveVandermondeSystem(errSignature,
      outputs, erasedIndexes.length);
}
Project: hops    File: RSRawDecoderLegacy.java
private void doDecodeImpl(byte[][] inputs, int[] inputOffsets,
                        int dataLen, int[] erasedIndexes,
                        byte[][] outputs, int[] outputOffsets) {
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, inputOffsets, dataLen, outputs[i],
        outputOffsets[i], primitivePower[i]);
  }

  RSUtil.GF.solveVandermondeSystem(errSignature, outputs, outputOffsets,
      erasedIndexes.length, dataLen);
}
Project: hops    File: RSRawDecoder.java
public RSRawDecoder(ErasureCoderOptions coderOptions) {
  super(coderOptions);

  int numAllUnits = getNumAllUnits();
  if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
            "Invalid getNumDataUnits() and numParityUnits");
  }

  encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
  if (allowVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), numAllUnits);
  }
}
Project: hops    File: RSRawDecoder.java
@Override
protected void doDecode(ByteBufferDecodingState decodingState) {
  CoderUtil.resetOutputBuffers(decodingState.outputs,
      decodingState.decodeLength);
  prepareDecoding(decodingState.inputs, decodingState.erasedIndexes);

  ByteBuffer[] realInputs = new ByteBuffer[getNumDataUnits()];
  for (int i = 0; i < getNumDataUnits(); i++) {
    realInputs[i] = decodingState.inputs[validIndexes[i]];
  }
  RSUtil.encodeData(gfTables, realInputs, decodingState.outputs);
}
Project: hops    File: RSRawEncoderLegacy.java
@Override
protected void doEncode(ByteBufferEncodingState encodingState) {
  CoderUtil.resetOutputBuffers(encodingState.outputs,
      encodingState.encodeLength);
  // parity units + data units
  ByteBuffer[] all = new ByteBuffer[encodingState.outputs.length +
      encodingState.inputs.length];

  if (allowChangeInputs()) {
    System.arraycopy(encodingState.outputs, 0, all, 0,
        encodingState.outputs.length);
    System.arraycopy(encodingState.inputs, 0, all,
        encodingState.outputs.length, encodingState.inputs.length);
  } else {
    System.arraycopy(encodingState.outputs, 0, all, 0,
        encodingState.outputs.length);

    /*
     * Note: if this coder were ever used in a production system (it rarely
     * is), this could be optimized to cache and reuse the allocated buffers
     * instead of reallocating them on every call.
     */
    ByteBuffer tmp;
    for (int i = 0; i < encodingState.inputs.length; i++) {
      tmp = ByteBuffer.allocate(encodingState.inputs[i].remaining());
      tmp.put(encodingState.inputs[i]);
      tmp.flip();
      all[encodingState.outputs.length + i] = tmp;
    }
  }

  // Compute the remainder
  RSUtil.GF.remainder(all, generatingPolynomial);
}
Project: hops    File: RSRawEncoderLegacy.java
@Override
protected void doEncode(ByteArrayEncodingState encodingState) {
  int dataLen = encodingState.encodeLength;
  CoderUtil.resetOutputBuffers(encodingState.outputs,
      encodingState.outputOffsets, dataLen);
  // parity units + data units
  byte[][] all = new byte[encodingState.outputs.length +
      encodingState.inputs.length][];
  int[] allOffsets = new int[encodingState.outputOffsets.length +
      encodingState.inputOffsets.length];

  if (allowChangeInputs()) {
    System.arraycopy(encodingState.outputs, 0, all, 0,
        encodingState.outputs.length);
    System.arraycopy(encodingState.inputs, 0, all,
        encodingState.outputs.length, encodingState.inputs.length);

    System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
        encodingState.outputOffsets.length);
    System.arraycopy(encodingState.inputOffsets, 0, allOffsets,
        encodingState.outputOffsets.length,
        encodingState.inputOffsets.length);
  } else {
    System.arraycopy(encodingState.outputs, 0, all, 0,
        encodingState.outputs.length);
    System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
        encodingState.outputOffsets.length);

    for (int i = 0; i < encodingState.inputs.length; i++) {
      all[encodingState.outputs.length + i] =
          Arrays.copyOfRange(encodingState.inputs[i],
          encodingState.inputOffsets[i],
              encodingState.inputOffsets[i] + dataLen);
    }
  }

  // Compute the remainder
  RSUtil.GF.remainder(all, allOffsets, dataLen, generatingPolynomial);
}
Project: hadoop-oss    File: HHUtil.java
public static ByteBuffer getPiggyBackForDecode(ByteBuffer[][] inputs,
                                               ByteBuffer[][] outputs,
                                               int pbParityIndex,
                                               int numDataUnits,
                                               int numParityUnits,
                                               int pbIndex) {
  ByteBuffer firstValidInput = HHUtil.findFirstValidInput(inputs[0]);
  int bufSize = firstValidInput.remaining();

  ByteBuffer piggybacks = allocateByteBuffer(firstValidInput.isDirect(),
          bufSize);

  // Use pbParityIndex to figure out which parity location holds the
  // associated piggyback, then obtain the piggyback by subtracting the
  // decoded parity value (second sub-packet only) from the parity value
  // actually read.
  if (pbParityIndex < numParityUnits) {
    // not the last piggybackSet
    int inputIdx = numDataUnits + pbParityIndex;
    int inputPos = inputs[1][inputIdx].position();
    int outputPos = outputs[1][pbParityIndex].position();

    for (int m = 0, k = inputPos, n = outputPos; m < bufSize; k++, m++, n++) {
      int valueWithPb = 0xFF & inputs[1][inputIdx].get(k);
      int valueWithoutPb = 0xFF & outputs[1][pbParityIndex].get(n);
      piggybacks.put(m, (byte) RSUtil.GF.add(valueWithPb, valueWithoutPb));
    }
  } else {
    // last piggybackSet
    int sum = 0;
    for (int k = 0; k < bufSize; k++) {
      sum = 0;
      for (int i = 1; i < numParityUnits; i++) {
        int inIdx = numDataUnits + i;
        int inPos = inputs[1][numDataUnits + i].position();
        int outPos = outputs[1][i].position();

        sum = RSUtil.GF.add(sum, (0xFF & inputs[1][inIdx].get(inPos + k)));
        sum = RSUtil.GF.add(sum, (0xFF & outputs[1][i].get(outPos + k)));
      }

      sum = RSUtil.GF.add(sum,
              (0xFF & inputs[0][numDataUnits + pbIndex].get(
                      inputs[0][numDataUnits + pbIndex].position() + k)));

      piggybacks.put(k, (byte) sum);
    }

  }

  return piggybacks;
}
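
The piggyback "subtraction" above works because addition in GF(2^m) is bitwise XOR, which is its own inverse, so GF.add can both add and remove a piggyback. A toy check with invented byte values:

public class GfAddDemo {
  public static void main(String[] args) {
    int valueWithPb = 0xA7;     // parity byte actually read (with piggyback)
    int valueWithoutPb = 0x3C;  // parity byte recomputed without it
    int piggyback = valueWithPb ^ valueWithoutPb;  // GF(2^8) add == XOR
    // XOR-ing the piggyback back in restores the original byte.
    System.out.println((piggyback ^ valueWithoutPb) == valueWithPb);  // true
  }
}
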
Project: hadoop-oss    File: RSRawEncoder.java
@Override
protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
  RSUtil.encodeData(gfTables, inputs, outputs);
}
Project: hadoop-oss    File: RSRawEncoder.java
@Override
protected void doEncode(byte[][] inputs, int[] inputOffsets,
                        int dataLen, byte[][] outputs, int[] outputOffsets) {
  RSUtil.encodeData(gfTables, dataLen, inputs, inputOffsets, outputs,
      outputOffsets);
}
Project: hops    File: HHUtil.java
public static ByteBuffer getPiggyBackForDecode(ByteBuffer[][] inputs,
                                               ByteBuffer[][] outputs,
                                               int pbParityIndex,
                                               int numDataUnits,
                                               int numParityUnits,
                                               int pbIndex) {
  ByteBuffer firstValidInput = HHUtil.findFirstValidInput(inputs[0]);
  int bufSize = firstValidInput.remaining();

  ByteBuffer piggybacks = allocateByteBuffer(firstValidInput.isDirect(),
          bufSize);

  // Use pbParityIndex to figure out which parity location holds the
  // associated piggyback, then obtain the piggyback by subtracting the
  // decoded parity value (second sub-packet only) from the parity value
  // actually read.
  if (pbParityIndex < numParityUnits) {
    // not the last piggybackSet
    int inputIdx = numDataUnits + pbParityIndex;
    int inputPos = inputs[1][inputIdx].position();
    int outputPos = outputs[1][pbParityIndex].position();

    for (int m = 0, k = inputPos, n = outputPos; m < bufSize; k++, m++, n++) {
      int valueWithPb = 0xFF & inputs[1][inputIdx].get(k);
      int valueWithoutPb = 0xFF & outputs[1][pbParityIndex].get(n);
      piggybacks.put(m, (byte) RSUtil.GF.add(valueWithPb, valueWithoutPb));
    }
  } else {
    // last piggybackSet
    int sum = 0;
    for (int k = 0; k < bufSize; k++) {
      sum = 0;
      for (int i = 1; i < numParityUnits; i++) {
        int inIdx = numDataUnits + i;
        int inPos = inputs[1][numDataUnits + i].position();
        int outPos = outputs[1][i].position();

        sum = RSUtil.GF.add(sum, (0xFF & inputs[1][inIdx].get(inPos + k)));
        sum = RSUtil.GF.add(sum, (0xFF & outputs[1][i].get(outPos + k)));
      }

      sum = RSUtil.GF.add(sum,
              (0xFF & inputs[0][numDataUnits + pbIndex].get(
                      inputs[0][numDataUnits + pbIndex].position() + k)));

      piggybacks.put(k, (byte) sum);
    }

  }

  return piggybacks;
}
Project: hops    File: RSRawEncoder.java
@Override
protected void doEncode(ByteBufferEncodingState encodingState) {
  CoderUtil.resetOutputBuffers(encodingState.outputs,
      encodingState.encodeLength);
  RSUtil.encodeData(gfTables, encodingState.inputs, encodingState.outputs);
}