Java class org.apache.hadoop.io.DataInputBuffer: example source code
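Nearly every example below follows the same round-trip pattern: serialize into a growable DataOutputBuffer, then point a DataInputBuffer at the same backing array with reset(), so the bytes are read in place without a copy. A minimal, self-contained sketch of that pattern (our own illustration, not taken from any of the projects listed below):

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;

public class RoundTrip {
  public static void main(String[] args) throws Exception {
    DataOutputBuffer out = new DataOutputBuffer();
    new IntWritable(42).write(out);               // serialize into the growable buffer

    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());  // wrap the backing array; no copy

    IntWritable copy = new IntWritable();
    copy.readFields(in);                          // deserialize from the same bytes
    System.out.println(copy.get());               // prints 42
  }
}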

Project: aliyun-oss-hadoop-fs    File: TestContainerLocalizer.java
@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok)
    throws IOException {
  Credentials creds = new Credentials();
  byte[] password = new byte[20];
  Text kind = new Text();
  Text service = new Text();
  Text alias = new Text();
  for (int i = 0; i < nTok; ++i) {
    byte[] identifier = ("idef" + i).getBytes();
    r.nextBytes(password);
    kind.set("kind" + i);
    service.set("service" + i);
    alias.set("token" + i);
    Token token = new Token(identifier, password, kind, service);
    creds.addToken(alias, token);
  }
  DataOutputBuffer buf = new DataOutputBuffer();
  creds.writeTokenStorageToStream(buf);
  DataInputBuffer ret = new DataInputBuffer();
  ret.reset(buf.getData(), 0, buf.getLength());
  return ret;
}
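The returned buffer is positioned at the start of the serialized token storage, so a hypothetical caller (not part of the test above) could read the tokens back with the static Credentials.readTokenStorageStream(DataInputBuffer) helper:

DataInputBuffer dib = createFakeCredentials(new Random(), 3);
Credentials read = Credentials.readTokenStorageStream(dib);
System.out.println(read.numberOfTokens()); // 3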
Project: hadoop-oss    File: TFile.java
/**
 * Constructor
 * 
 * @param reader
 *          The TFile reader object.
 * @param begin
 *          Begin location of the scan.
 * @param end
 *          End location of the scan.
 * @throws IOException
 */
Scanner(Reader reader, Location begin, Location end) throws IOException {
  this.reader = reader;
  // ensure the TFile index is loaded throughout the life of scanner.
  reader.checkTFileDataIndex();
  beginLocation = begin;
  endLocation = end;

  valTransferBuffer = new BytesWritable();
  // TODO: remember the longest key in a TFile, and use it to replace
  // MAX_KEY_SIZE.
  keyBuffer = new byte[MAX_KEY_SIZE];
  keyDataInputStream = new DataInputBuffer();
  valueBufferInputStream = new ChunkDecoder();
  valueDataInputStream = new DataInputStream(valueBufferInputStream);

  if (beginLocation.compareTo(endLocation) >= 0) {
    currentLocation = new Location(endLocation);
  } else {
    currentLocation = new Location(0, 0);
    initBlock(beginLocation.getBlockIndex());
    inBlockAdvance(beginLocation.getRecordIndex());
  }
}
Project: hadoop-oss    File: TestDelegationToken.java
private boolean testDelegationTokenIdentiferSerializationRoundTrip(Text owner,
    Text renewer, Text realUser) throws IOException {
  TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
      owner, renewer, realUser);
  DataOutputBuffer out = new DataOutputBuffer();
  dtid.writeImpl(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  try {
    TestDelegationTokenIdentifier dtid2 =
        new TestDelegationTokenIdentifier();
    dtid2.readFields(in);
    assertTrue(dtid.equals(dtid2));
    return true;
  } catch(IOException e){
    return false;
  }
}
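The two-argument in.reset(data, length) used here reads from offset 0; DataInputBuffer also offers a three-argument overload for reading a slice. With data, off, and len as placeholders:

DataInputBuffer in = new DataInputBuffer();
in.reset(data, len);       // read bytes [0, len)
in.reset(data, off, len);  // read bytes [off, off + len)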
Project: hadoop-oss    File: TestWritableSerialization.java
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();

  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();

  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();
  assertEquals(orig, deser);
}
Project: hadoop-oss    File: SerializationTestUtil.java
/**
 * A utility that tests serialization/deserialization. 
 * @param conf configuration to use, "io.serializations" is read to 
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
    throws Exception {

  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer 
    = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer 
    = factory.getDeserializer(GenericsUtil.getClass(before));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
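A hypothetical call to this utility, relying on WritableSerialization being registered under "io.serializations" (it is by default):

Configuration conf = new Configuration();
Text after = SerializationTestUtil.testSerialization(conf, new Text("hello"));
assertEquals("hello", after.toString());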
Project: aliyun-maxcompute-data-collectors    File: TestMainframeDatasetInputSplit.java
@Test
public void testWriteRead() {
  mfDatasetInputSplit.addDataset("dataSet1");
  mfDatasetInputSplit.addDataset("dataSet2");
  DataOutputBuffer dob = new DataOutputBuffer();
  DataInputBuffer dib = new DataInputBuffer();
  MainframeDatasetInputSplit mfReader = new MainframeDatasetInputSplit();
  try {
    mfDatasetInputSplit.write(dob);
    dib.reset(dob.getData(), dob.getLength());
    mfReader.readFields(dib);
    Assert.assertNotNull("MFReader get data from tester", mfReader);
    Assert.assertEquals(2, mfReader.getLength());
    Assert.assertEquals("dataSet1", mfReader.getNextDataset());
    Assert.assertEquals("dataSet2", mfReader.getNextDataset());
  } catch (IOException ioe) {
    Assert.fail("No IOException should be thrown!");
  } catch (InterruptedException ie) {
    Assert.fail("No InterruptedException should be thrown!");
  }
}
Project: hadoop    File: TestYARNTokenIdentifier.java
@Test
public void testAMRMTokenIdentifier() throws IOException {
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(1, 1), 1);
  int masterKeyId = 1;

  AMRMTokenIdentifier token = new AMRMTokenIdentifier(appAttemptId, masterKeyId);

  AMRMTokenIdentifier anotherToken = new AMRMTokenIdentifier();
  byte[] tokenContent = token.getBytes();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(tokenContent, tokenContent.length);
  anotherToken.readFields(dib);

  // verify the whole record equals with original record
  Assert.assertEquals("Token is not the same after serialization " +
      "and deserialization.", token, anotherToken);

  Assert.assertEquals("ApplicationAttemptId from proto is not the same with original token",
      anotherToken.getApplicationAttemptId(), appAttemptId);

  Assert.assertEquals("masterKeyId from proto is not the same with original token",
      anotherToken.getKeyId(), masterKeyId);
}
Project: hadoop    File: TestPBRecordImpl.java
@Test(timeout=10000)
public void testLocalResourceStatusSerDe() throws Exception {
  LocalResourceStatus rsrcS = createLocalResourceStatus();
  assertTrue(rsrcS instanceof LocalResourceStatusPBImpl);
  LocalResourceStatusPBImpl rsrcPb = (LocalResourceStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalResourceStatusProto rsrcPbD =
    LocalResourceStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalResourceStatus rsrcD =
    new LocalResourceStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResource());
  assertEquals(createResource(), rsrcD.getResource());
}
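writeDelimitedTo() and parseDelimitedFrom() accept the buffers directly because DataOutputBuffer extends DataOutputStream and DataInputBuffer extends DataInputStream; the protobuf round trip therefore runs over Hadoop's reusable buffers without an intermediate byte-array copy.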
Project: hadoop    File: TestPBRecordImpl.java
@Test(timeout=10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD =
    LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD =
    new LocalizerStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
Project: hadoop    File: TestPBRecordImpl.java
@Test(timeout=10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
  LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
  assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
  LocalizerHeartbeatResponsePBImpl rsrcPb =
    (LocalizerHeartbeatResponsePBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerHeartbeatResponseProto rsrcPbD =
    LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerHeartbeatResponse rsrcD =
    new LocalizerHeartbeatResponsePBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
  assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
Project: hadoop    File: TestIFileStreams.java
public void testIFileStream() throws Exception {
  final int DLEN = 100;
  DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
  IFileOutputStream ifos = new IFileOutputStream(dob);
  for (int i = 0; i < DLEN; ++i) {
    ifos.write(i);
  }
  ifos.close();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), DLEN + 4);
  IFileInputStream ifis = new IFileInputStream(dib, 104, new Configuration());
  for (int i = 0; i < DLEN; ++i) {
    assertEquals(i, ifis.read());
  }
  ifis.close();
}
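The extra four bytes (104 = DLEN + 4) account for the checksum trailer that IFileOutputStream appends when it is closed, which IFileInputStream verifies while reading.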
Project: hadoop    File: TestWritableJobConf.java
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
    factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
    factory.getDeserializer(GenericsUtil.getClass(conf));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
Project: hadoop    File: IFile.java
public void append(DataInputBuffer key, DataInputBuffer value)
throws IOException {
  int keyLength = key.getLength() - key.getPosition();
  if (keyLength < 0) {
    throw new IOException("Negative key-length not allowed: " + keyLength + 
                          " for " + key);
  }

  int valueLength = value.getLength() - value.getPosition();
  if (valueLength < 0) {
    throw new IOException("Negative value-length not allowed: " + 
                          valueLength + " for " + value);
  }

  WritableUtils.writeVInt(out, keyLength);
  WritableUtils.writeVInt(out, valueLength);
  out.write(key.getData(), key.getPosition(), keyLength); 
  out.write(value.getData(), value.getPosition(), valueLength); 

  // Update bytes written
  decompressedBytesWritten += keyLength + valueLength + 
                  WritableUtils.getVIntSize(keyLength) + 
                  WritableUtils.getVIntSize(valueLength);
  ++numRecordsWritten;
}
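Each appended record is thus laid out as &lt;key-length vint&gt; &lt;value-length vint&gt; &lt;key bytes&gt; &lt;value bytes&gt;, and decompressedBytesWritten accounts for exactly those four pieces.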
Project: hadoop    File: IFile.java
public void nextRawValue(DataInputBuffer value) throws IOException {
  final byte[] valBytes = (value.getData().length < currentValueLength)
    ? new byte[currentValueLength << 1]
    : value.getData();
  int i = readData(valBytes, 0, currentValueLength);
  if (i != currentValueLength) {
    throw new IOException ("Asked for " + currentValueLength + " Got: " + i);
  }
  value.reset(valBytes, currentValueLength);

  // Record the bytes read
  bytesRead += currentValueLength;

  ++recNo;
  ++numRecordsRead;
}
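If the caller's reused buffer is too small, a fresh array of twice the required size (currentValueLength &lt;&lt; 1) is allocated so subsequent larger values are less likely to force another allocation; reset() then exposes exactly currentValueLength bytes of it.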
Project: hadoop    File: BackupStore.java
/**
 * Write the given K,V to the cache. 
 * Write to memcache if space is available, else write to the filecache
 * @param key
 * @param value
 * @throws IOException
 */
public void write(DataInputBuffer key, DataInputBuffer value)
throws IOException {

  assert (key != null && value != null);

  if (fileCache.isActive()) {
    fileCache.write(key, value);
    return;
  }

  if (memCache.reserveSpace(key, value)) {
    memCache.write(key, value);
  } else {
    fileCache.activate();
    fileCache.write(key, value);
  }
}
Project: hadoop    File: InMemoryWriter.java
public void append(DataInputBuffer key, DataInputBuffer value)
throws IOException {
  int keyLength = key.getLength() - key.getPosition();
  if (keyLength < 0) {
    throw new IOException("Negative key-length not allowed: " + keyLength + 
                          " for " + key);
  }

  int valueLength = value.getLength() - value.getPosition();
  if (valueLength < 0) {
    throw new IOException("Negative value-length not allowed: " + 
                          valueLength + " for " + value);
  }

  WritableUtils.writeVInt(out, keyLength);
  WritableUtils.writeVInt(out, valueLength);
  out.write(key.getData(), key.getPosition(), keyLength); 
  out.write(value.getData(), value.getPosition(), valueLength); 
}
Project: hadoop    File: InMemoryReader.java
public boolean nextRawKey(DataInputBuffer key) throws IOException {
  try {
    if (!positionToNextRecord(memDataIn)) {
      return false;
    }
    // Setup the key
    int pos = memDataIn.getPosition();
    byte[] data = memDataIn.getData();
    key.reset(data, pos, currentKeyLength);
    // Position for the next value
    long skipped = memDataIn.skip(currentKeyLength);
    if (skipped != currentKeyLength) {
      throw new IOException("Rec# " + recNo + 
          ": Failed to skip past key of length: " + 
          currentKeyLength);
    }

    // Record the byte
    bytesRead += currentKeyLength;
    return true;
  } catch (IOException ioe) {
    dumpOnError();
    throw ioe;
  }
}
Project: hadoop    File: InMemoryReader.java
public void nextRawValue(DataInputBuffer value) throws IOException {
  try {
    int pos = memDataIn.getPosition();
    byte[] data = memDataIn.getData();
    value.reset(data, pos, currentValueLength);

    // Position for the next record
    long skipped = memDataIn.skip(currentValueLength);
    if (skipped != currentValueLength) {
      throw new IOException("Rec# " + recNo + 
          ": Failed to skip past value of length: " + 
          currentValueLength);
    }
    // Record the byte
    bytesRead += currentValueLength;

    ++recNo;
  } catch (IOException ioe) {
    dumpOnError();
    throw ioe;
  }
}
Project: hadoop    File: TestMerger.java
private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path,
    List<String> keys, List<String> values) throws IOException {
  FSDataInputStream in = CryptoUtils.wrapIfNecessary(conf, fs.open(path));

  IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in,
      fs.getFileStatus(path).getLen(), null, null);
  DataInputBuffer keyBuff = new DataInputBuffer();
  DataInputBuffer valueBuff = new DataInputBuffer();
  Text key = new Text();
  Text value = new Text();
  while (reader.nextRawKey(keyBuff)) {
    key.readFields(keyBuff);
    keys.add(key.toString());
    reader.nextRawValue(valueBuff);
    value.readFields(valueBuff);
    values.add(value.toString());
  }
}
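Each nextRawKey()/nextRawValue() call resets the passed DataInputBuffer onto the reader's internal buffer, and Text.readFields() then decodes the record from those raw bytes; the same two buffers are reused for every record.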
Project: hadoop    File: TestMerger.java
private Answer<?> getKeyAnswer(final String segmentName,
    final boolean isCompressedInput) {
  return new Answer<Object>() {
    int i = 0;

    @SuppressWarnings("unchecked")
    public Boolean answer(InvocationOnMock invocation) {
      if (i++ == 3) {
        return false;
      }
      Reader<Text,Text> mock = (Reader<Text,Text>) invocation.getMock();
      int multiplier = isCompressedInput ? 100 : 1;
      mock.bytesRead += 10 * multiplier;
      Object[] args = invocation.getArguments();
      DataInputBuffer key = (DataInputBuffer) args[0];
      key.reset(("Segment Key " + segmentName + i).getBytes(), 20);
      return true;
    }
  };
}
Project: hadoop    File: TestJspHelper.java
@Test
public void testReadWriteReplicaState() {
  try {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState
        .values()) {
      repState.write(out);
      in.reset(out.getData(), out.getLength());
      HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState
          .read(in);
      assertTrue("testReadWrite error !!!", repState == result);
      out.reset();
      in.reset();
    }
  } catch (Exception ex) {
    fail("testReadWrite ex error ReplicaState");
  }
}
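out.reset() rewinds the DataOutputBuffer's write position and the no-argument in.reset() rewinds the read position, so the same pair of buffers is reused for every ReplicaState value in the loop.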
Project: hadoop    File: TestGridMixClasses.java
@Override
public DataInputBuffer getKey() throws IOException {
  ByteArrayOutputStream dt = new ByteArrayOutputStream();
  GridmixKey key = new GridmixKey(GridmixKey.REDUCE_SPEC, 10 * counter, 1L);
  Spec spec = new Spec();
  spec.rec_in = counter;
  spec.rec_out = counter;
  spec.bytes_out = counter * 100;

  key.setSpec(spec);
  key.write(new DataOutputStream(dt));
  DataInputBuffer result = new DataInputBuffer();
  byte[] b = dt.toByteArray();
  result.reset(b, 0, b.length);
  return result;
}
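Unlike most examples on this page, this one serializes through a ByteArrayOutputStream, so toByteArray() makes a copy before the buffer is wrapped; serializing into a DataOutputBuffer instead would have let reset() reuse the backing array directly.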
Project: ditb    File: TestSerialization.java
/**
 * Test RegionInfo serialization
 * @throws Exception
 */
@Test public void testRegionInfo() throws Exception {
  HRegionInfo hri = createRandomRegion("testRegionInfo");

  //test toByteArray()
  byte [] hrib = hri.toByteArray();
  HRegionInfo deserializedHri = HRegionInfo.parseFrom(hrib);
  assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
  assertEquals(hri, deserializedHri);

  //test toDelimitedByteArray()
  hrib = hri.toDelimitedByteArray();
  DataInputBuffer buf = new DataInputBuffer();
  try {
    buf.reset(hrib, hrib.length);
    deserializedHri = HRegionInfo.parseFrom(buf);
    assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
    assertEquals(hri, deserializedHri);
  } finally {
    buf.close();
  }
}
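The plain-byte-array parseFrom(hrib) and the stream-based parseFrom(buf) are not interchangeable here: the delimited form carries a length prefix that must be consumed from a stream, which is why the DataInputBuffer is needed for the second round trip.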
Project: ditb    File: HRegionInfo.java
/**
 * Parses all the HRegionInfo instances from the passed in stream until EOF. Presumes the
 * HRegionInfo's were serialized to the stream with {@link #toDelimitedByteArray()}
 * @param bytes serialized bytes
 * @param offset the start offset into the byte[] buffer
 * @param length how far we should read into the byte[] buffer
 * @return All the hregioninfos that are in the byte array. Keeps reading till we hit the end.
 */
public static List<HRegionInfo> parseDelimitedFrom(final byte[] bytes, final int offset,
    final int length) throws IOException {
  if (bytes == null) {
    throw new IllegalArgumentException("Can't build an object with empty bytes array");
  }
  DataInputBuffer in = new DataInputBuffer();
  List<HRegionInfo> hris = new ArrayList<HRegionInfo>();
  try {
    in.reset(bytes, offset, length);
    while (in.available() > 0) {
      HRegionInfo hri = parseFrom(in);
      hris.add(hri);
    }
  } finally {
    in.close();
  }
  return hris;
}
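The in.available() &gt; 0 loop condition is a reliable end-of-data test here because DataInputBuffer reads from an in-memory array whose remaining byte count is exact; on a general InputStream, available() would not be a safe EOF check.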
Project: ditb    File: Writables.java
/**
 * Set bytes into the passed Writable by calling its
 * {@link Writable#readFields(java.io.DataInput)}.
 * @param bytes serialized bytes
 * @param offset offset into array
 * @param length length of data
 * @param w An empty Writable (usually made by calling the null-arg
 * constructor).
 * @return The passed Writable after its readFields has been called fed
 * by the passed <code>bytes</code> array or IllegalArgumentException
 * if passed null or an empty <code>bytes</code> array.
 * @throws IOException e
 * @throws IllegalArgumentException
 */
public static Writable getWritable(final byte [] bytes, final int offset,
  final int length, final Writable w)
throws IOException {
  if (bytes == null || length <=0) {
    throw new IllegalArgumentException("Can't build a writable with empty " +
      "bytes array");
  }
  if (w == null) {
    throw new IllegalArgumentException("Writable cannot be null");
  }
  DataInputBuffer in = new DataInputBuffer();
  try {
    in.reset(bytes, offset, length);
    w.readFields(in);
    return w;
  } finally {
    in.close();
  }
}
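A hypothetical round trip through this helper, using Writables.getBytes() (the companion serializer in the same class):

byte[] bytes = Writables.getBytes(new Text("row-key"));
Text copy = (Text) Writables.getWritable(bytes, 0, bytes.length, new Text());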