Usage examples for the Java class org.apache.hadoop.io.DataOutputBuffer

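Nearly every snippet below follows the same in-memory round trip: a Writable (or protobuf/credentials payload) is serialized into a DataOutputBuffer, and the resulting bytes are handed to a DataInputBuffer or ByteBuffer via getData()/getLength(). A minimal sketch of that pattern (the class name is hypothetical; only Hadoop's common io classes are assumed):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataOutputBufferRoundTrip {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);      // serialize into the growable in-memory buffer

    DataInputBuffer in = new DataInputBuffer();
    // getData() exposes the backing array, which may be larger than the valid
    // payload, so it should always be paired with getLength().
    in.reset(out.getData(), 0, out.getLength());

    Text copy = new Text();
    copy.readFields(in);               // deserialize from the same bytes
    System.out.println(copy);          // prints "hello"
  }
}
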
Project: aliyun-oss-hadoop-fs, hadoop    File: TestContainerLocalizer.java
@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok)
      throws IOException {
    Credentials creds = new Credentials();
    byte[] password = new byte[20];
    Text kind = new Text();
    Text service = new Text();
    Text alias = new Text();
    for (int i = 0; i < nTok; ++i) {
      byte[] identifier = ("idef" + i).getBytes();
      r.nextBytes(password);
      kind.set("kind" + i);
      service.set("service" + i);
      alias.set("token" + i);
      Token token = new Token(identifier, password, kind, service);
      creds.addToken(alias, token);
    }
    DataOutputBuffer buf = new DataOutputBuffer();
    creds.writeTokenStorageToStream(buf);
    DataInputBuffer ret = new DataInputBuffer();
    ret.reset(buf.getData(), 0, buf.getLength());
    return ret;
  }
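The returned buffer is positioned at the start of the token-storage payload, so the read side can parse it directly. A short sketch of consuming it (readTokenStorageStream is the read-side counterpart of the writeTokenStorageToStream call above; the token count is illustrative):

  DataInputBuffer in = createFakeCredentials(new java.util.Random(), 3);
  Credentials parsed = new Credentials();
  parsed.readTokenStorageStream(in);   // parses the serialized token storage
  assert parsed.numberOfTokens() == 3;
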
Project: hadoop-oss, hadoop    File: TFile.java
public void write(DataOutput out) throws IOException {
  if (firstKey == null) {
    Utils.writeVInt(out, 0);
    return;
  }

  DataOutputBuffer dob = new DataOutputBuffer();
  Utils.writeVInt(dob, firstKey.size());
  dob.write(firstKey.buffer());
  Utils.writeVInt(out, dob.size());
  out.write(dob.getData(), 0, dob.getLength());

  for (TFileIndexEntry entry : index) {
    dob.reset();
    entry.write(dob);
    Utils.writeVInt(out, dob.getLength());
    out.write(dob.getData(), 0, dob.getLength());
  }
}
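Each index entry is framed as a VInt length followed by the entry bytes. A sketch of the matching read side, assuming a DataInput named in positioned at the start of one frame (Utils.readVInt is the symmetric decoder in the same TFile utility class):

  int len = Utils.readVInt(in);        // length written by Utils.writeVInt above
  byte[] block = new byte[len];
  in.readFully(block);                 // the serialized entry bytes
  DataInputBuffer entryIn = new DataInputBuffer();
  entryIn.reset(block, 0, len);        // ready for the entry's read-side parsing
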
Project: hadoop-oss, hadoop    File: TestDelegationToken.java
private boolean testDelegationTokenIdentiferSerializationRoundTrip(Text owner,
    Text renewer, Text realUser) throws IOException {
  TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
      owner, renewer, realUser);
  DataOutputBuffer out = new DataOutputBuffer();
  dtid.writeImpl(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  try {
    TestDelegationTokenIdentifier dtid2 =
        new TestDelegationTokenIdentifier();
    dtid2.readFields(in);
    assertTrue(dtid.equals(dtid2));
    return true;
  } catch(IOException e){
    return false;
  }
}
Project: hadoop-oss, hadoop    File: CryptoStreamsTestBase.java
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for(int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();

    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");
  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
Project: hadoop-oss, hadoop    File: TestCryptoStreams.java
@Override
protected OutputStream getOutputStream(int bufferSize, byte[] key, byte[] iv) 
    throws IOException {
  DataOutputBuffer out = new DataOutputBuffer() {
    @Override
    public void flush() throws IOException {
      buf = getData();
      bufLen = getLength();
    }
    @Override
    public void close() throws IOException {
      buf = getData();
      bufLen = getLength();
    }
  };
  return new CryptoOutputStream(new FakeOutputStream(out),
      codec, bufferSize, key, iv);
}
Project: hadoop-oss, hadoop    File: TestIndexedSort.java
public WritableSortable(int j) throws IOException {
  seed = r.nextLong();
  r.setSeed(seed);
  Text t = new Text();
  StringBuilder sb = new StringBuilder();
  indices = new int[j];
  offsets = new int[j];
  check = new String[j];
  DataOutputBuffer dob = new DataOutputBuffer();
  for (int i = 0; i < j; ++i) {
    indices[i] = i;
    offsets[i] = dob.getLength();
    genRandom(t, r.nextInt(15) + 1, sb);
    t.write(dob);
    check[i] = t.toString();
  }
  eob = dob.getLength();
  bytes = dob.getData();
  comparator = WritableComparator.get(Text.class);
}
Project: hadoop-oss, hadoop    File: TestWritableSerialization.java
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();

  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();

  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();
  assertEquals(orig, deser);
}
Project: hadoop-oss, hadoop    File: SerializationTestUtil.java
/**
 * A utility that tests serialization/deserialization. 
 * @param conf configuration to use, "io.serializations" is read to 
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
    throws Exception {

  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer 
    = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer 
    = factory.getDeserializer(GenericsUtil.getClass(before));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
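A hypothetical call site for the utility above, round-tripping a Text value (the value and assertion are illustrative, not part of the original class):

  Configuration conf = new Configuration();
  Text before = new Text("round-trip me");
  Text after = SerializationTestUtil.testSerialization(conf, before);
  assertEquals(before, after);
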
Project: aliyun-maxcompute-data-collectors    File: TestMainframeDatasetInputSplit.java
@Test
public void testWriteRead() {
  mfDatasetInputSplit.addDataset("dataSet1");
  mfDatasetInputSplit.addDataset("dataSet2");
  DataOutputBuffer dob = new DataOutputBuffer();
  DataInputBuffer dib = new DataInputBuffer();
  MainframeDatasetInputSplit mfReader = new MainframeDatasetInputSplit();
  try {
    mfDatasetInputSplit.write(dob);
    dib.reset(dob.getData(), dob.getLength());
    mfReader.readFields(dib);
    Assert.assertNotNull("MFReader get data from tester", mfReader);
    Assert.assertEquals(2, mfReader.getLength());
    Assert.assertEquals("dataSet1", mfReader.getNextDataset());
    Assert.assertEquals("dataSet2", mfReader.getNextDataset());
  } catch (IOException ioe) {
    Assert.fail("No IOException should be thrown!");
  } catch (InterruptedException ie) {
    Assert.fail("No InterruptedException should be thrown!");
  }
}
Project: hadoop    File: TestPBRecordImpl.java
@Test(timeout=10000)
public void testLocalResourceStatusSerDe() throws Exception {
  LocalResourceStatus rsrcS = createLocalResourceStatus();
  assertTrue(rsrcS instanceof LocalResourceStatusPBImpl);
  LocalResourceStatusPBImpl rsrcPb = (LocalResourceStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalResourceStatusProto rsrcPbD =
    LocalResourceStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalResourceStatus rsrcD =
    new LocalResourceStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResource());
  assertEquals(createResource(), rsrcD.getResource());
}
Project: hadoop, aliyun-oss-hadoop-fs    File: TestPBRecordImpl.java
@Test(timeout=10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD =
    LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD =
    new LocalizerStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
Project: hadoop, aliyun-oss-hadoop-fs    File: TestPBRecordImpl.java
@Test(timeout=10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
  LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
  assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
  LocalizerHeartbeatResponsePBImpl rsrcPb =
    (LocalizerHeartbeatResponsePBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerHeartbeatResponseProto rsrcPbD =
    LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerHeartbeatResponse rsrcD =
    new LocalizerHeartbeatResponsePBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
  assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
Project: hadoop, aliyun-oss-hadoop-fs    File: TestRMAppTransitions.java
@Test (timeout = 30000)
public void testAppRecoverPath() throws IOException {
  LOG.info("--- START: testAppRecoverPath ---");
  ApplicationSubmissionContext sub =
      Records.newRecord(ApplicationSubmissionContext.class);
  ContainerLaunchContext clc =
      Records.newRecord(ContainerLaunchContext.class);
  Credentials credentials = new Credentials();
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(securityTokens);
  sub.setAMContainerSpec(clc);
  testCreateAppSubmittedRecovery(sub);
}
Project: hadoop    File: TestMerge.java
public KeyValueWriter(Configuration conf, OutputStream output,
                      Class<K> kyClass, Class<V> valClass
                     ) throws IOException {
  keyClass = kyClass;
  valueClass = valClass;
  dataBuffer = new DataOutputBuffer();
  SerializationFactory serializationFactory
                                         = new SerializationFactory(conf);
  keySerializer
              = (Serializer<K>)serializationFactory.getSerializer(keyClass);
  keySerializer.open(dataBuffer);
  valueSerializer
            = (Serializer<V>)serializationFactory.getSerializer(valueClass);
  valueSerializer.open(dataBuffer);
  outputStream = new DataOutputStream(output);
}
Project: hadoop    File: TestIFileStreams.java
public void testIFileStream() throws Exception {
  final int DLEN = 100;
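  // The +4 below leaves room for the 4-byte checksum trailer that
  // IFileOutputStream appends on close, which is also why the stream length
  // passed to IFileInputStream is 104 (DLEN + 4).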
  DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
  IFileOutputStream ifos = new IFileOutputStream(dob);
  for (int i = 0; i < DLEN; ++i) {
    ifos.write(i);
  }
  ifos.close();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), DLEN + 4);
  IFileInputStream ifis = new IFileInputStream(dib, 104, new Configuration());
  for (int i = 0; i < DLEN; ++i) {
    assertEquals(i, ifis.read());
  }
  ifis.close();
}
Project: hadoop    File: TestWritableJobConf.java
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
    factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
    factory.getDeserializer(GenericsUtil.getClass(conf));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
Project: hadoop    File: Chain.java
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
                                      E obj) throws IOException {
  Serializer<E> ser =
    serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
    serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
                                    getChainJobConf());
  ByteArrayInputStream bais =
    new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
Project: hadoop    File: EditLogBackupOutputStream.java
EditLogBackupOutputStream(NamenodeRegistration bnReg, // backup node
                          JournalInfo journalInfo) // active name-node
throws IOException {
  super();
  this.bnRegistration = bnReg;
  this.journalInfo = journalInfo;
  InetSocketAddress bnAddress =
    NetUtils.createSocketAddr(bnRegistration.getAddress());
  try {
    this.backupNode = NameNodeProxies.createNonHAProxy(new HdfsConfiguration(),
        bnAddress, JournalProtocol.class, UserGroupInformation.getCurrentUser(),
        true).getProxy();
  } catch(IOException e) {
    Storage.LOG.error("Error connecting to: " + bnAddress, e);
    throw e;
  }
  this.doubleBuf = new EditsDoubleBuffer(DEFAULT_BUFFER_SIZE);
  this.out = new DataOutputBuffer(DEFAULT_BUFFER_SIZE);
}
Project: hadoop    File: TestDelegationTokenRemoteFetcher.java
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);

  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
  int fileLength = out.getData().length;
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData());
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH,
      String.valueOf(fileLength));
  response.setContent(cbuffer);
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
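Note that fileLength above is taken from out.getData().length, i.e. the capacity of the backing array, which can exceed the number of valid bytes. A tighter variant of the same response, bounded by getLength() (a sketch against the same Netty 3 ChannelBuffer API used above):

  int fileLength = out.getLength();                 // valid bytes only
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData(), 0, fileLength); // skip any slack capacity
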
Project: hadoop    File: TestJspHelper.java
@Test
public void testReadWriteReplicaState() {
  try {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState
        .values()) {
      repState.write(out);
      in.reset(out.getData(), out.getLength());
      HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState
          .read(in);
      assertTrue("testReadWrite error !!!", repState == result);
      out.reset();
      in.reset();
    }
  } catch (Exception ex) {
    fail("testReadWrite ex error ReplicaState");
  }
}
Project: hadoop    File: TestDFSPacket.java
@Test
public void testPacket() throws Exception {
  Random r = new Random(12345L);
  byte[] data =  new byte[chunkSize];
  r.nextBytes(data);
  byte[] checksum = new byte[checksumSize];
  r.nextBytes(checksum);

  DataOutputBuffer os =  new DataOutputBuffer(data.length * 2);

  byte[] packetBuf = new byte[data.length * 2];
  DFSPacket p = new DFSPacket(packetBuf, maxChunksPerPacket,
                              0, 0, checksumSize, false);
  p.setSyncBlock(true);
  p.writeData(data, 0, data.length);
  p.writeChecksum(checksum, 0, checksum.length);
  p.writeTo(os);

  //we have set syncBlock to true, so the header has the maximum length
  int headerLen = PacketHeader.PKT_MAX_HEADER_LEN;
  byte[] readBuf = os.getData();

  assertArrayRegionsEqual(readBuf, headerLen, checksum, 0, checksum.length);
  assertArrayRegionsEqual(readBuf, headerLen + checksum.length, data, 0, data.length);

}
Project: hadoop    File: TestRecordFactory.java
public static void testFactory(long targetBytes, long targetRecs)
    throws Exception {
  final Configuration conf = new Configuration();
  final GridmixKey key = new GridmixKey();
  final GridmixRecord val = new GridmixRecord();
  LOG.info("Target bytes/records: " + targetBytes + "/" + targetRecs);
  final RecordFactory f = new AvgRecordFactory(targetBytes, targetRecs, conf);
  targetRecs = targetRecs <= 0 && targetBytes >= 0
    ? Math.max(1,
               targetBytes 
               / conf.getInt(AvgRecordFactory.GRIDMIX_MISSING_REC_SIZE, 
                             64 * 1024))
    : targetRecs;

  long records = 0L;
  final DataOutputBuffer out = new DataOutputBuffer();
  while (f.next(key, val)) {
    ++records;
    key.write(out);
    val.write(out);
  }
  assertEquals(targetRecs, records);
  assertEquals(targetBytes, out.getLength());
}
Project: hadoop    File: TestGridmixRecord.java
static void eqSeedTest(GridmixRecord x, GridmixRecord y, int max)
    throws Exception {
  final Random r = new Random();
  final long s = r.nextLong();
  r.setSeed(s);
  LOG.info("eqSeed: " + s);
  assertEquals(x.fixedBytes(), y.fixedBytes());
  final int min = x.fixedBytes() + 1;
  final DataOutputBuffer out1 = new DataOutputBuffer();
  final DataOutputBuffer out2 = new DataOutputBuffer();
  for (int i = min; i < max; ++i) {
    final long seed = r.nextLong();
    setSerialize(x, seed, i, out1);
    setSerialize(y, seed, i, out2);
    assertEquals(x, y);
    assertEquals(x.hashCode(), y.hashCode());

    // verify written contents match
    assertEquals(out1.getLength(), out2.getLength());
    // assumes that writes will grow buffer deterministically
    assertEquals("Bad test", out1.getData().length, out2.getData().length);
    assertArrayEquals(out1.getData(), out2.getData());
  }
}
Project: hadoop    File: StreamXmlRecordReader.java
public synchronized boolean next(Text key, Text value) throws IOException {
  numNext++;
  if (pos_ >= end_) {
    return false;
  }

  DataOutputBuffer buf = new DataOutputBuffer();
  if (!readUntilMatchBegin()) {
    return false;
  }
  if (pos_ >= end_ || !readUntilMatchEnd(buf)) {
    return false;
  }

  // There is only one element; key/value splitting is not done here.
  byte[] record = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, record, 0, record.length);

  numRecStats(record, 0, record.length);

  key.set(record);
  value.set("");

  return true;
}
Project: ditb    File: TestKeyValueCompression.java
@Test
public void testKVWithTags() throws Exception {
  CompressionContext ctx = new CompressionContext(LRUDictionary.class, false, false);
  DataOutputBuffer buf = new DataOutputBuffer(BUF_SIZE);
  KeyValueCompression.writeKV(buf, createKV(1), ctx);
  KeyValueCompression.writeKV(buf, createKV(0), ctx);
  KeyValueCompression.writeKV(buf, createKV(2), ctx);

  ctx.clear();
  DataInputStream in = new DataInputStream(new ByteArrayInputStream(
      buf.getData(), 0, buf.getLength()));

  KeyValue readBack = KeyValueCompression.readKV(in, ctx);
  List<Tag> tags = readBack.getTags();
  assertEquals(1, tags.size());
}
Project: ditb    File: IndexColumnDescriptor.java
@Override
/**
 * @return this instance converted to the pb column family type
 */
public ColumnFamilySchema convert() {
  try {
    DataOutputBuffer indexout = new DataOutputBuffer();
    // System.out.println("winter write indexColumn descripter, indexType is: "
    // + valueOfIndexType(indexType));
    indexout.writeInt(valueOfIndexType(indexType));
    indexout.writeInt(indexes.size());
    for (IndexDescriptor indexDescriptor : indexes.values()) {
      indexDescriptor.write(indexout);
    }
    super.setValue(INDEX, indexout.getData());
  } catch (IOException e1) {
    e1.printStackTrace();
  }
  return super.convert();
}
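Note that super.setValue(INDEX, indexout.getData()) stores the entire backing array, so any slack capacity beyond getLength() is persisted along with the payload. A sketch of trimming to the valid bytes first (assumes java.util.Arrays is imported):

  // Store only the valid payload, not the buffer's spare capacity:
  byte[] payload = Arrays.copyOf(indexout.getData(), indexout.getLength());
  super.setValue(INDEX, payload);
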
Project: azkaban    File: AzkabanSequenceFileReader.java
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer, 
                                     CompressionInputStream filter) throws IOException {
  // Read data into a temporary buffer
  DataOutputBuffer dataBuffer = new DataOutputBuffer();

  try {
    int dataBufferLength = WritableUtils.readVInt(in);
    dataBuffer.write(in, dataBufferLength);

    // Set up 'buffer' connected to the input-stream
    buffer.reset(dataBuffer.getData(), 0, dataBuffer.getLength());
  } finally {
    dataBuffer.close();
  }

  // Reset the codec
  filter.resetState();
}
Project: flink    File: Utils.java
public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths, Configuration conf) throws IOException {
    Credentials credentials = new Credentials();
    // for HDFS
    TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
    // for HBase
    obtainTokenForHBase(credentials, conf);
    // for user
    UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();

    Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
    for (Token<? extends TokenIdentifier> token : usrTok) {
        final Text id = new Text(token.getIdentifier());
        LOG.info("Adding user token " + id + " with " + token);
        credentials.addToken(id, token);
    }
    try (DataOutputBuffer dob = new DataOutputBuffer()) {
        credentials.writeTokenStorageToStream(dob);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
        }

        ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        amContainer.setTokens(securityTokens);
    }
}
Project: aliyun-oss-hadoop-fs    File: TestAppManager.java
@Test
public void testRMAppSubmitWithInvalidTokens() throws Exception {
  // Setup invalid security tokens
  DataOutputBuffer dob = new DataOutputBuffer();
  ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0,
      dob.getLength());
  asContext.getAMContainerSpec().setTokens(securityTokens);
  try {
    appMonitor.submitApplication(asContext, "test");
    Assert.fail("Application submission should fail because" +
        " Tokens are invalid.");
  } catch (YarnException e) {
    // Exception is expected
    Assert.assertTrue("The thrown exception is not" +
        " java.io.EOFException",
        e.getMessage().contains("java.io.EOFException"));
  }
  int timeoutSecs = 0;
  while ((getAppEventType() == RMAppEventType.KILL) &&
      timeoutSecs++ < 20) {
    Thread.sleep(1000);
  }
  Assert.assertEquals("app event type sent is wrong",
      RMAppEventType.APP_REJECTED, getAppEventType());
  asContext.getAMContainerSpec().setTokens(null);
}