Java class org.apache.hadoop.io.compress.CompressorStream: example source code

The examples below show how org.apache.hadoop.io.compress.CompressorStream is used in open-source projects.

Project: aliyun-maxcompute-data-collectors    File: LobFile.java (the same method appears verbatim in the zSqoop and sqoop projects)
/**
 * {@inheritDoc}
 */
@Override
public OutputStream writeBlobRecord(long claimedLen) throws IOException {
  finishRecord(); // finish any previous record.
  checkForNull(this.out);
  startRecordIndex();
  this.header.getStartMark().write(out);
  LOG.debug("Starting new record; id=" + curEntryId
      + "; claimedLen=" + claimedLen);
  WritableUtils.writeVLong(out, curEntryId);
  WritableUtils.writeVLong(out, claimedLen);
  this.curClaimedLen = claimedLen;
  this.userCountingOutputStream = new CountingOutputStream(
      new CloseShieldOutputStream(out));
  if (null == this.codec) {
    // No codec; pass thru the same OutputStream to the user.
    this.userOutputStream = this.userCountingOutputStream;
  } else {
    // Wrap our CountingOutputStream in a compressing OutputStream to
    // give to the user.
    this.compressor.reset();
    this.userOutputStream = new CompressorStream(
        this.userCountingOutputStream, compressor);
  }

  return this.userOutputStream;
}
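The method above compresses only the record body: the start mark, entry id, and claimed length are written uncompressed, and the CountingOutputStream is wrapped in a CompressorStream only when a codec is configured. The CloseShieldOutputStream ensures that when the caller closes the returned stream, the shared underlying stream stays open. The same wrapping pattern can be reproduced standalone; the following is a minimal sketch (the file name and payload are hypothetical) using the pure-Java zlib compressor from ZlibFactory:

import java.io.FileOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;

public class CompressorStreamSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Compressor compressor = ZlibFactory.getZlibCompressor(conf);
    compressor.reset(); // mandatory when a compressor is reused, as in the snippet above
    try (OutputStream out = new CompressorStream(
        new FileOutputStream("record.deflate"), compressor)) {
      out.write("record body".getBytes("UTF-8"));
    } // close() calls finish(), emitting any buffered compressed data
  }
}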
Project: hadoop    File: FSImageFormatProtobuf.java (the same method appears verbatim in aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, FlexMap, and hadoop-on-lustre2)
private void flushSectionOutputStream() throws IOException {
  if (codec != null) {
    ((CompressorStream) sectionOutputStream).finish();
  }
  sectionOutputStream.flush();
}
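On a CompressorStream, flush() alone only forwards bytes the compressor has already produced; finish() is what drives the remaining buffered input through the compressor, so each image section ends as a complete compressed unit while the underlying file stream stays open for the next section. A minimal sketch of that per-section pattern (the file name and section contents are hypothetical, and DefaultCodec stands in for whatever codec the image writer was configured with):

import java.io.FileOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class SectionWriterSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);
    try (OutputStream raw = new FileOutputStream("sections.img")) {
      for (String section : new String[] {"section-1", "section-2"}) {
        CompressionOutputStream cos = codec.createOutputStream(raw);
        cos.write(section.getBytes("UTF-8"));
        cos.finish(); // compress and emit everything buffered for this section
        cos.flush();  // push it to the underlying file, mirroring the method above
        // cos is deliberately not closed, so raw stays open for the next section
      }
    }
  }
}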
Project: clowncar    File: BloscCodec.java
@Override
public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException {
    return new CompressorStream(out, compressor, getBufferSize());
}
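For a custom codec this is often the entire write path: hand the caller's stream and the codec's Compressor to CompressorStream with a suitable buffer size, and the generic stream does the rest. The single-argument overload of createOutputStream can then delegate; the following is only a guess at how such an overload might look, not part of the clowncar source:

@Override
public CompressionOutputStream createOutputStream(OutputStream out)
    throws IOException {
  // Delegate to the two-argument variant with a codec-owned Compressor.
  return createOutputStream(out, createCompressor());
}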
Project: hadoop-0.20    File: TestCodec.java
public void testGzipCodecWrite() throws IOException {
  // Create a gzipped file using a compressor from the CodecPool,
  // and try to read it back via the regular GZIPInputStream.

  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));

  // Ensure that the CodecPool has a BuiltInZlibDeflater in it.
  Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
  assertNotNull("zlibCompressor is null!", zlibCompressor);
  assertTrue("ZlibFactory returned unexpected deflator",
      zlibCompressor instanceof BuiltInZlibDeflater);
  CodecPool.returnCompressor(zlibCompressor);

  // Create a GZIP text file via the Compressor interface.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec", codec instanceof GzipCodec);

  final String msg = "This is the message we are going to compress.";
  final String tmpDir = System.getProperty("test.build.data", "/tmp/");
  final String fileName = new Path(new Path(tmpDir),
      "testGzipCodecWrite.txt.gz").toString();

  BufferedWriter w = null;
  Compressor gzipCompressor = CodecPool.getCompressor(codec);
  if (null != gzipCompressor) {
    // If it gives us back a Compressor, we should be able to use this
    // to write files we can then read back with Java's gzip tools.
    OutputStream os = new CompressorStream(new FileOutputStream(fileName),
        gzipCompressor);
    w = new BufferedWriter(new OutputStreamWriter(os));
    w.write(msg);
    w.close();
    CodecPool.returnCompressor(gzipCompressor);

    verifyGzipFile(fileName, msg);
  }

  // Create a gzip text file via codec.getOutputStream().
  w = new BufferedWriter(new OutputStreamWriter(
      codec.createOutputStream(new FileOutputStream(fileName))));
  w.write(msg);
  w.close();

  verifyGzipFile(fileName, msg);
}
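The verifyGzipFile helper is not shown in the snippet; per the comment at the top of the test it reads the file back with the JDK's GZIPInputStream and compares the content. A plausible shape for it (the method body below is an assumption, not the hadoop-0.20 source):

// requires java.io.*, java.util.zip.GZIPInputStream, and JUnit's assertEquals
private void verifyGzipFile(String fileName, String expected) throws IOException {
  BufferedReader r = new BufferedReader(new InputStreamReader(
      new GZIPInputStream(new FileInputStream(fileName))));
  try {
    assertEquals("Compressed content did not round-trip", expected, r.readLine());
  } finally {
    r.close();
  }
}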