Java class org.apache.hadoop.io.compress.snappy.SnappyDecompressor: example source code

The snippets below show how SnappyDecompressor is used in SnappyCodec.java and TestCompressorDecompressor.java across a number of Hadoop-derived projects.
Project: hadoop-oss    File: SnappyCodec.java
/**
 * Are the native snappy libraries loaded & initialized?
 */
public static void checkNativeCodeLoaded() {
    if (!NativeCodeLoader.isNativeCodeLoaded() ||
        !NativeCodeLoader.buildSupportsSnappy()) {
      throw new RuntimeException("native snappy library not available: " +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyDecompressor has not been loaded.");
    }
}
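Callers that want a boolean probe instead of a fail-fast exception can wrap this check. The helper class below is a hypothetical sketch, not part of Hadoop; it relies only on the static checkNativeCodeLoaded() shown above.

import org.apache.hadoop.io.compress.SnappyCodec;

// Hypothetical helper: probe native Snappy support once, e.g. at startup.
public final class SnappySupport {
  private SnappySupport() {}

  /** Returns true if the native Snappy bindings are usable in this JVM. */
  public static boolean isAvailable() {
    try {
      // Throws RuntimeException when libhadoop or its Snappy support is missing.
      SnappyCodec.checkNativeCodeLoaded();
      return true;
    } catch (RuntimeException e) {
      return false;
    }
  }
}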
Project: hadoop-oss    File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // 44 KB of generated test data
  int SIZE = 44 * 1024;

  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();

  } catch (Exception ex) {
    GenericTestUtils.assertExceptionContains(
        "testCompressorDecompressor error !!!", ex);
  }
}
Project: hadoop-oss    File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
  int BYTE_SIZE = 100 * 1024;
  byte[] rawData = generate(BYTE_SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(
            new SnappyCompressor(BYTE_SIZE + BYTE_SIZE / 2),
            new SnappyDecompressor(BYTE_SIZE + BYTE_SIZE / 2))
        .withCompressDecompressPair(new Lz4Compressor(BYTE_SIZE),
            new Lz4Decompressor(BYTE_SIZE))
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();

  } catch (Exception ex) {
    GenericTestUtils.assertExceptionContains(
        "testCompressorDecompressorWithExeedBufferLimit error !!!", ex);
  }
}
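For reference, the raw Compressor/Decompressor API that CompressDecompressTester drives can also be exercised by hand. A minimal round-trip sketch, assuming a libhadoop built with Snappy support; the class name and buffer sizes are illustrative:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.compress.snappy.SnappyCompressor;
import org.apache.hadoop.io.compress.snappy.SnappyDecompressor;

public class SnappyRoundTrip {
  public static void main(String[] args) throws Exception {
    byte[] raw = "hello snappy, hello snappy, hello snappy"
        .getBytes(StandardCharsets.UTF_8);

    SnappyCompressor compressor = new SnappyCompressor();
    compressor.setInput(raw, 0, raw.length);
    compressor.finish();
    // Snappy output can be slightly larger than the input, so oversize the buffer.
    byte[] compressed = new byte[raw.length + raw.length / 6 + 64];
    int compressedLen = compressor.compress(compressed, 0, compressed.length);

    SnappyDecompressor decompressor = new SnappyDecompressor();
    decompressor.setInput(compressed, 0, compressedLen);
    byte[] restored = new byte[raw.length];
    int restoredLen = decompressor.decompress(restored, 0, restored.length);

    System.out.println(restoredLen == raw.length);   // expected: true
  }
}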
Project: hadoop    File: SnappyCodec.java (checkNativeCodeLoaded() is identical to the hadoop-oss version above)
Project: hadoop    File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // 44 KB of generated test data
  int SIZE = 44 * 1024;

  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();

  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hadoop    File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
  int BYTE_SIZE = 100 * 1024;
  byte[] rawData = generate(BYTE_SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(
            new SnappyCompressor(BYTE_SIZE + BYTE_SIZE / 2),
            new SnappyDecompressor(BYTE_SIZE + BYTE_SIZE / 2))
        .withCompressDecompressPair(new Lz4Compressor(BYTE_SIZE),
            new Lz4Decompressor(BYTE_SIZE))
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                    CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();

  } catch (Exception ex) {
    fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
  }
}
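In application code the compressor and decompressor are usually reached through the codec's stream API rather than constructed directly. A sketch of that usage; the payload and class name are illustrative:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class SnappyStreams {
  public static void main(String[] args) throws Exception {
    // ReflectionUtils.newInstance injects the Configuration into the codec.
    SnappyCodec codec =
        ReflectionUtils.newInstance(SnappyCodec.class, new Configuration());

    byte[] data = new byte[8 * 1024];                 // arbitrary payload

    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(sink);
    out.write(data);
    out.close();                                      // flushes the final block

    CompressionInputStream in =
        codec.createInputStream(new ByteArrayInputStream(sink.toByteArray()));
    byte[] buf = new byte[data.length];
    int n = 0;
    while (n < buf.length) {
      int r = in.read(buf, n, buf.length - n);
      if (r < 0) break;
      n += r;
    }
    in.close();
    System.out.println(n == data.length);             // expected: true
  }
}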
Project: aliyun-oss-hadoop-fs    Files: SnappyCodec.java, TestCompressorDecompressor.java (identical to the hadoop versions above)
Project: big-c    Files: SnappyCodec.java, TestCompressorDecompressor.java (identical to the hadoop versions above)
Project: hadoop-2.6.0-cdh5.4.3    File: SnappyCodec.java
/**
 * Are the native snappy libraries loaded & initialized?
 */
public static void checkNativeCodeLoaded() {
    if (!NativeCodeLoader.buildSupportsSnappy()) {
      throw new RuntimeException("native snappy library not available: " +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyDecompressor has not been loaded.");
    }
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestCompressorDecompressor.java (both tests identical to the hadoop versions above)
Project: hadoop-plus    Files: SnappyCodec.java (identical to the hadoop-2.6.0-cdh5.4.3 version above), TestCompressorDecompressor.java (identical to the hadoop versions above)
Project: hops    File: SnappyCodec.java
/**
 * Are the native snappy libraries loaded & initialized?
 */
public static void checkNativeCodeLoaded() {
  if (!NativeCodeLoader.buildSupportsSnappy()) {
    throw new RuntimeException("native snappy library not available: " +
        "this version of libhadoop was built without " +
        "snappy support.");
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    throw new RuntimeException("Failed to load libhadoop.");
  }
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyCompressor has not been loaded.");
  }
  if (!SnappyDecompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyDecompressor has not been loaded.");
  }
}
Project: hops    File: TestCompressorDecompressor.java (both tests identical to the hadoop-oss versions above, using GenericTestUtils.assertExceptionContains)
Project: hadoop-TCP    Files: SnappyCodec.java (identical to the hadoop-2.6.0-cdh5.4.3 version above), TestCompressorDecompressor.java (identical to the hadoop versions above)
Project: hardfs    Files: SnappyCodec.java, TestCompressorDecompressor.java (same as hadoop-TCP)
Project: hadoop-on-lustre2    Files: SnappyCodec.java, TestCompressorDecompressor.java (same as hadoop-TCP)
Project: hadoop-oss    File: SnappyCodec.java
/**
 * Create a new {@link Decompressor} for use by this {@link CompressionCodec}.
 *
 * @return a new decompressor for use by this codec
 */
@Override
public Decompressor createDecompressor() {
  checkNativeCodeLoaded();
  int bufferSize = conf.getInt(
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(bufferSize);
}
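The buffer size read here can be tuned through the configuration, and decompressors are typically borrowed from CodecPool rather than created per call. A sketch assuming the constants shown above; the 256 KB value and class name are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class SnappyBufferConfig {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Enlarge the decompressor's buffer (the default is
    // IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT).
    conf.setInt(
        CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
        256 * 1024);

    SnappyCodec codec = ReflectionUtils.newInstance(SnappyCodec.class, conf);

    // Borrow a decompressor from the shared pool and always return it.
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try {
      // ... feed it compressed input via setInput()/decompress() ...
    } finally {
      CodecPool.returnDecompressor(decompressor);
    }
  }
}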
Projects: hadoop, aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3    File: SnappyCodec.java (createDecompressor() is identical to the hadoop-oss version above)
Project: hadoop-EAR    File: SnappyCodec.java
/**
 * Get the type of {@link Decompressor} needed by this
 * {@link CompressionCodec}.
 *
 * @return the type of decompressor needed by this codec.
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  if (!isNativeSnappyLoaded(conf)) {
    throw new RuntimeException("native snappy library not available");
  }

  return SnappyDecompressor.class;
}
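getDecompressorType() exists so that generic code (CodecPool, for instance) can instantiate decompressors reflectively without naming SnappyDecompressor. A hypothetical sketch of that pattern:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.util.ReflectionUtils;

// Hypothetical factory: works for any codec, including SnappyCodec.
public final class DecompressorFactory {
  private DecompressorFactory() {}

  /** Reflectively creates a decompressor of the type the codec declares. */
  public static Decompressor create(CompressionCodec codec, Configuration conf) {
    Class<? extends Decompressor> type = codec.getDecompressorType();
    return ReflectionUtils.newInstance(type, conf);
  }
}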
Project: hadoop-EAR    File: SnappyCodec.java
/**
 * Create a new {@link Decompressor} for use by this
 * {@link CompressionCodec}.
 *
 * @return a new decompressor for use by this codec
 */
@Override
public Decompressor createDecompressor() {
  if (!isNativeSnappyLoaded(conf)) {
    throw new RuntimeException("native snappy library not available");
  }
  int bufferSize = conf.getInt(
      IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(bufferSize);
}
Projects: hadoop-plus, hops, hadoop-TCP    File: SnappyCodec.java (createDecompressor() is identical to the hadoop-oss version above)