/**
 * Check if native-bzip2 code is loaded & initialized correctly and
 * can be loaded for this job.
 *
 * @param conf configuration
 * @return <code>true</code> if native-bzip2 is loaded & initialized
 *         and can be loaded for this job, else <code>false</code>
 */
public static synchronized boolean isNativeBzip2Loaded(Configuration conf) {
  String libname = conf.get("io.compression.codec.bzip2.library",
      "system-native");
  if (!bzip2LibraryName.equals(libname)) {
    nativeBzip2Loaded = false;
    bzip2LibraryName = libname;
    if (libname.equals("java-builtin")) {
      LOG.info("Using pure-Java version of bzip2 library");
    } else if (NativeCodeLoader.isNativeCodeLoaded()) {
      try {
        // Initialize the native library.
        Bzip2Compressor.initSymbols(libname);
        Bzip2Decompressor.initSymbols(libname);
        nativeBzip2Loaded = true;
        LOG.info("Successfully loaded & initialized native-bzip2 library "
            + libname);
      } catch (Throwable t) {
        LOG.warn("Failed to load/initialize native-bzip2 library " + libname
            + ", will use pure-Java version");
      }
    }
  }
  return nativeBzip2Loaded;
}

/**
 * Verify that the native snappy libraries are loaded & initialized,
 * throwing a {@link RuntimeException} if they are not available.
 */
public static void checkNativeCodeLoaded() {
  if (!NativeCodeLoader.isNativeCodeLoaded() ||
      !NativeCodeLoader.buildSupportsSnappy()) {
    throw new RuntimeException("native snappy library not available: " +
        "this version of libhadoop was built without " +
        "snappy support.");
  }
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyCompressor has not been loaded.");
  }
  if (!SnappyDecompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyDecompressor has not been loaded.");
  }
}

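A hedged usage sketch, not taken from the snippets above: a caller could wrap this check to fall back to a pure-Java codec when native snappy is unavailable. The helper name chooseCodec is hypothetical; ReflectionUtils and DefaultCodec are assumed to be the standard Hadoop classes of those names.

// Sketch only: prefer SnappyCodec when the native check passes, otherwise
// fall back to DefaultCodec. chooseCodec is a hypothetical helper.
private static CompressionCodec chooseCodec(Configuration conf) {
  try {
    SnappyCodec.checkNativeCodeLoaded();
    return ReflectionUtils.newInstance(SnappyCodec.class, conf);
  } catch (RuntimeException e) {
    LOG.warn("Native snappy unavailable, falling back to DefaultCodec", e);
    return ReflectionUtils.newInstance(DefaultCodec.class, conf);
  }
}
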
@Test(timeout=120000)
public void testJceAesCtrCryptoCodec() throws Exception {
  GenericTestUtils.assumeInNativeProfile();
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
  // Overflow test, IV: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  for (int i = 0; i < 8; i++) {
    iv[8 + i] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
}

@Test(timeout=120000)
public void testOpensslAesCtrCryptoCodec() throws Exception {
  GenericTestUtils.assumeInNativeProfile();
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
  // Overflow test, IV: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  for (int i = 0; i < 8; i++) {
    iv[8 + i] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
}

@Test
public void testNativeCodeLoaded() {
  if (!requireTestJni()) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // Library names depend on the platform and build environment,
  // so just check that the names are available.
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  if (NativeCodeLoader.buildSupportsOpenssl()) {
    assertFalse(OpensslCipher.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}

private static boolean isNativeSnappyLoadable() {
  boolean snappyAvailable = false;
  boolean loaded = false;
  try {
    System.loadLibrary("snappy");
    logger.warn("Snappy native library is available");
    snappyAvailable = true;
    boolean hadoopNativeAvailable = NativeCodeLoader.isNativeCodeLoaded();
    loaded = snappyAvailable && hadoopNativeAvailable;
    if (loaded) {
      logger.info("Snappy native library loaded");
    } else {
      logger.warn("Snappy native library not loaded");
    }
  } catch (Throwable t) {
    logger.warn("Failed to load snappy: ", t);
    return false;
  }
  return loaded;
}

/**
 * Checks whether the compressor of the given pair is available, i.e. whether
 * the native library it depends on has been loaded.
 */
private static <T extends Compressor, E extends Decompressor>
    boolean isAvailable(TesterPair<T, E> pair) {
  Compressor compressor = pair.compressor;

  if (compressor.getClass().isAssignableFrom(Lz4Compressor.class)
      && NativeCodeLoader.isNativeCodeLoaded()) {
    return true;
  } else if (compressor.getClass().isAssignableFrom(BuiltInZlibDeflater.class)
      && NativeCodeLoader.isNativeCodeLoaded()) {
    return true;
  } else if (compressor.getClass().isAssignableFrom(ZlibCompressor.class)) {
    return ZlibFactory.isNativeZlibLoaded(new Configuration());
  } else if (compressor.getClass().isAssignableFrom(SnappyCompressor.class)
      && isNativeSnappyLoadable()) {
    return true;
  }
  return false;
}

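A hedged sketch of how a test might use this helper: skip a compressor/decompressor pair with a JUnit assumption when its native support is missing. The pairs collection and the round-trip call are hypothetical placeholders, not taken from the snippets above.

// Sketch only: guard each round trip with a JUnit assumption so pairs whose
// native support is not loaded are skipped instead of failing.
// "pairs" and the round-trip step are hypothetical placeholders.
@Test
public void testCompressorDecompressorPairs() {
  for (TesterPair<Compressor, Decompressor> pair : pairs) {
    Assume.assumeTrue("Skipping " + pair.compressor.getClass().getSimpleName()
        + ": native support not loaded", isAvailable(pair));
    // ... run the compress/decompress round trip for this pair ...
  }
}
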
@Test(timeout=20000)
public void testBZip2NativeCodec() throws IOException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    if (Bzip2Factory.isNativeBzip2Loaded(conf)) {
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
      conf.set("io.compression.codec.bzip2.library", "java-builtin");
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
    } else {
      LOG.warn("Native hadoop library available but native bzip2 is not");
    }
  }
}

@Test
public void testLz4Codec() throws IOException {
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    if (Lz4Codec.isNativeCodeLoaded()) {
      conf.setBoolean(
          CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
          false);
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
      conf.setBoolean(
          CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
          true);
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
    } else {
      Assert.fail("Native hadoop library available but lz4 not");
    }
  }
}

@Test(timeout=20000)
public void testSequenceFileBZip2NativeCodec() throws IOException,
    ClassNotFoundException, InstantiationException, IllegalAccessException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    if (Bzip2Factory.isNativeBzip2Loaded(conf)) {
      sequenceFileCodecTest(conf, 0,
          "org.apache.hadoop.io.compress.BZip2Codec", 100);
      sequenceFileCodecTest(conf, 100,
          "org.apache.hadoop.io.compress.BZip2Codec", 100);
      sequenceFileCodecTest(conf, 200000,
          "org.apache.hadoop.io.compress.BZip2Codec", 1000000);
    } else {
      LOG.warn("Native hadoop library available but native bzip2 is not");
    }
  }
}

/**
 * Tests that internal renames are done using native code on platforms that
 * have it. The native rename includes more detailed information about the
 * failure, which can be useful for troubleshooting.
 */
@Test
public void testDoPreUpgradeIOError() throws IOException {
  File storageDir = new File(TestEditLog.TEST_DIR, "preupgradeioerror");
  List<URI> editUris = Collections.singletonList(storageDir.toURI());
  NNStorage storage = setupEdits(editUris, 5);
  StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next();
  assertNotNull(sd);
  // Change storage directory so that renaming current to previous.tmp fails.
  FileUtil.setWritable(storageDir, false);
  FileJournalManager jm = null;
  try {
    jm = new FileJournalManager(conf, sd, storage);
    exception.expect(IOException.class);
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      exception.expectMessage("failure in native rename");
    }
    jm.doPreUpgrade();
  } finally {
    IOUtils.cleanup(LOG, jm);
    // Restore permissions on storage directory and make sure we can delete.
    FileUtil.setWritable(storageDir, true);
    FileUtil.fullyDelete(storageDir);
  }
}

@Test(timeout=120000)
public void testJceAesCtrCryptoCodec() throws Exception {
  if (!"true".equalsIgnoreCase(System.getProperty("runningWithNative"))) {
    LOG.warn("Skipping since test was not run with -Pnative flag");
    Assume.assumeTrue(false);
  }
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
  // Overflow test, IV: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  for (int i = 0; i < 8; i++) {
    iv[8 + i] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
}

@Test(timeout=120000)
public void testOpensslAesCtrCryptoCodec() throws Exception {
  if (!"true".equalsIgnoreCase(System.getProperty("runningWithNative"))) {
    LOG.warn("Skipping since test was not run with -Pnative flag");
    Assume.assumeTrue(false);
  }
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
  // Overflow test, IV: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  for (int i = 0; i < 8; i++) {
    iv[8 + i] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
}

@Test
public void testTestCompression() {
  assertTrue(CompressionTest.testCompression("NONE"));
  assertTrue(CompressionTest.testCompression("GZ"));

  if (NativeCodeLoader.isNativeCodeLoaded()) {
    nativeCodecTest("LZO", "lzo2", "com.hadoop.compression.lzo.LzoCodec");
    nativeCodecTest("LZ4", null, "org.apache.hadoop.io.compress.Lz4Codec");
    nativeCodecTest("SNAPPY", "snappy",
        "org.apache.hadoop.io.compress.SnappyCodec");
  } else {
    // The Hadoop native library is not available.
    LOG.debug("Native code not loaded");
    assertFalse(CompressionTest.testCompression("LZO"));
    assertFalse(CompressionTest.testCompression("LZ4"));
    assertFalse(CompressionTest.testCompression("SNAPPY"));
  }
}

@Before
public void startUp() throws Exception {
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
  final ScenarioConfiguration conf = new ScenarioConfiguration();
  conf.addcombinerConf();

  this.fs = FileSystem.get(conf);

  this.inputpath = TestConstants.NATIVETASK_COMBINER_TEST_INPUTDIR
      + "/wordcount";
  if (!fs.exists(new Path(inputpath))) {
    new TestInputFile(
        conf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE,
            1000000),
        Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(inputpath, 1, (byte)('a'));
  }

  this.nativeoutputpath = TestConstants.NATIVETASK_COMBINER_TEST_NATIVE_OUTPUTDIR
      + "/nativewordcount";
  this.hadoopoutputpath = TestConstants.NATIVETASK_COMBINER_TEST_NORMAL_OUTPUTDIR
      + "/normalwordcount";
}

@Before
public void startUp() throws Exception {
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
  final ScenarioConfiguration conf = new ScenarioConfiguration();
  final FileSystem fs = FileSystem.get(conf);
  final Path path = new Path(TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR);
  fs.delete(path, true);
  if (!fs.exists(path)) {
    new TestInputFile(hadoopConf.getInt(
        TestConstants.NATIVETASK_COMPRESS_FILESIZE, 100000),
        Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR);
  }
  fs.close();
}