/**
 * Mark a path to be deleted on JVM shutdown.
 *
 * @param f the existing path to delete.
 *
 * @return true if deleteOnExit is successful, otherwise false.
 *
 * @throws AccessControlException If access is denied
 * @throws UnsupportedFileSystemException If file system for <code>f</code> is
 *           not supported
 * @throws IOException If an I/O error occurred
 *
 * Exceptions applicable to file systems accessed over RPC:
 * @throws RpcClientException If an exception occurred in the RPC client
 * @throws RpcServerException If an exception occurred in the RPC server
 * @throws UnexpectedServerException If server implementation throws
 *           undeclared exception to RPC server
 */
public boolean deleteOnExit(Path f) throws AccessControlException,
    IOException {
  if (!this.util().exists(f)) {
    return false;
  }
  synchronized (DELETE_ON_EXIT) {
    if (DELETE_ON_EXIT.isEmpty()) {
      ShutdownHookManager.get().addShutdownHook(FINALIZER,
          SHUTDOWN_HOOK_PRIORITY);
    }

    Set<Path> set = DELETE_ON_EXIT.get(this);
    if (set == null) {
      set = new TreeSet<Path>();
      DELETE_ON_EXIT.put(this, set);
    }
    set.add(f);
  }
  return true;
}
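The FINALIZER hook registered above is not shown in the snippet. As a rough sketch of how such a hook could drain the DELETE_ON_EXIT map on shutdown, assuming DELETE_ON_EXIT maps each FileContext to the set of paths it has marked (an illustration only, not the actual FileContext implementation):

// Hypothetical sketch: a shutdown hook that deletes every path recorded in
// DELETE_ON_EXIT. The real FileContext finalizer may differ.
static final Runnable FINALIZER = new Runnable() {
  @Override
  public void run() {
    synchronized (DELETE_ON_EXIT) {
      for (Map.Entry<FileContext, Set<Path>> entry : DELETE_ON_EXIT.entrySet()) {
        FileContext fc = entry.getKey();
        for (Path path : entry.getValue()) {
          try {
            fc.delete(path, true); // best-effort recursive delete
          } catch (IOException e) {
            LOG.warn("Failed to delete " + path + " on exit", e);
          }
        }
      }
      DELETE_ON_EXIT.clear();
    }
  }
};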
public static void main(String[] args) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(SharedCacheManager.class, args, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    SharedCacheManager sharedCacheManager = new SharedCacheManager();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(sharedCacheManager),
        SHUTDOWN_HOOK_PRIORITY);
    sharedCacheManager.init(conf);
    sharedCacheManager.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting SharedCacheManager", t);
    System.exit(-1);
  }
}
protected void shutDown() {
  new Thread() {
    @Override
    public void run() {
      try {
        NodeManager.this.stop();
      } catch (Throwable t) {
        LOG.error("Error while shutting down NodeManager", t);
      } finally {
        if (shouldExitOnShutdownEvent
            && !ShutdownHookManager.get().isShutdownInProgress()) {
          ExitUtil.terminate(-1);
        }
      }
    }
  }.start();
}
private void initAndStartNodeManager(Configuration conf, boolean hasToReboot) {
  try {
    // Remove the old hook if we are rebooting.
    if (hasToReboot && null != nodeManagerShutdownHook) {
      ShutdownHookManager.get().removeShutdownHook(nodeManagerShutdownHook);
    }
    nodeManagerShutdownHook = new CompositeServiceShutdownHook(this);
    ShutdownHookManager.get().addShutdownHook(nodeManagerShutdownHook,
        SHUTDOWN_HOOK_PRIORITY);
    // System exit should be called only when NodeManager is instantiated from
    // main() function
    this.shouldExitOnShutdownEvent = true;
    this.init(conf);
    this.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting NodeManager", t);
    System.exit(-1);
  }
}
public static void main(String argv[]) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ResourceManager.class, argv, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    GenericOptionsParser hParser = new GenericOptionsParser(conf, argv);
    argv = hParser.getRemainingArgs();
    // If -format-state-store, then delete RMStateStore; else startup normally
    if (argv.length == 1 && argv[0].equals("-format-state-store")) {
      deleteRMStateStore(conf);
    } else {
      ResourceManager resourceManager = new ResourceManager();
      ShutdownHookManager.get().addShutdownHook(
          new CompositeServiceShutdownHook(resourceManager),
          SHUTDOWN_HOOK_PRIORITY);
      resourceManager.init(conf);
      resourceManager.start();
    }
  } catch (Throwable t) {
    LOG.fatal("Error starting ResourceManager", t);
    System.exit(-1);
  }
}
static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
  Thread
      .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args, LOG);
  ApplicationHistoryServer appHistoryServer = null;
  try {
    appHistoryServer = new ApplicationHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(appHistoryServer),
        SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration();
    new GenericOptionsParser(conf, args);
    appHistoryServer.init(conf);
    appHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting ApplicationHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
  }
  return appHistoryServer;
}
/**
 * Report a fatal error to the parent (task) tracker.
 */
protected void reportFatalError(TaskAttemptID id, Throwable throwable,
    String logMsg) {
  LOG.fatal(logMsg);
  if (ShutdownHookManager.get().isShutdownInProgress()) {
    return;
  }
  Throwable tCause = throwable.getCause();
  String cause = tCause == null
      ? StringUtils.stringifyException(throwable)
      : StringUtils.stringifyException(tCause);
  try {
    umbilical.fatalError(id, cause);
  } catch (IOException ioe) {
    LOG.fatal("Failed to contact the tasktracker", ioe);
    System.exit(-1);
  }
}
static JobHistoryServer launchJobHistoryServer(String[] args) {
  Thread
      .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
  JobHistoryServer jobHistoryServer = null;
  try {
    jobHistoryServer = new JobHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(jobHistoryServer),
        SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration(new JobConf());
    new GenericOptionsParser(conf, args);
    jobHistoryServer.init(conf);
    jobHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting JobHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting JobHistoryServer");
  }
  return jobHistoryServer;
}
DFSClientCache(NfsConfiguration config, int clientCache) {
  this.config = config;
  this.clientCache = CacheBuilder.newBuilder()
      .maximumSize(clientCache)
      .removalListener(clientRemovalListener())
      .build(clientLoader());

  this.inputstreamCache = CacheBuilder.newBuilder()
      .maximumSize(DEFAULT_DFS_INPUTSTREAM_CACHE_SIZE)
      .expireAfterAccess(DEFAULT_DFS_INPUTSTREAM_CACHE_TTL, TimeUnit.SECONDS)
      .removalListener(inputStreamRemovalListener())
      .build(inputStreamLoader());

  ShutdownHookManager.get().addShutdownHook(new CacheFinalizer(),
      SHUTDOWN_HOOK_PRIORITY);
}
/**
 * Main function of the DistCp program. Parses the input arguments (via
 * OptionsParser), and invokes the DistCp::run() method, via the ToolRunner.
 * @param argv Command-line arguments sent to DistCp.
 */
public static void main(String argv[]) {
  int exitCode;
  try {
    DistCp distCp = new DistCp();
    Cleanup CLEANUP = new Cleanup(distCp);

    ShutdownHookManager.get().addShutdownHook(CLEANUP,
        SHUTDOWN_HOOK_PRIORITY);
    exitCode = ToolRunner.run(getDefaultConf(), distCp, argv);
  } catch (Exception e) {
    LOG.error("Couldn't complete DistCp operation: ", e);
    exitCode = DistCpConstants.UNKNOWN_ERROR;
  }
  System.exit(exitCode);
}
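The Cleanup hook passed to addShutdownHook above is not included in the snippet. A minimal sketch of what such a hook might look like, assuming a hypothetical DistCp#cleanup() method that releases intermediate state; the actual Cleanup class may behave differently:

// Hypothetical sketch only: a shutdown hook that asks DistCp to clean up if the
// JVM exits while a copy is still in flight.
private static class Cleanup implements Runnable {
  private final DistCp distCp;

  Cleanup(DistCp distCp) {
    this.distCp = distCp;
  }

  @Override
  public void run() {
    try {
      // Best-effort release of intermediate state (e.g. staging metadata).
      distCp.cleanup(); // hypothetical method, not shown in this listing
    } catch (Exception e) {
      LOG.warn("DistCp cleanup on shutdown failed", e);
    }
  }
}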
public static SpanReceiverHost get(Configuration conf, String confPrefix) {
  synchronized (SpanReceiverHost.class) {
    SpanReceiverHost host = hosts.get(confPrefix);
    if (host != null) {
      return host;
    }
    final SpanReceiverHost newHost = new SpanReceiverHost(confPrefix);
    newHost.loadSpanReceivers(conf);
    ShutdownHookManager.get().addShutdownHook(new Runnable() {
      public void run() {
        newHost.closeReceivers();
      }
    }, 0);
    hosts.put(confPrefix, newHost);
    return newHost;
  }
}
public static void main(String[] args) {
  try {
    Configuration conf = new YarnConfiguration();
    String appId = args[0];
    String appUser = args[1];
    HPCLogAggregateHandler aggregateHandler = new HPCLogAggregateHandler(
        appId, appUser);
    initAndStartAggregation(conf, appUser, aggregateHandler);
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(aggregateHandler),
        SHUTDOWN_HOOK_PRIORITY);
  } catch (Throwable t) {
    LOG.fatal("Error starting Log Aggregation", t);
    ExitUtil.terminate(1, t);
  }
}
@Override
public void stop(BundleContext context) throws Exception {
  // There's a problem inside OSGi when the framework is being shut down:
  // hadoop.fs code registers some JVM shutdown hooks throughout the code, and this
  // ordered list of hooks is run in the shutdown thread.
  // At that time the bundle class loader / bundle wiring is no longer valid (the
  // bundle is stopped), so ShutdownHookManager can't load additional classes. But
  // some inner classes are loaded when iterating over the registered hadoop
  // shutdown hooks.
  // Let's explicitly load these inner classes while the bundle is being stopped,
  // as this is the last chance to use a valid bundle class loader.
  // This is based on the knowledge of what's contained in the SMX bundle
  // org.apache.servicemix.bundles.hadoop-client-*.jar; the above is just a warning
  // that hadoop may have some quirks when running inside OSGi.
  ClassLoader hadoopCl = ShutdownHookManager.class.getClassLoader();
  if (hadoopCl != null) {
    String shm = ShutdownHookManager.class.getName();
    hadoopCl.loadClass(shm + "$1");
    hadoopCl.loadClass(shm + "$2");
    hadoopCl.loadClass(shm + "$HookEntry");
  }
}
private void initAndStartNodeManager(Configuration conf, boolean hasToReboot) {
  try {
    // Remove the old hook if we are rebooting.
    if (hasToReboot && null != nodeManagerShutdownHook) {
      ShutdownHookManager.get().removeShutdownHook(nodeManagerShutdownHook);
    }
    nodeManagerShutdownHook = new CompositeServiceShutdownHook(this);
    ShutdownHookManager.get().addShutdownHook(nodeManagerShutdownHook,
        SHUTDOWN_HOOK_PRIORITY);
    this.init(conf);
    this.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting NodeManager", t);
    System.exit(-1);
  }
}
public static void main(String argv[]) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ResourceManager.class, argv, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    // If -format-state-store, then delete RMStateStore; else startup normally
    if (argv.length == 1 && argv[0].equals("-format-state-store")) {
      deleteRMStateStore(conf);
    } else {
      ResourceManager resourceManager = new ResourceManager();
      ShutdownHookManager.get().addShutdownHook(
          new CompositeServiceShutdownHook(resourceManager),
          SHUTDOWN_HOOK_PRIORITY);
      resourceManager.init(conf);
      resourceManager.start();
    }
  } catch (Throwable t) {
    LOG.fatal("Error starting ResourceManager", t);
    System.exit(-1);
  }
}
static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
  Thread
      .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args, LOG);
  ApplicationHistoryServer appHistoryServer = null;
  try {
    appHistoryServer = new ApplicationHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(appHistoryServer),
        SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration();
    appHistoryServer.init(conf);
    appHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting ApplicationHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
  }
  return appHistoryServer;
}
static JobHistoryServer launchJobHistoryServer(String[] args) {
  Thread
      .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
  JobHistoryServer jobHistoryServer = null;
  try {
    jobHistoryServer = new JobHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(jobHistoryServer),
        SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration(new JobConf());
    jobHistoryServer.init(conf);
    jobHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting JobHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting JobHistoryServer");
  }
  return jobHistoryServer;
}