void exec(String action, File dir, CommandLine cmdLine) {
    String label = action + ": " + cmdLine;
    try {
        DefaultExecutor executor = new DefaultExecutor();
        executor.setWorkingDirectory(dir);
        // Extend PATH so the forked process finds the locally installed node binary.
        // File.pathSeparator keeps this portable (":" on Unix, ";" on Windows).
        Map<String, String> environment = EnvironmentUtils.getProcEnvironment();
        environment.put("PATH", environment.get("PATH") + File.pathSeparator
                + new File(frontendDirectory, "node").getAbsolutePath());
        // Note: with the default expected exit value of 0, execute() already
        // throws ExecuteException on failure, so the else branch is a safety net.
        int exitValue = executor.execute(cmdLine, environment);
        if (exitValue == 0) {
            getLog().info(label + ": OK");
        } else {
            throw new MojoExecutionException("EXEC FAILURE: " + label);
        }
    } catch (RuntimeException re) {
        throw re;
    } catch (Exception e) {
        // Checked exceptions are wrapped so the method needs no throws clause.
        throw new IllegalStateException("EXEC FAILURE: " + label, e);
    }
}
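/*
 * Hypothetical call site for the helper above, assuming the enclosing Maven
 * Mojo provides frontendDirectory: run "npm install" inside it, with the
 * bundled node on the PATH. The command line is illustrative only.
 */
CommandLine npmInstall = new CommandLine("npm").addArgument("install");
exec("npm install", frontendDirectory, npmInstall);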
@SuppressWarnings("rawtypes") public static void execAsync(String display, CommandLine commandline) throws ShellCommandException { log.debug("executing async command: " + commandline); DefaultExecutor exec = new DefaultExecutor(); ExecuteResultHandler handler = new DefaultExecuteResultHandler(); PumpStreamHandler streamHandler = new PumpStreamHandler( new PritingLogOutputStream()); exec.setStreamHandler(streamHandler); try { if (display == null || display.isEmpty()) { exec.execute(commandline, handler); } else { Map env = EnvironmentUtils.getProcEnvironment(); EnvironmentUtils.addVariableToEnvironment(env, "DISPLAY=:" + display); exec.execute(commandline, env, handler); } } catch (Exception e) { throw new ShellCommandException( "An error occured while executing shell command: " + commandline, e); } }
/**
 * Creates a process with an array of arguments. It seems to be the only
 * safe way to create a process with arguments containing spaces: the
 * implementation of {@link Runtime#exec(String, String[], File)} doesn't
 * appear to process single or double quotes when tokenizing a
 * command line.
 *
 * @see Runtime#exec(String[], String[], File)
 */
@Override
protected Process launch(CommandLine commandLine,
        @SuppressWarnings("rawtypes") Map environment,
        File workingDirectory) throws IOException {
    if (workingDirectory != null && !workingDirectory.exists()) {
        throw new IOException(workingDirectory + " doesn't exist.");
    }
    String[] envVars = EnvironmentUtils.toStrings(environment);
    // Build the argv array: the executable first, then the arguments verbatim.
    String[] arguments = commandLine.getArguments();
    String[] command = new String[arguments.length + 1];
    command[0] = commandLine.getExecutable();
    System.arraycopy(arguments, 0, command, 1, arguments.length);
    return Runtime.getRuntime().exec(command, envVars, workingDirectory);
}
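/*
 * Illustration (paths are hypothetical): with the array-based launch above,
 * an argument containing spaces survives verbatim, so no shell quoting is
 * needed. Passing false to addArgument() disables commons-exec's own quoting,
 * which would otherwise wrap the argument for a single command string.
 */
CommandLine cmd = new CommandLine("/usr/bin/convert");
cmd.addArgument("/tmp/My Input File.png", false);
cmd.addArgument("/tmp/output.png", false);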
protected Map<String, String> initialEnvironment() throws ManagedProcessException {
    try {
        return EnvironmentUtils.getProcEnvironment();
    } catch (IOException e) {
        throw new ManagedProcessException("Retrieving default environment variables failed", e);
    }
}
protected Map<String, String> setupIPythonEnv() throws IOException {
    Map<String, String> envs = EnvironmentUtils.getProcEnvironment();
    if (envs.containsKey("PYTHONPATH")) {
        if (additionalPythonPath != null) {
            // Prepend the extra entries to the inherited PYTHONPATH.
            envs.put("PYTHONPATH", additionalPythonPath + ":" + envs.get("PYTHONPATH"));
        }
    } else if (additionalPythonPath != null) {
        // Guard against inserting a literal null when there is nothing to add.
        envs.put("PYTHONPATH", additionalPythonPath);
    }
    LOGGER.info("PYTHONPATH: " + envs.get("PYTHONPATH"));
    return envs;
}
private Map setupPySparkEnv() throws IOException, InterpreterException {
    Map env = EnvironmentUtils.getProcEnvironment();

    // Only set PYTHONPATH in local or yarn-client mode;
    // yarn-cluster sets up PYTHONPATH automatically.
    SparkConf conf = getSparkConf();
    if (!conf.get("spark.submit.deployMode", "client").equals("cluster")) {
        if (!env.containsKey("PYTHONPATH")) {
            env.put("PYTHONPATH", PythonUtils.sparkPythonPath());
        } else {
            // Prepend Spark's Python path to the inherited one rather than
            // overwriting it.
            env.put("PYTHONPATH", PythonUtils.sparkPythonPath() + ":" + env.get("PYTHONPATH"));
        }
    }

    // Get additional class paths when using SPARK_SUBMIT and not using yarn-client.
    // Also add all packages to PYTHONPATH, since there might be transitive dependencies.
    if (SparkInterpreter.useSparkSubmit() && !getSparkInterpreter().isYarnMode()) {
        String sparkSubmitJars = getSparkConf().get("spark.jars").replace(",", ":");
        if (!"".equals(sparkSubmitJars)) {
            // Join with ":" so the appended jars remain separate path entries.
            env.put("PYTHONPATH", env.get("PYTHONPATH") + ":" + sparkSubmitJars);
        }
    }
    LOGGER.info("PYTHONPATH: " + env.get("PYTHONPATH"));

    // Honor an explicitly configured Python executable.
    if (getSparkConf().contains("spark.pyspark.python")) {
        env.put("PYSPARK_PYTHON", getSparkConf().get("spark.pyspark.python"));
    }
    return env;
}
/**
 * Start the R REPL.
 * @throws IOException
 * @throws InterpreterException
 */
public void open() throws IOException, InterpreterException {
    createRScript();
    zeppelinR.put(hashCode(), this);

    CommandLine cmd = CommandLine.parse(rCmdPath);
    cmd.addArgument("--no-save");
    cmd.addArgument("--no-restore");
    cmd.addArgument("-f");
    cmd.addArgument(scriptPath);
    cmd.addArgument("--args");
    cmd.addArgument(Integer.toString(hashCode()));
    cmd.addArgument(Integer.toString(port));
    cmd.addArgument(libPath);
    cmd.addArgument(Integer.toString(sparkVersion.toNumber()));

    // Dump the R command to facilitate running it manually, e.g. for fault diagnosis.
    logger.debug(cmd.toString());

    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(logger);

    // Pipe stdin so eval() can feed expressions to the running R process.
    input = new PipedOutputStream();
    PipedInputStream in = new PipedInputStream(input);

    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    executor.setStreamHandler(streamHandler);
    Map<String, String> env = EnvironmentUtils.getProcEnvironment();

    initialOutput = new InterpreterOutput(null);
    outputStream.setInterpreterOutput(initialOutput);
    executor.execute(cmd, env, this);
    rScriptRunning = true;

    // Flush output.
    eval("cat('')");
}
public Map<String, String> getEnvironment() {
    // Fall back to an immutable empty map if the environment cannot be read.
    Map<String, String> env = Collections.emptyMap();
    try {
        env = EnvironmentUtils.getProcEnvironment();
    } catch (IOException e) {
        LOG.error("Unable to get environment variables", e);
    }
    return env;
}
/**
 * Create an environment by merging the current process environment and the
 * supplied one. If the supplied environment is null, a copy of the current
 * process environment is returned.
 * @param environment additional variables, possibly null
 * @return an execution environment
 */
private static Map<String, String> createEnvironment(Map<String, String> environment) {
    Map<String, String> result = null;
    try {
        result = EnvironmentUtils.getProcEnvironment();
    } catch (IOException ex) {
        throw new ElasticsearchSetupException(
                "Cannot get the current process environment", ex);
    }
    if (environment != null) {
        result.putAll(environment);
    }
    // The elasticsearch start/plugin scripts print warnings if these environment
    // variables are set, so unset them here; otherwise the warnings would make
    // the scripts' output unreliable to parse. :(
    result.remove("JAVA_TOOL_OPTIONS");
    result.remove("JAVA_OPTS");
    return result;
}
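/*
 * Hypothetical usage: pin the forked Elasticsearch heap while inheriting the
 * rest of the current environment. ES_JAVA_OPTS and the 512m sizing are
 * illustrative, not taken from the original source.
 */
Map<String, String> env = createEnvironment(
        Collections.singletonMap("ES_JAVA_OPTS", "-Xms512m -Xmx512m"));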
private static Map<String, String> extendEnvironmentWithNodeInPath(NodeManager node) throws IOException {
    Map<String, String> env = EnvironmentUtils.getProcEnvironment();
    if (env.containsKey("PATH")) {
        // Prepend the node install directory so it wins over any system-wide node.
        String path = env.get("PATH");
        env.put("PATH", node.getNodeExecutable().getParent() + File.pathSeparator + path);
    } else {
        env.put("PATH", node.getNodeExecutable().getParent());
    }
    return env;
}
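/*
 * Sketch of feeding the extended environment to commons-exec; the NodeManager
 * instance and the "npm run build" command line are assumed for illustration.
 */
DefaultExecutor executor = new DefaultExecutor();
Map<String, String> env = extendEnvironmentWithNodeInPath(node);
int exitValue = executor.execute(CommandLine.parse("npm run build"), env);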
@Before
public void setupExecutors() throws IOException {
    emptyEnvExecutor = new ShellExecutorHelper(new HashMap<String, String>());
    defaultEnvExecutor = new ShellExecutorHelper(EnvironmentUtils.getProcEnvironment());
}
public Process exec(final CommandLine cmd, final Map env) throws IOException {
    String[] envVars = EnvironmentUtils.toStrings(env);
    return Runtime.getRuntime().exec(cmd.toStrings(), envVars);
}
/**
 * Launches the given command in a new process, in the given working
 * directory.
 *
 * @param cmd
 *            the command line to execute
 * @param env
 *            the environment to set, as a map of variables
 * @param workingDir
 *            the working directory where the command should run
 * @throws IOException
 *             probably forwarded from Runtime#exec
 */
public Process exec(final CommandLine cmd, final Map env, final File workingDir) throws IOException {
    String[] envVars = EnvironmentUtils.toStrings(env);
    return Runtime.getRuntime().exec(cmd.toStrings(), envVars, workingDir);
}
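/*
 * Hedged usage sketch: inherit the current process environment, add one
 * variable, and launch in a working directory. APP_MODE, the command line,
 * and the paths are illustrative only.
 */
Map<String, String> env = EnvironmentUtils.getProcEnvironment();
env.put("APP_MODE", "test");
Process p = exec(CommandLine.parse("printenv APP_MODE"), env, new File("/tmp"));
int exitValue = p.waitFor();   // waitFor() declares InterruptedException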