The following 44 code examples, extracted from open source Python projects, illustrate how to use subprocess32.check_output().
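As a quick orientation before the project examples, here is a minimal sketch of the typical call pattern, assuming the subprocess32 backport is installed and imported as subprocess (the echo command, timeout value, and helper name below are illustrative only and not taken from any of the projects):

import subprocess32 as subprocess

def run_example():
    try:
        # command given as an argument list; stderr is merged into the
        # captured output and the backport supports a timeout in seconds
        output = subprocess.check_output(['echo', 'hello'],
                                         stderr=subprocess.STDOUT,
                                         timeout=5)
        return output
    except subprocess.CalledProcessError as e:
        # non-zero exit status; the captured output is available on the exception
        return e.output
    except subprocess.TimeoutExpired as e:
        # the command did not finish within the timeout
        return e.output
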
def test_should_trigger_on_connect_if_client_connect_valid(server_with_mocks):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient = require('subscriptions-transport-ws').SubscriptionClient
        new SubscriptionClient('ws://localhost:{1}/socket')
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    try:
        subprocess.check_output(
            ['node', '-e', node_script],
            stderr=subprocess.STDOUT,
            timeout=.2)
    except:
        mock = server_with_mocks.get_nowait()
        assert mock.name == 'on_connect'
        mock.assert_called_once()

def test_should_trigger_on_connect_with_correct_cxn_params(server_with_mocks):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient = require('subscriptions-transport-ws').SubscriptionClient
        const connectionParams = {{test: true}}
        new SubscriptionClient('ws://localhost:{1}/socket', {{
            connectionParams,
        }})
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    try:
        subprocess.check_output(
            ['node', '-e', node_script],
            stderr=subprocess.STDOUT,
            timeout=.2)
    except:
        mock = server_with_mocks.get_nowait()
        assert mock.name == 'on_connect'
        mock.assert_called_once()
        mock.assert_called_with({'test': True})

def _services(ip, timeout, *extra_args):
    args = ['ssh', ip, '-t']
    if extra_args:
        args += list(extra_args)
    args += ['-o', 'StrictHostKeyChecking no',
             '-o', 'ConnectTimeout %d' % timeout,
             '-o', 'BatchMode yes',
             '--', 'cat', '/etc/aeriscloud.d/*']
    try:
        return [
            dict(zip(
                ['name', 'port', 'path'],
                service.strip().split(',')
            ))
            for service in check_output(args).split('\n')
            if service
        ]
    except CalledProcessError:
        return []

def _search_variables(search_path, variable):
    files = set()
    cmd = "grep -rI '%s = ' %s" % (variable, quote(search_path))
    try:
        grep = subprocess32.check_output(cmd, shell=True)
    except subprocess32.CalledProcessError:
        return []
    for line in grep.split('\n'):
        if not line.strip():
            continue
        filename = line[:line.find(':')].strip()
        if filename.startswith('.'):
            continue
        files.add(filename)
    return files

def get_url(machine_name):
    """Given a docker machine, retrieve the URL for this machine.

    This is the DOCKER_HOST env variable from running `docker-machine env {name}`

    :param str machine_name: the name of the docker machine
    :rtype: str
    :returns: machine URL
    """
    dir_path = _machine_path(machine_name)
    if not os.path.exists(dir_path):
        raise MachineNotFoundError
    env_text = check_output(
        ['env', machine_name])
    env = parse_shell_for_exports(env_text)
    return env['DOCKER_HOST']

def do_run(cmd):
    try:
        cwd = os.getcwd() if inherit_cwd else None
        if not async:
            if stdin:
                return subprocess.check_output(cmd, shell=True, stderr=stderr,
                                               stdin=subprocess.PIPE, env=env_dict, cwd=cwd)
            output = subprocess.check_output(cmd, shell=True, stderr=stderr,
                                             env=env_dict, cwd=cwd)
            return output.decode(DEFAULT_ENCODING)
        # subprocess.Popen is not thread-safe, hence use a mutex here..
        try:
            mutex_popen.acquire()
            stdin_arg = subprocess.PIPE if stdin else None
            stdout_arg = open(outfile, 'wb') if isinstance(outfile, six.string_types) else outfile
            process = subprocess.Popen(cmd, shell=True, stdin=stdin_arg, bufsize=-1,
                                       stderr=stderr, stdout=stdout_arg,
                                       env=env_dict, cwd=cwd)
            return process
        finally:
            mutex_popen.release()
    except subprocess.CalledProcessError as e:
        if print_error:
            print("ERROR: '%s': %s" % (cmd, e.output))
        raise e

def test_trigger_on_disconnect_when_client_disconnects(server_with_mocks):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient = require('subscriptions-transport-ws').SubscriptionClient
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.client.close()
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    subprocess.check_output(['node', '-e', node_script])
    mock = server_with_mocks.get_nowait()
    assert mock.name == 'on_disconnect'
    mock.assert_called_once()

def test_should_call_unsubscribe_when_client_closes_cxn(server_with_mocks):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient = require('subscriptions-transport-ws').SubscriptionClient
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.subscribe({{
            query: `subscription useInfo($id: String) {{
                user(id: $id) {{
                    id
                    name
                }}
            }}`,
            operationName: 'useInfo',
            variables: {{
                id: 3,
            }},
        }}, function (error, result) {{
            // nothing
        }}
        )
        setTimeout(() => {{
            client.client.close()
        }}, 500)
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    try:
        subprocess.check_output(
            ['node', '-e', node_script],
            stderr=subprocess.STDOUT,
            timeout=1)
    except:
        while True:
            mock = server_with_mocks.get_nowait()
            if mock.name == 'on_unsubscribe':
                mock.assert_called_once()
                break

def test_should_trigger_on_subscribe_when_client_subscribes(server_with_mocks):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient = require('subscriptions-transport-ws').SubscriptionClient
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.subscribe({{
            query: `subscription useInfo($id: String) {{
                user(id: $id) {{
                    id
                    name
                }}
            }}`,
            operationName: 'useInfo',
            variables: {{
                id: 3,
            }},
        }}, function (error, result) {{
            // nothing
        }})
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    try:
        subprocess.check_output(
            ['node', '-e', node_script],
            stderr=subprocess.STDOUT,
            timeout=.2)
    except:
        while True:
            mock = server_with_mocks.get_nowait()
            if mock.name == 'on_subscribe':
                mock.assert_called_once()
                break

def test_passes_through_websocket_request_to_on_subscribe(server):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient = require('subscriptions-transport-ws').SubscriptionClient
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.subscribe({{
            query: `subscription context {{
                context
            }}`,
            variables: {{}},
        }}, (error, result) => {{
            if (error) {{
                console.log(JSON.stringify(error));
            }}
        }}
        );
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    try:
        subprocess.check_output(
            ['node', '-e', node_script],
            stderr=subprocess.STDOUT,
            timeout=.2)
    except:
        while True:
            mock = server.get_nowait()
            if mock.name == 'on_subscribe':
                mock.assert_called_once()
                mock.assert_called_with_contains('websocket')
                break

def check_output(cmds, shell=False):
    try:
        output = subprocess.check_output(cmds, stderr=subprocess.STDOUT, shell=shell)
        return output
    except subprocess.CalledProcessError:
        # logger.warn('Failed to run command: %s', ' '.join(cmds))
        # logger.warn('Error output:\n%s', e.output)
        raise

def idevice(name, *args):
    exec_name = 'idevice' + name
    exec_path = look_exec(exec_name)
    if not exec_path:
        raise EnvironmentError('Necessary binary ("%s") not found.' % exec_name)
    cmds = [exec_path] + list(args)
    try:
        output = subprocess.check_output(cmds, stderr=subprocess.STDOUT, shell=False)
        return output
    except subprocess.CalledProcessError:
        raise

def _init_instruments(self, bundle_id):
    self._bootstrap = os.path.join(__dir__, 'bootstrap.sh')
    self._bundle_id = bundle_id
    self._env.update({'UDID': self.udid, 'BUNDLE_ID': self._bundle_id})
    # 1. remove pipe
    # subprocess.check_output([self._bootstrap, 'reset'], env=self._env)
    # 2. start instruments
    self._proc = subprocess.Popen([self._bootstrap, 'instruments'],
                                  env=self._env, stdout=subprocess.PIPE)
    self.sleep(5.0)
    self._wait_instruments()

def _run(self, code):
    # print self._proc.poll()
    # print code
    encoded_code = json.dumps({'command': code})
    output = subprocess.check_output([self._bootstrap, 'run', encoded_code], env=self._env)
    # print output
    try:
        return json.loads(output)
    except:
        print 'unknown json output:', output
        return output

def _run_nowait(self, code):
    ''' TODO: change to no wait '''
    print self._proc.poll()
    encoded_code = json.dumps({'command': code, 'nowait': True})
    output = subprocess.check_output([self._bootstrap, 'run', '--nowait', encoded_code], env=self._env)
    return output

def devices(self):
    '''get a dict of attached devices. key is the device serial, value is device name.'''
    out = self.run_cmd('devices')  # subprocess.check_output([self.adb_path(), 'devices']).decode("utf-8")
    if 'adb server is out of date' in out:
        out = self.run_cmd('devices')
    match = "List of devices attached"
    index = out.find(match)
    if index < 0:
        raise EnvironmentError("adb is not working.")
    return dict(re.findall(r'([^\s]+)\t(\w+)', out))

def _which(editor):
    try:
        editor_path = subprocess.check_output(['which', editor],
                                              stderr=subprocess.STDOUT).strip()
        if six.PY3:
            editor_path = editor_path.decode()
    except subprocess.CalledProcessError:
        editor_path = None
    return editor_path

def devices(self):
    '''get a dict of attached devices. key is the device serial, value is device name.'''
    out = self.run_cmd('devices')  # subprocess.check_output([self.adb_path(), 'devices']).decode("utf-8")
    match = "List of devices attached"
    index = out.find(match)
    if index < 0:
        raise EnvironmentError("adb is not working.")
    return dict([s.split("\t") for s in out[index + len(match):].strip().splitlines()
                 if s.strip() and not s.strip().startswith('*')])

def parent_identical_or_crashes(self, crash, parent):
    # Base names
    cbasename = os.path.basename(crash)
    pbasename = os.path.basename(parent)

    ## Filter queue filenames with sig info
    if self.find_crash_parent_regex.match(pbasename):
        self.logr("Parent ({}) looks like crashing input!".format(pbasename))
        return True

    try:
        diff_out = subprocess.check_output("diff -q {} {}".format(crash, parent),
                                           stderr=subprocess.STDOUT, shell=True)
    except Exception, e:
        diff_out = e.output

    if not diff_out.rstrip("\n"):
        self.logr("Crash file ({}) and parent ({}) are identical!"
                  .format(cbasename, pbasename))
        return True

    cov_cmd = self.args.coverage_cmd.replace('AFL_FILE', parent)

    ### Dry-run to make sure parent doesn't cause a crash
    if self.does_dry_run_throw_error(cov_cmd):
        self.logr("Parent ({}) crashes binary!".format(pbasename))
        return True

    return False

def does_dry_run_throw_error(self, cmd):
    try:
        out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    except Exception, e:
        return (e.returncode > 128)
    return False

def check_output(cmd, *args, **kwargs):
    """This is like subprocess.check_output, except cmd is prefixed with
    docker-machine --storage-path STORAGE_PATH
    """
    args, kwargs = _process_arguments(cmd, *args, **kwargs)
    return subprocess.check_output(*args, **kwargs)

def get_version(self):
    describe_byte_string = subprocess.check_output(
        ['git', 'describe', '--match', 'v*.*.*'])
    version_string = re.findall(self.version_pat, describe_byte_string)[0]
    return version.parse(version_string)

def english_g2p(self, text):
    text = self.normalize(text)
    try:
        arpa_text = subprocess.check_output(['t2p', '"{}"'.format(text)])
        arpa_text = arpa_text.decode('utf-8')
    except OSError:
        logging.warning('t2p (from flite) is not installed.')
        arpa_text = ''
    except subprocess.CalledProcessError:
        logging.warning('Non-zero exit status from t2p.')
        arpa_text = ''
    return self.arpa_to_ipa(arpa_text)

def english_g2p(self, text):
    text = self.normalize(text).lower()
    try:
        arpa_text = subprocess.check_output(['lex_lookup', text])
        arpa_text = arpa_text.decode('utf-8')
    except OSError:
        logging.warning('lex_lookup (from flite) is not installed.')
        arpa_text = ''
    except subprocess.CalledProcessError:
        logging.warning('Non-zero exit status from lex_lookup.')
        arpa_text = ''
    return self.arpa_to_ipa(arpa_text)

def devices(self):
    '''get a dict of attached devices. key is the device serial, value is device name.'''
    out = self.run_cmd('devices')  # subprocess.check_output([self.adb_path(), 'devices']).decode("utf-8")
    if 'adb server is out of date' in out:
        out = self.run_cmd('devices')
    match = "List of devices attached"
    index = out.find(match)
    if index < 0:
        raise EnvironmentError("adb is not working.")
    return dict([s.split("\t") for s in out[index + len(match):].strip().splitlines()
                 if s.strip() and not s.strip().startswith('*')])

def run_nmap(net):
    try:
        out = subprocess.check_output(["nmap", "-oX", "-", "-R",
                                       "-p", "22-443", "-sV", net])
    except CalledProcessError:
        print("Error in caller\n")
        exit(1)
    return out

def parent_identical_or_crashes(self, crash, parent):
    # Base names
    cbasename = os.path.basename(crash)
    pbasename = os.path.basename(parent)

    ## Filter queue filenames with sig info
    if self.find_crash_parent_regex.match(pbasename):
        self.logr("Parent ({}) looks like crashing input!".format(pbasename))
        return True

    try:
        diff_out = subprocess.check_output("diff -q {} {}".format(crash, parent),
                                           stderr=subprocess.STDOUT, shell=True)
    except Exception, e:
        diff_out = e.output

    if not diff_out.rstrip("\n"):
        self.logr("Crash file ({}) and parent ({}) are identical!"
                  .format(cbasename, pbasename))
        return True

    cov_cmd = self.coverage_cmd.replace('AFL_FILE', parent)

    ### Dry-run to make sure parent doesn't cause a crash
    if self.does_dry_run_throw_error(cov_cmd):
        self.logr("Parent ({}) crashes binary!".format(pbasename))
        return True

    return False

def does_dry_run_throw_error(self, cmd):
    env = os.environ.copy()
    if self.sanitizer == 'asan':
        spectrum_asan_options(env)
    try:
        out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True, env=env)
    except Exception, e:
        # OR condition is bug fix for compute shell returning negative instead of positive code
        return (e.returncode > 128 or e.returncode < 0)
    return False

def subproc_call(cmd, timeout=None):
    try:
        output = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT,
            shell=True, timeout=timeout)
        return output
    except subprocess.TimeoutExpired as e:
        logger.warn("Command timeout!")
        logger.warn(e.output)
    except subprocess.CalledProcessError as e:
        logger.warn("Command failed: {}".format(e.returncode))
        logger.warn(e.output)

def get_video_meta(self, video_path):
    '''
    get video meta information
    :param video_path: the absolute path of video file
    :return: a dictionary
        {
            'width': integer,
            'height': integer,
            'duration': integer (millisecond)
        }
        if an error occurred, this method will return None
    '''
    try:
        output = subprocess.check_output([
            'ffprobe',
            '-v', 'error',
            '-show_entries', 'format=duration:stream=width:stream=height',
            '-select_streams', 'v:0',
            '-of', 'json',
            b'{0}'.format(video_path.encode('utf-8'))
        ])
        meta = json.loads(output)
        result = {}
        if 'format' in meta and 'duration' in meta['format']:
            result['duration'] = int(float(meta['format']['duration']) * 1000)
        if 'streams' in meta and len(meta['streams']) \
                and 'width' in meta['streams'][0] and 'height' in meta['streams'][0]:
            result['width'] = meta['streams'][0]['width']
            result['height'] = meta['streams'][0]['height']
        return result
    except subprocess.CalledProcessError as error:
        logger.error(error)
        return None

def initdb(datadir, prefix='', echo=False):
    init_args = [
        os.path.join(prefix, 'initdb'),
        '-D', datadir,
        '-U', 'postgres',
        '--auth=trust',
    ]
    output = subprocess.check_output(
        init_args,
        close_fds=True,
        stderr=subprocess.STDOUT,
    )
    if echo:
        print(output.decode('utf-8'))

def get_parent(self, filepath, isCrash=True):
    dirname, basename = os.path.split(filepath)

    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        # (_, _, session, _, syncname, src_id) = match.groups()
        (_, _, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.args.afl_fuzzing_dir
        # if syncname:
        #     searchdir += '/' + syncname + '/queue'
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None

        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'

    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)

    parent_list = filter(None, parent_fname.split("\n"))
    if (len(parent_list) == 0):
        self.logr("No parents found for file {}".format(basename))
        return None
    if (len(parent_list) > 1):
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))

    return os.path.abspath(parent_list[0].rstrip("\n"))

def run_and_wait(args, timeout=None, logfile=None, append=False,
                 env=None, cwd=None):
    """Run a command in a subprocess, then wait for it to finish.

    Parameters
    ----------
    args : string or list[string]
        the command to run.  Should be either a command string or a list
        of command string and its arguments as strings.  A list is
        preferred; see Python subprocess documentation.
    timeout : float or None
        the amount of time to wait for the command to finish, in seconds.
        If None, waits indefinitely.
    logfile : string or None
        If given, stdout and stderr will be written to this file.
    append : bool
        True to append to the logfile.  Defaults to False.
    env : dict[string, any]
        If not None, environment variables of the subprocess will be set
        according to this dictionary instead of inheriting from current
        process.
    cwd : string or None
        The current working directory of the subprocess.

    Returns
    -------
    output : string
        the standard output and standard error from the command.

    Raises
    ------
    subprocess.CalledProcessError
        if any error occurred in the subprocess.
    """
    output = subprocess.check_output(args, stderr=subprocess.STDOUT,
                                     timeout=timeout, env=env, cwd=cwd)
    output = output.decode(encoding=bag_encoding, errors=bag_codec_error)
    if logfile is not None:
        write_file(logfile, output, append=append)
    return output

def get_parent(self, filepath, isCrash=True):
    dirname, basename = os.path.split(filepath)

    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        # (_, _, session, _, syncname, src_id) = match.groups()
        (_, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.afl_fuzzing_dir
        # if syncname:
        #     searchdir += '/' + syncname + '/queue'
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None

        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'

    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)

    parent_list = filter(None, parent_fname.split("\n"))
    if (len(parent_list) == 0):
        self.logr("No parents found for file {}".format(basename))
        return None
    if (len(parent_list) > 1):
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))

    return os.path.abspath(parent_list[0].rstrip("\n"))

def generate_processor_script(events_file, log_file=None):
    script_file = os.path.join(tempfile.gettempdir(), 'kclipy.%s.processor.py' % short_uid())
    if log_file:
        log_file = "'%s'" % log_file
    else:
        log_file = 'None'
    content = """#!/usr/bin/env python
import os, sys, glob, json, socket, time, logging, tempfile
import subprocess32 as subprocess
logging.basicConfig(level=logging.INFO)
for path in glob.glob('%s/lib/python*/site-packages'):
    sys.path.insert(0, path)
sys.path.insert(0, '%s')
from localstack.config import DEFAULT_ENCODING
from localstack.utils.kinesis import kinesis_connector
from localstack.utils.common import timestamp
events_file = '%s'
log_file = %s
error_log = os.path.join(tempfile.gettempdir(), 'kclipy.error.log')
if __name__ == '__main__':
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    num_tries = 3
    sleep_time = 2
    error = None
    for i in range(0, num_tries):
        try:
            sock.connect(events_file)
            error = None
            break
        except Exception as e:
            error = e
            if i < num_tries:
                msg = '%%s: Unable to connect to UNIX socket. Retrying.' %% timestamp()
                subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
                time.sleep(sleep_time)
    if error:
        print("WARN: Unable to connect to UNIX socket after retrying: %%s" %% error)
        raise error

    def receive_msg(records, checkpointer, shard_id):
        try:
            # records is a list of amazon_kclpy.messages.Record objects -> convert to JSON
            records_dicts = [j._json_dict for j in records]
            message_to_send = {'shard_id': shard_id, 'records': records_dicts}
            string_to_send = '%%s\\n' %% json.dumps(message_to_send)
            bytes_to_send = string_to_send.encode(DEFAULT_ENCODING)
            sock.send(bytes_to_send)
        except Exception as e:
            msg = "WARN: Unable to forward event: %%s" %% e
            print(msg)
            subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)

    kinesis_connector.KinesisProcessor.run_processor(log_file=log_file, processor_func=receive_msg)
""" % (LOCALSTACK_VENV_FOLDER, LOCALSTACK_ROOT_FOLDER, events_file, log_file)
    save_file(script_file, content)
    run('chmod +x %s' % script_file)
    TMP_FILES.append(script_file)
    return script_file