We extracted the following 20 code examples from open-source Python projects to illustrate how to use subprocess32.check_call().
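Before the extracted examples, here is a minimal sketch of the basic behavior the examples rely on: check_call() runs a command given as an argument list, waits for it to finish, and raises CalledProcessError if the command exits with a non-zero status. The command and directory used here are placeholders for illustration, not taken from any of the projects below.

    # Minimal usage sketch (placeholder command, not from the projects below).
    import subprocess32 as subprocess

    try:
        # Blocks until the command exits; returns 0 on success.
        subprocess.check_call(['ls', '-l', '/tmp'])
    except subprocess.CalledProcessError as error:
        # Raised when the command exits with a non-zero return code.
        print('Command failed with exit code {}'.format(error.returncode))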
def update(name):
    """Update the addon server.
    """
    echo_heading('Retrieving addons config.', marker='-', marker_color='magenta')
    database = store.get('addon__database_container_name', default=False)
    if database is False:
        raise click.exceptions.ClickException(
            'Addons config not found. Was an addon server created with `{} addons create`?'.format(settings.APP_NAME))
    click.echo('Done.')

    project_path = get_project_path()

    echo_heading('Generating docker-compose file.', marker='-', marker_color='magenta')
    _generate_compose_file(project_path, database)
    click.echo('Done.')

    echo_heading('Updating addons proxy.', marker='-', marker_color='magenta')
    env_text = docker_machine.check_output(['env', name])
    env = os.environ.copy()
    env.update(utils.parse_shell_for_exports(env_text))
    subprocess.check_call(['docker-compose', '-f', os.path.join(
        project_path, 'proxy/docker-compose.prod.yml'), '-p', settings.ADDONS_COMPOSE_PROJECT_NAME, 'build'], env=env)
    subprocess.check_call(['docker-compose', '-f', os.path.join(
        project_path, 'proxy/docker-compose.prod.yml'), '-p', settings.ADDONS_COMPOSE_PROJECT_NAME, 'up', '-d'], env=env)
def write_potcar(self, inputdict, dst):
    """
    Concatenates multiple paw files into a POTCAR

    :param inputdict: required by baseclass
    :param dst: absolute path of the file to write to
    """
    import subprocess32 as sp
    catcom = ['cat']
    # ~ structure = inputdict['structure']
    # ~ structure = self.inp.structure
    # order the symbols according to order given in structure
    if 'elements' not in self.attrs():
        self._prestore()
    for kind in self.elements:
        paw = inputdict[self._get_paw_linkname(kind)]
        catcom.append(paw.get_abs_path('POTCAR'))
    # cat the pawdata nodes into the file
    with open(dst, 'w') as potcar_f:
        sp.check_call(catcom, stdout=potcar_f)
def up_node1():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node1'])
    subproc.check_call(['vagrant', 'up', 'node1', '--no-provision'])
    yield "node1 is ready"
    print("Destroying node1...")
    subproc.call(['vagrant', 'destroy', '-f', 'node1'])
    print("Node1 is destroyed.")
def up_node2():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node2'])
    subproc.check_call(['vagrant', 'up', 'node2'])
    yield "node2 is ready"
    print("Destroying node2...")
    subproc.call(['vagrant', 'destroy', '-f', 'node2'])
    print("Node2 is destroyed.")
def up_node3():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node3'])
    subproc.check_call(['vagrant', 'up', 'node3'])
    yield "node3 is ready"
    print("Destroying node3...")
    subproc.call(['vagrant', 'destroy', '-f', 'node3'])
    print("Node3 is destroyed.")
def bootstrap(up_node1):
    subproc.check_call([
        'vagrant', 'ssh', 'node1', '-c',
        'sudo /vagrant/bootstrap --pypi-mirror -m https://l2ohopf9.mirror.aliyuncs.com -r docker.io/laincloud --vip={}'.
        format(CONFIG.vip)
    ])
def prepare_demo_images(bootstrap):
    subproc.check_call([
        'vagrant', 'ssh', 'node1', '-c',
        'sudo sh /vagrant/bootstrap_test/prepare_demo_images.sh'
    ])
def check_call(cmd, *args, **kwargs):
    """This is like subprocess.check_call, except cmd is prefixed with
    docker-machine --storage-path STORAGE_PATH
    """
    args, kwargs = _process_arguments(cmd, *args, **kwargs)
    return subprocess.check_call(*args, **kwargs)
def _install_deisctl():
    script = requests.get(settings.DEISCTL_INSTALL_URL).text
    subprocess.check_call(['bash', '-c', script, 'install.sh', '1.12.3',
                           private_dir.private_dir_path(settings.APP_NAME)])
    os.chmod(path_utils.executable_path('deisctl'), stat.S_IRWXU)
def _install_deis():
    script = requests.get(settings.DEIS_INSTALL_URL).text
    with contextmanagers.chdir(private_dir.private_dir_path(settings.APP_NAME)):
        subprocess.check_call(['bash', '-c', script, 'install.sh', '1.12.3'])
    os.chmod(path_utils.executable_path('deis'), stat.S_IRWXU)
def update(name):
    """Update the main TigerHost server. This also updates the documentation.
    """
    echo_heading('Retrieving server config.', marker='-', marker_color='magenta')
    project_path = get_project_path()
    database = store.get('main__database_url')
    secret = store.get('main__django_secret')
    addon_name = store.get('main__addon_name')
    if database is None or secret is None or addon_name is None:
        raise click.exceptions.ClickException(
            'Server config not found. Was a TigerHost server created with `{} main create`?'.format(settings.APP_NAME))
    click.echo('Done.')

    echo_heading('Making sure addon machine exists.', marker='-', marker_color='magenta')
    addon_docker_host = docker_machine.get_url(addon_name)
    click.echo('Done.')

    echo_heading('Copying addon machine credentials.', marker='-', marker_color='magenta')
    target_path = os.path.join(project_path, 'web/credentials')
    if not os.path.exists(target_path):
        os.mkdir(target_path)
    docker_machine.retrieve_credentials(addon_name, target_path)
    click.echo('Done.')

    echo_heading('Generating docker-compose file.', marker='-', marker_color='magenta')
    _generate_compose_file(project_path, database, addon_docker_host, secret)
    click.echo('Done.')

    echo_heading('Initializing TigerHost containers.', marker='-', marker_color='magenta')
    env_text = docker_machine.check_output(['env', name])
    env = os.environ.copy()
    env.update(parse_shell_for_exports(env_text))
    subprocess.check_call(['docker-compose', '-f', os.path.join(
        get_project_path(), 'docker-compose.prod.yml'), '-p', settings.MAIN_COMPOSE_PROJECT_NAME, 'build'], env=env)
    subprocess.check_call(['docker-compose', '-f', os.path.join(
        get_project_path(), 'docker-compose.prod.yml'), '-p', settings.MAIN_COMPOSE_PROJECT_NAME, 'up', '-d'], env=env)
def _update_docker_machine_ip(machine_name, new_ip):
    path = path_utils.docker_machine_path(machine_name)
    config_path = os.path.join(path, 'config.json')
    with open(config_path, 'r') as f:
        data = json.load(f)
    data['Driver']['IPAddress'] = new_ip
    with open(config_path, 'w') as f:
        json.dump(data, f)
    docker_machine.check_call(
        ['regenerate-certs', '--force', machine_name])
def destroy():
    """A shortcut to destroy the main TigerHost server, the addons server,
    and the Deis cluster, in that order.
    """
    subprocess.check_call([settings.APP_NAME, 'main', 'destroy'])
    subprocess.check_call([settings.APP_NAME, 'addons', 'destroy'])
    subprocess.check_call([settings.APP_NAME, 'deis', 'destroy'])
def update():
    """This is a shortcut to update the addons server and the main server,
    in that order.
    """
    subprocess.check_call([settings.APP_NAME, 'addons', 'update'])
    subprocess.check_call([settings.APP_NAME, 'main', 'update'])
def create(ctx, name, instance_type, database):
    """Create machine for the addon server.
    """
    # TODO verify that database is [a-zA-Z0-9_]
    echo_heading('Creating machine {name} with type {type}.'.format(
        name=name, type=instance_type), marker='-', marker_color='magenta')
    if settings.DEBUG:
        docker_machine.check_call(
            ['create', '--driver', 'virtualbox', name])
    else:
        docker_machine.check_call(
            ['create', '--driver', 'amazonec2', '--amazonec2-instance-type', instance_type, name])
        utils.set_aws_security_group_ingress_rule(
            'docker-machine', 0, 65535, '0.0.0.0/0')

    project_path = get_project_path()

    echo_heading('Generating docker-compose file.', marker='-', marker_color='magenta')
    _generate_compose_file(project_path, database)

    echo_heading('Instantiating addons proxy.', marker='-', marker_color='magenta')
    env_text = docker_machine.check_output(['env', name])
    env = os.environ.copy()
    env.update(utils.parse_shell_for_exports(env_text))
    subprocess.check_call(['docker-compose', '-f', os.path.join(
        project_path, 'proxy/docker-compose.prod.yml'), '-p', settings.ADDONS_COMPOSE_PROJECT_NAME, 'up', '-d'], env=env)

    store.set('addon__database_container_name', database)
def create(stack):
    """Create a new Deis cluster.
    """
    deisctl = path_utils.executable_path('deisctl')
    subprocess.check_call(['ssh-add', path_utils.ssh_path('deis')])
    with contextmanagers.chdir(os.path.join(get_project_path(), 'deis')):
        subprocess.check_call(['make', 'discovery-url'])
        click.echo('Provisioning machines.')
        with contextmanagers.chdir('contrib/aws'):
            subprocess.check_call(['./provision-aws-cluster.sh', stack])
    ec2 = boto3.resource('ec2')
    instances = ec2.instances.filter(
        Filters=[
            {
                'Name': 'instance-state-name',
                'Values': ['running'],
            },
            {
                'Name': 'tag:aws:cloudformation:stack-name',
                'Values': [stack],
            },
        ]
    ).limit(1)
    ip = None
    for i in instances:
        ip = i.public_ip_address
    assert ip is not None
    click.echo('Machines provisioned. An IP address is {}.'.format(ip))
    env = {
        'DEISCTL_TUNNEL': ip
    }
    env.update(os.environ)
    click.echo('Installing Deis.')
    subprocess.check_call([deisctl, 'config', 'platform', 'set',
                           'sshPrivateKey=' + path_utils.ssh_path('deis')], env=env)
    subprocess.check_call(
        [deisctl, 'config', 'platform', 'set', 'domain=' + settings.DOMAIN_NAME], env=env)
    subprocess.check_call([deisctl, 'refresh-units'], env=env)
    subprocess.check_call([deisctl, 'install', 'platform'], env=env)
    subprocess.check_call([deisctl, 'start', 'platform'], env=env)
def create_thumbnail(self, video_path, time, output_path):
    try:
        subprocess.check_call(['ffmpeg', '-y', '-ss', time, '-i',
                               b'{0}'.format(video_path.encode('utf-8')),
                               '-vframes', '1', output_path])
        return output_path
    except subprocess.CalledProcessError as error:
        logger.error(error, exc_info=True)
        return output_path
def actualSolve(self, lp):
    """Solve a well formulated lp problem"""
    if not self.executable(self.path):
        raise PulpSolverError("PuLP: cannot execute " + self.path)

    # TODO: should we use tempfile instead?
    if not self.keepFiles:
        pid = os.getpid()
        tmpLp = os.path.join(self.tmpDir, "%d-pulp.lp" % pid)
        tmpSol = os.path.join(self.tmpDir, "%d-pulp.sol" % pid)
    else:
        tmpLp = lp.name + "-pulp.lp"
        tmpSol = lp.name + "-pulp.sol"

    lp.writeLP(tmpLp)
    proc = [
        'scip', '-c', 'read "%s"' % tmpLp, '-c', 'optimize',
        '-c', 'write solution "%s"' % tmpSol, '-c', 'quit'
    ]
    proc.extend(self.options)
    if not self.msg:
        proc.append('-q')

    self.solution_time = clock()
    subprocess.check_call(proc, stdout=sys.stdout, stderr=sys.stderr)
    self.solution_time += clock()

    if not os.path.exists(tmpSol):
        raise PulpSolverError("PuLP: Error while executing " + self.path)

    lp.status, values = self.readsol(tmpSol)

    # Make sure to add back in any 0-valued variables SCIP leaves out.
    finalVals = {}
    for v in lp.variables():
        finalVals[v.name] = values.get(v.name, 0.0)
    lp.assignVarsVals(finalVals)

    if not self.keepFiles:
        for f in (tmpLp, tmpSol):
            try:
                os.remove(f)
            except:
                pass

    return lp.status
def _build_unity(platform, unity_path):
    methods = {
        'ios': 'BuildEditorScript.PerformiOSBuild',
        'android': 'BuildEditorScript.PerformAndroidBuild',
        'osx': 'BuildEditorScript.PerformMacOSXBuild'
    }
    if platform not in methods:
        fatal("Unsupported platform.")
    unity_path = os.path.join(unity_path, "Unity.app/Contents/MacOS/Unity")
    command = "{unity_path} -quit -batchmode " \
              "-executeMethod {method} " \
              "-logFile ./build.log " \
              "-projectPath {current_dir}" \
              .format(unity_path=quote(unity_path),
                      method=methods[platform],
                      current_dir=quote(os.getcwd()))
    info("""The following command will be executed: {0}.""".format(bold(command)))
    returncode = subprocess32.call(command, shell=True)
    if returncode != 0:
        error("An error occurred, please check the content "
              "of the {0} log file.".format(bold('build.log')))
        sys.exit(returncode)
    if platform == 'ios':
        os.chdir(os.path.join(os.getcwd(), 'Build', 'iPhone'))
        command = "xcodebuild -scheme Unity-iPhone archive " \
                  "-archivePath Unity-iPhone.xcarchive"
        info("""The following command will be executed: {0}.""".format(bold(command)))
        subprocess32.check_call(command, shell=True)
        command = "xcodebuild -exportArchive " \
                  "-exportFormat ipa " \
                  "-archivePath \"Unity-iPhone.xcarchive\" " \
                  "-exportPath \"Unity-iPhone.ipa\" " \
                  "-exportProvisioningProfile \"wildcard_Development\""
        info("""The following command will be executed: {0}.""".format(bold(command)))
        subprocess32.check_call(command, shell=True)
    success("""
Your project has been built.
""")
def create(elastic_ip_id, email, rds_database, secret, hosted_zone_id):
    """This is a shortcut to create the Deis cluster, the addons server,
    and the main TigerHost server in that order. This also configures
    the DNS for Deis and the main server.
    """
    if secret is None:
        secret = _get_secret()
    if elastic_ip_id is None:
        if not settings.DEBUG:
            echo_heading('Allocating a new Elastic IP.', marker='-', marker_color='magenta')
            client = boto3.client('ec2')
            elastic_ip_id = client.allocate_address(Domain='vpc')['AllocationId']
            click.echo('Done. Allocation ID: {}'.format(elastic_ip_id))
        else:
            # not used anyways
            elastic_ip_id = 'dummy-ip-id'

    subprocess.check_call([settings.APP_NAME, 'deis', 'create'])
    subprocess.check_call([settings.APP_NAME, 'deis', 'configure-dns',
                           '--hosted-zone-id', hosted_zone_id])

    database_url = None
    addons_ip = None
    if rds_database:
        # TODO implement this
        click.abort()
    else:
        db_container_name = random_string(length=50)
        subprocess.check_call(
            [settings.APP_NAME, 'addons', 'create', '--database', db_container_name])
        addons_ip = docker_machine.check_output(
            ['ip', 'tigerhost-addons-aws']).strip()
        database_url = 'postgres://{name}@{ip}:5432/{name}'.format(
            name=db_container_name,
            ip=addons_ip,
        )
    subprocess.check_call(
        [settings.APP_NAME, 'main', 'create',
         '--database', database_url,
         '--elastic-ip-id', elastic_ip_id,
         '--secret', secret,
         ])
    subprocess.check_call([settings.APP_NAME, 'main', 'configure-dns',
                           '--elastic-ip-id', elastic_ip_id, '--hosted-zone-id', hosted_zone_id])
    subprocess.check_call([settings.APP_NAME, 'deis', 'create-admin', '--email', email])