我们从Python开源项目中提取了以下23个代码示例,用于说明如何使用fabric.api.get()。
def get_test_server_info(game):
    """Collect all game server info for *game*.

    Returns a dict keyed by '<game>_<server>' mapping to the server's IP,
    e.g.::

        {
            'astd_17wan_1': '10.6.120.23',
            'astd_37wan_98': '10.4.5.5',
            'astd_37wan_8': '10.4.5.15',
        }
    """
    merged = {}
    # Merge the three config sections into one server -> ip mapping.
    for section in ('server_list', 'pay_proxy', 'voice'):
        # NOTE(review): eval() on config text executes arbitrary code if the
        # config is ever attacker-controlled; ast.literal_eval would be safer
        # for plain dict literals -- confirm config contents before switching.
        merged.update(eval(gameOption(section, default='{}')))
    return {'{}_{}'.format(game, name): addr for name, addr in merged.items()}
def setup_supervisor():
    """Install supervisord and register Crestify with it.

    Supervisord keeps Crestify running in the background, restarts it after
    crashes, and starts it automatically on boot.  Only one gunicorn worker
    is exported because more than one resulted in the socket not being
    released.
    """
    sudo('apt-get -y install supervisor')
    sudo('mkdir /var/log/crestify/')
    sudo(
        'cd /home/crestify/crestify && ../crestifyenv/bin/honcho export -s /bin/sh -a crestify supervisord /etc/supervisor/conf.d')
    # Pull the generated config down, patch it line by line, then push it back.
    buf = StringIO()
    get('/etc/supervisor/conf.d/crestify.conf', buf)
    lines = buf.getvalue().splitlines()
    for idx, line in enumerate(lines):
        if line.startswith("environment="):
            # Keep the virtualenv's binaries first on the PATH.
            lines[idx] = line + ",PATH=/home/crestify/crestifyenv/bin:%(ENV_PATH)s"
        if line.startswith("user="):
            lines[idx] = "user=crestify"
        if line.startswith("stopsignal="):
            # Both Gunicorn and Celery use SIGTERM for graceful shutdown
            lines[idx] = "stopsignal=TERM"
    patched = StringIO("\n".join(lines))
    put(patched, "/etc/supervisor/conf.d/crestify.conf", use_sudo=True)
    sudo('supervisorctl reread')
    sudo('supervisorctl update')
def update_config(self, content, path):
    """Replace the remote file at *path* when its contents differ from *content*.

    The previous version (if any) is kept as ``<path>.backup``.

    :param content: desired file contents as bytes
    :param path: remote file path
    :return: True when the remote file was (re)written, False otherwise
    """
    current = six.BytesIO()
    if files.exists(path, use_sudo=self.sudo):
        fab.get(remote_path=path, local_path=current, use_sudo=self.sudo)
    changed = content != current.getvalue()
    if not changed:
        fabricio.log('{path} not changed'.format(path=path))
        return changed
    # Preserve the old file before overwriting; best-effort, so a missing
    # original does not abort the update.
    fabricio.move_file(
        path_from=path,
        path_to=path + '.backup',
        sudo=self.sudo,
        ignore_errors=True,
    )
    fab.put(six.BytesIO(content), path, use_sudo=self.sudo, mode='0644')
    fabricio.log('{path} updated'.format(path=path))
    return changed
def _backup_geonode(t=None, remote=None, local=None):
    """Back up a GeoNode instance's databases and GeoServer data directory.

    :param t: deployment type, one of GEONODE_TYPES (vanilla/geoshape)
    :param remote: destination folder on the remote host
    :param local: optional local path; when given, the backup is downloaded

    Fix: the Python-2-only ``print`` statements are rewritten as
    single-argument ``print(...)`` calls, which produce identical output on
    Python 2 while keeping the module parsable under Python 3 (and consistent
    with the sibling tasks that already use ``print()``).
    """
    t = _request_input("Type (vanilla/geoshape)", t, True, options=GEONODE_TYPES)
    remote = _request_input("Remote Destination Folder", remote, True)
    local = _request_input("Local File Path", local, False)
    if _request_continue():
        print("Backing up data...")
        # Ensure the backup directory layout exists and is writable by postgres.
        sudo("[ -d {d} ] || mkdir {d}".format(d=remote))
        sudo("[ -d {d}/db ] || mkdir {d}/db".format(d=remote))
        sudo('chown -R {u}:{g} {d}/db'.format(u="postgres", g="postgres", d=remote))
        with settings(sudo_user='postgres'):
            # pg_dump must run as the postgres user.
            sudo('pg_dump geonode | gzip > {d}/db/geonode.gz'.format(d=remote))
            sudo('pg_dump geonode_imports | gzip > {d}/db/geonode_imports.gz'.format(d=remote))
        # NOTE(review): assumed the geoserver copy runs as root (outside the
        # postgres sudo_user block) -- confirm against the original layout.
        sudo('cp -R {gsd} {d}/geoserver'.format(gsd=PATH_GEOSERVER_DATA, d=remote))
        if local:
            local_files = get(remote, local_path=local)
            for local_file in local_files:
                print("Downloaded Local File: " + local_file)
        print("Backup complete.")
def _check_want_to_be_running(stackname, autostart=False):
    """Return the stack's EC2 instances, starting the stack if needed.

    Returns False when the stack should not run (context says no servers, or
    the user declined to start it); otherwise returns the instance list.
    """
    try:
        context = context_handler.load_context(stackname)
        if not _are_there_existing_servers(context):
            return False
    except context_handler.MissingContextFile as e:
        # No context file: fall through and inspect EC2 directly.
        LOG.warn(e)
    instance_list = core.find_ec2_instances(stackname, allow_empty=True)
    if len(instance_list) >= 1:
        return instance_list
    if not autostart:
        should_start = utils._pick('should_start', [True, False],
                                   message='Stack not running. Should it be started?')
        if not should_start:
            return False
    core_lifecycle.start(stackname)
    # Re-query to pick up the ip addresses assigned to the now-running
    # instances, which weren't there before the start.
    return core.find_ec2_instances(stackname)
def glob(path, remote=True):
    """List the files in *path*, one name per entry (via ``ls -1``)."""
    with hide('everything'):
        listing = runner.run('ls -1 {}'.format(path), remote=remote)
    return listing.split()
def read_remote_file(path):
    """Fetch the remote file at *path* and return its contents as a string."""
    buf = StringIO()
    get(path, buf)
    return buf.getvalue()
def load_db_dump(dump_file):
    """Given a dump on your home dir on the server, load it to the server's
    database, overwriting any existing data. BE CAREFUL!"""
    require('environment')
    # psql authenticates via ~/.pgpass; refuse to continue without it.
    if not files.exists("%(home)s/.pgpass" % env):
        abort("Please get a copy of .pgpass and put it in your home dir")
    temp_file = os.path.join(env.home, '%(project)s-%(environment)s.sql' % env)
    put(dump_file, temp_file)
    psql_cmd = 'psql -h %s -U %s -d %s -f %s'
    run(psql_cmd % (env.db_host, env.db_user, env.db, temp_file))
def ip(self):
    """
    Poll vcenter to get the virtual machine IP

    :return: Return the ip (None when no VM object is attached)
    """
    if not self._vm_object:
        return None
    # NOTE(review): the full summary attribute chain is re-read on each
    # access (presumably refreshed by the underlying API), so intermediate
    # objects are deliberately not cached here.
    if not self._vm_object.summary.guest.ipAddress:
        timeout_loop(
            self.timeout,
            'Get IP',
            1,
            False,
            lambda: self._vm_object.summary.guest.ipAddress,
        )
    address = self._vm_object.summary.guest.ipAddress
    validate_ip(address)
    return address
def load_file(game_servers, local_file, remote_file, load_type='upload'):
    """Upload a file to, or download a file from, the given game servers.

    :param game_servers: game server names to operate on
    :param local_file: local/FTP-side file path (upload source)
    :param remote_file: file path relative to each server's /app/<server>/ dir
    :param load_type: 'upload' (default) or 'download'
    :raises Exception: on download, when the target file is missing on a server
    """
    test_server_info = get_test_server_info(GAME)
    check_game_servers(game_servers, test_server_info)
    # Map each host IP to the list of game servers living on it.
    locate_game_servers = transform_gameservers(game_servers, test_server_info)
    ips = locate_game_servers.keys()

    @hosts(ips)
    def _upload_file():
        # Push once per host, then install into every game server on it.
        upload(local_file, REMOTE_DIR, env.host_string)
        for game_server in locate_game_servers[env.host_string]:
            replace_file(game_server, remote_file)

    @hosts(ips)
    def _download_file():
        # NOTE: closes over local_root_path, which is assigned in the
        # 'download' branch below before execute() runs this task.
        for game_server in locate_game_servers[env.host_string]:
            local_path = '{}/{}/'.format(local_root_path, game_server)
            local('su - astd -c "mkdir -p {}"'.format(local_path))
            target_file = '/app/{}/{}'.format(game_server, remote_file)
            # Probe quietly so a missing file doesn't abort the whole run.
            with quiet():
                target_file_exists = run('test -f {}'.format(target_file)).succeeded
            if target_file_exists:
                get(target_file, local_path)
            else:
                raise Exception('File {} NOT exists on {}'.format(target_file, game_server))
        local('chown -R astd.astd {}'.format(local_root_path))

    if load_type == 'upload':
        ftp_file_check(local_file)
        file_name_consistence_check(local_file, remote_file)
        execute(_upload_file)
        print('{} was uploaded to {} successfully.'.format(local_file, game_servers))
    elif load_type == 'download':
        # Downloads land under a timestamped directory in the FTP tree.
        ftp_path = 'download/{}/{}'.format(GAME, TIMESTAMP)
        local_root_path = '/app/online/{}'.format(ftp_path)
        execute(_download_file)
        print('Downloaded remote file: {} to FTP: {}/'.format(remote_file, ftp_path))
def export_db(game_servers, export_type='data'):
    """Dump each game server's database remotely and download the archives.

    :param game_servers: game servers whose databases should be exported
    :param export_type: 'data' for a full dump, 'no-data' for schema only
    """
    test_server_info = get_test_server_info(GAME)
    check_game_servers(game_servers, test_server_info)
    locate_game_servers = transform_gameservers(game_servers, test_server_info)
    ips = locate_game_servers.keys()
    ftp_path = 'download/{}/{}'.format(GAME, TIMESTAMP)
    local_root_path = '/app/online/{}'.format(ftp_path)

    @hosts(ips)
    def _dump_and_fetch():
        for server in locate_game_servers[env.host_string]:
            dest_dir = '{}/{}/'.format(local_root_path, server)
            local('su - astd -c "mkdir -p {}"'.format(dest_dir))
            run('mkdir -p {}'.format(REMOTE_DIR))
            sql_name = '{}.sql.rb{}'.format(server, TIMESTAMP)
            if export_type == 'no-data':
                # -d: dump the schema only, no row data.
                run('pandora --dump -R --opt -d {} >{}/{}'.format(server, REMOTE_DIR, sql_name))
            elif export_type == 'data':
                run('pandora --dump -R --opt {} >{}/{}'.format(server, REMOTE_DIR, sql_name))
            with cd(REMOTE_DIR):
                run('tar zcf {0}.tgz {0}'.format(sql_name))
            get('{}/{}.tgz'.format(REMOTE_DIR, sql_name), dest_dir)
        local('chown -R astd.astd {}'.format(local_root_path))

    print('Start dumping db...')
    sys.stdout.flush()
    execute(_dump_and_fetch)
    print('Downloaded db to FTP: {}/'.format(ftp_path))
def install_python_reqs():
    """Install the system packages required to build Python dependencies."""
    sudo('apt-get update --fix-missing')
    package_groups = (
        # Python headers (python-dev) are needed to compile some libraries.
        'python-virtualenv python-dev python-pip',
        # Readability dependencies for lxml.
        'libxslt1-dev libxml2-dev libz-dev',
    )
    for group in package_groups:
        sudo('apt-get -y install ' + group)
def install_rabbitmq():
    """Install RabbitMQ (our message queue) from the upstream apt repository."""
    commands = (
        'echo "deb http://www.rabbitmq.com/debian/ testing main" >> /etc/apt/sources.list',
        'apt-get -y install wget sudo',
        'apt-get -y install ca-certificates',
        'wget --quiet -O - https://www.rabbitmq.com/rabbitmq-release-signing-key.asc | sudo apt-key add -',
        'apt-get update --fix-missing',
        'apt-get -y install rabbitmq-server',
    )
    for command in commands:
        sudo(command)
def install_postgres():
    """Install PostgreSQL 9.4 from the official PGDG apt repository."""
    # Register the PGDG repo and trust its signing key before installing.
    sudo('echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" >> /etc/apt/sources.list.d/pgdg.list')
    sudo('wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -')
    sudo('apt-get update')
    sudo('apt-get -y install postgresql-9.4 postgresql-server-dev-9.4 postgresql-contrib-9.4')
def install_redis():
    """Install the redis-server apt package.

    Redis is used for temporarily saving incoming tab saves.
    """
    sudo('apt-get -y install redis-server')
def install_phantomjs():
    """Install the PhantomJS 2.1.1 headless browser into /usr/local/bin."""
    release = 'phantomjs-2.1.1-linux-x86_64'
    # fontconfig is installed alongside the PhantomJS binary.
    sudo('apt-get -y install fontconfig')
    run('wget https://bitbucket.org/ariya/phantomjs/downloads/{}.tar.bz2'.format(release))
    run('tar xjf {}.tar.bz2'.format(release))
    sudo('mv {}/bin/phantomjs /usr/local/bin/phantomjs'.format(release))
def config_environment():
    """Create the crestify user and check out and install the application."""
    sudo('apt-get -y install git screen')
    sudo('adduser crestify --disabled-password --gecos GECOS')
    sudo('locale-gen en_US.UTF-8')
    # Run the checkout/install steps as the crestify user in its home dir.
    with settings(sudo_user='crestify', shell='/bin/bash -c'), cd('/home/crestify'):
        sudo('git clone https://github.com/crestify/crestify.git crestify')
        sudo('virtualenv crestifyenv')
        with prefix('source crestifyenv/bin/activate'):
            sudo('pip install -r crestify/requirements.txt')
def update(self, tag=None, registry=None, account=None, force=False):
    """Update a master-slave PostgreSQL container configuration.

    Requires Fabric's parallel mode: each host runs this concurrently and
    coordinates through the shared queue/lock/event objects on ``self``.

    :param tag: image tag to update to
    :param registry: image registry
    :param account: registry account
    :param force: force the container update
    :return: True when either the recovery config or the container changed
    """
    if not fab.env.parallel:
        fab.abort(
            'Master-slave configuration update requires parallel mode. '
            'Use Fabric\'s `--parallel` option to enable this mode '
            'for a current session.'
        )
    # Register this instance as a participant before doing any work.
    self.instances.put(None)
    try:
        recovery_config_updated = self.update_recovery_config(
            tag=tag,
            registry=registry,
            account=account,
        )
        container_updated = super(
            StreamingReplicatedPostgresqlContainer,
            self,
        ).update(force=force, tag=tag, registry=registry, account=account)
        # Recovery config changed but the container did not restart: reload
        # so the new config takes effect.
        if not container_updated and recovery_config_updated:
            self.reload()
        self.master_obtained.set()  # one who first comes here is master
        return container_updated or recovery_config_updated
    except Exception as exception:
        # Propagate the failure to the other processes before re-raising.
        self.multiprocessing_data.exception = exception
        raise
    finally:
        try:
            self.master_lock.release()
        except ValueError:  # ignore "released too many times" error
            pass
        self.instances.get()
        self.instances.task_done()
        self.instances.join()  # wait until all instances will be updated

        # reset state at the end to prevent fail of the next Fabric command
        self.master_obtained.clear()
def scp_from_remote(*files):
    """Download files from ``base_dir`` on the remote host into the current
    working directory, mirroring each file's relative sub-path locally.

    :param files: remote file paths relative to ``base_dir``

    Fixes: the loop variable no longer shadows the ``file`` builtin, the
    unused ``file_name`` computation is gone (`rpartition` yields the parent
    directly, '' when there is no slash -- same as the old rsplit branch),
    and the commented-out duplicate implementation is removed.
    """
    cur_dir = os.getcwd()
    for remote_rel in files:
        # Parent portion of the remote path ('' for bare file names), used to
        # recreate the same sub-directory layout under cur_dir.
        parent, _, _ = remote_rel.rpartition('/')
        get(os.path.join(base_dir, remote_rel), os.path.join(cur_dir, parent))
def _are_there_existing_servers(context):
    """Decide from a stack *context* dict whether EC2 servers should exist.

    :param context: stack context; may lack an 'ec2' key (very old stacks),
        carry a bool, or carry a dict with 'suppressed'/'cluster-size' keys
    :return: truthy when servers are expected to exist.  Note: for a dict
        'ec2' value the result is ``ec2 and num_suppressed < cluster_size``,
        so an empty dict is returned as-is (falsy) -- preserved behavior.

    Fixes: idiomatic ``'ec2' not in context`` (was ``not 'ec2' in``) and the
    repeated ``context['ec2']`` lookups are hoisted into a local.
    """
    if 'ec2' not in context:
        # very old stack, canned response
        return True
    ec2 = context['ec2']
    if isinstance(ec2, bool):
        # no ec2 instances or an instance whose buildvars haven't been updated.
        # either way, the value here can be used as-is
        return ec2
    # Servers exist only while fewer instances are suppressed than the
    # cluster is sized for.
    num_suppressed = len(ec2.get('suppressed', []))
    cluster_size = ec2.get('cluster-size', 1)
    return ec2 and num_suppressed < cluster_size
def download_file(stackname, path, destination='.', allow_missing="False", use_bootstrap_user="False"):
    """
    Downloads `path` from `stackname` putting it into the `destination` folder, or the `destination` file if it exists and it is a file.

    If `allow_missing` is "True", a non-existent `path` will be skipped without errors.

    If `use_bootstrap_user` is "True", the owner_ssh user will be used for connecting instead of the standard deploy user.

    Boolean arguments are expressed as strings as this is the idiomatic way of passing them from the command line.
    """
    allow_missing = strtobool(allow_missing)
    use_bootstrap_user = strtobool(use_bootstrap_user)
    username = BOOTSTRAP_USER if use_bootstrap_user else DEPLOY_USER
    with stack_conn(stackname, username=username):
        if allow_missing and not files.exists(path):
            return  # nothing to download
        get(path, destination, use_sudo=True)
def ssh_download(
        self, remote_path, local_path, use_sudo=False, quiet=False, **kwargs
):
    """
    Download a file or directory from the virtual machine

    :param remote_path: The remote location
    :param local_path: The local destination
    :param use_sudo: If True, it runs as sudo
    :param quiet: Whether to hide the stdout/stderr output or not
    :return: The list of downloaded files (None when no VM object is attached)
    :raise: DownloadError: If the task fails
    """
    if not self._vm_object:
        return None
    ssh_username = kwargs['vcdriver_vm_ssh_username']
    ssh_password = kwargs['vcdriver_vm_ssh_password']
    self._wait_for_ssh_service(ssh_username, ssh_password)
    with fabric_context(self.ip(), ssh_username, ssh_password):
        if quiet:
            # Suppress fabric's stdout/stderr chatter for the transfer.
            with hide('everything'):
                result = get(remote_path, local_path, use_sudo=use_sudo)
        else:
            result = get(remote_path, local_path, use_sudo=use_sudo)
        if result.failed:
            raise DownloadError(
                local_path=local_path,
                remote_path=remote_path,
            )
        return result