The following 33 code examples, extracted from open-source Python projects, illustrate how to use fabric.api.execute().
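Before the extracted examples, here is a minimal sketch of the basic calling pattern. The hostnames and the uptime command are placeholders, not taken from the projects below: execute() runs a task once for each host and returns a dict keyed by host string.

from fabric.api import execute, hosts, run


@hosts('web1.example.com', 'web2.example.com')
def uptime():
    # Runs on whichever host execute() is currently iterating over.
    return run('uptime')


def report():
    # execute() returns {'web1.example.com': <result>, 'web2.example.com': <result>}.
    results = execute(uptime)
    for host, output in results.items():
        print('{}: {}'.format(host, output))

execute() also accepts a hosts= keyword argument, which many of the examples below use instead of the @hosts decorator.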
def tcp_port(self):
    """
    The tcp port used for the game server.
    Will try to get only once and save to self._tcp_port for later use.
    """
    def get_tcp_port():
        cmd = '''grep 'name="port" type="int"' conf.xml |awk -F[\<\>] '{print $3}' '''

        @hosts(self.int_ip)
        def _get_tcp_port():
            with cd('/app/{}/backend/apps'.format(self.name)):
                result = run(cmd)
                lines = result.splitlines()
                if len(lines) == 1:
                    return int(lines[0])
                else:
                    raise Exception("Can't get tcp port using cmd: {}".format(cmd))

        result = execute(_get_tcp_port)
        return result[self.int_ip]

    if not self._tcp_port:
        self._tcp_port = get_tcp_port()
    return self._tcp_port
def dns(self):
    """
    The dns for the game server.
    Will try to get only once and save to self._dns for later use.
    """
    def get_dns():
        cmd = '''grep server_name %s.conf | awk '{print $2}' | tr -d ";" ''' % self.name

        @hosts(self.int_ip)
        def _get_dns():
            with cd('/app/nginx/conf/vhost'.format(self.name)):
                result = run(cmd)
                lines = result.splitlines()
                if len(lines) == 1:
                    return lines[0]
                else:
                    raise Exception("Can't get dns using cmd: {}".format(cmd))

        result = execute(_get_dns)
        return result[self.int_ip]

    if not self._dns:
        self._dns = get_dns()
    return self._dns
def hotswap(file, type, keywords):
    #env.parallel = True
    #env.pool_size = 10
    result_for_gameServer = {}
    result = execute(_hotswap, file, type, keywords, hosts=IPS)
    for each_ip in IPS:
        result_for_gameServer.update(result[each_ip])
    print('Hotswap results are shown below:')
    for each in result_for_gameServer:
        if result_for_gameServer[each]:
            print('[SUCC] <===> {}'.format(each))
        else:
            print('[FAIL] <===> {}'.format(each))
    if not all(result_for_gameServer.values()):
        sys.exit(1)
def add_match_dns(self):

    @hosts('dns')
    def _add_match_dns(id, ip):
        dns_add_cmd = '/app/opbin/dns/dnsapi -g {0} -a add -d match{1}.{0} -l 1 -i {2}'.format(GAME, id, ip)
        print('Adding DNS record with command: {}'.format(dns_add_cmd))
        ret_value = run(dns_add_cmd)
        if ret_value != 'Record add success':
            print('[WARNING] Failed to add dns, you can try again manually: {}'.format(dns_add_cmd))

    execute(_add_match_dns, self.id, self.ip)

#    def clean_job(self):
#
#        @hosts(self.template_matchServer)
#        def _clean_on_template_server():
#            with cd('/app/nginx/conf/vhost'):
#                run('rm -f download_{}.conf'.format(TIME))
#            reload_nginx()
#
#        execute(_clean_on_template_server)
def upgrade(
    self,
    tag=None,
    force=False,
    backup=False,
    migrate=True,
):
    """
    upgrade service to a new version (backup -> pull -> migrate -> update)
    """
    if utils.strtobool(backup):
        execute(self.backup)
    execute(self.pull, tag=tag)
    if utils.strtobool(migrate):
        execute(self.migrate, tag=tag)
    execute(self.update, tag=tag, force=force)
def deploy(
    self,
    tag=None,
    force=False,
    backup=False,
    migrate=True,
):
    """
    full service deploy (prepare -> push -> upgrade)
    """
    execute(self.prepare, tag=tag)
    execute(self.push, tag=tag)
    execute(
        self.upgrade,
        tag=tag,
        force=force,
        backup=backup,
        migrate=migrate,
    )
def swarm_init():
    """
    enable Docker swarm mode
    """
    def _swarm_init():
        if swarm_init.worker_join_command is None:
            fabricio.run(
                'docker swarm init --advertise-addr {0}'.format(fab.env.host),
                ignore_errors=True,
            )
            join_token = fabricio.run(
                'docker swarm join-token --quiet manager',
                ignore_errors=True,
            )
            swarm_init.worker_join_command = (
                'docker swarm join --token {join_token} {host}:2377'
            ).format(join_token=join_token, host=fab.env.host)
        else:
            fabricio.run(
                swarm_init.worker_join_command,
                ignore_errors=True,
            )

    with fab.settings(hosts=hosts):
        fab.execute(_swarm_init)
def test_rollback(self, revert, migrate_back):
    tasks_list = tasks.DockerTasks(service=TestContainer(), hosts=['host'])
    rollback = mock.Mock()
    rollback.attach_mock(migrate_back, 'migrate_back')
    rollback.attach_mock(revert, 'revert')
    revert.return_value = True

    # with migrate_back disabled
    tasks_list.rollback.name = '{0}__migrate_disabled'.format(self)
    fab.execute(tasks_list.rollback, migrate_back='no')
    migrate_back.assert_not_called()
    revert.assert_called_once()
    rollback.reset_mock()

    # default case
    tasks_list.rollback.name = '{0}__default'.format(self)
    fab.execute(tasks_list.rollback)
    self.assertListEqual(
        [mock.call.migrate_back(), mock.call.revert()],
        rollback.mock_calls,
    )
    rollback.reset_mock()
def get_external_ip(int_ip):

    @hosts(int_ip)
    def _get_external_ip():
        ext_ip = run('''curl -s ip.cn |awk '{split($2,x,"?");print x[2]}' ''')
        return ext_ip

    ret_value = execute(_get_external_ip)[int_ip]
    return ret_value
def transform(self, gameServers, all_gameServer_info=None):
    """
    Transform function.
    e.g. it will transform
        ['astd_37wan_2', 'astd_51wan_99', 'astd_uoyoo_90']
    into
        {
            '10.6.20.1': ['astd_37wan_2', 'astd_51wan_99'],
            '10.6.20.2': ['astd_uoyoo_90']
        }
    """
    if not all_gameServer_info:
        all_gameServer_info = self.all_gameServer_info
    IPS = list(set([all_gameServer_info[each] for each in gameServers]))
    locate_game_servers = {each: [] for each in IPS}
    for each in gameServers:
        locate_game_servers[all_gameServer_info[each]].append(each)
    return locate_game_servers

#    def sql_content_exec(self, gameServers, sql_content, backup='Yes', remote_dir=REMOTE_DIR):
#        locate_game_servers = self.transform(gameServers)
#        ips = locate_game_servers.keys()
#
#        def _sql_content_exec(sql_content, locate_game_servers, backup):
#            for gameServer in locate_game_servers[env.host_string]:
#                backup_dir = '{}/{}'.format(remote_dir, gameServer)
#                run('[ -d {0} ] || mkdir -p {0}'.format(backup_dir))
#                if backup.lower() == 'yes':
#                    run('pandora --dump --opt -R {0} >{1}/rollback_{0}.sql'.format(gameServer, backup_dir))
#                run('''pandora --update {} -e '{}' '''.format(gameServer, sql_content))
#
#        execute(_sql_content_exec, sql_content, locate_game_servers, backup=backup, hosts=ips)
#
def dump_db(self, remote_dir, timestamp=TIMESTAMP):
    dest = '{}/{}.sql.rb{}'.format(remote_dir, self.name, timestamp)
    self.mkdir(os.path.dirname(dest))

    @hosts(self.int_ip)
    def _dump_db():
        run('''pandora --dump --opt -R {} >{}'''.format(self.name, dest))
        return dest

    result = execute(_dump_db)
    return result[self.int_ip]
def _operation(self, action):
    def _op():
        run('set -m; /app/{}/backend/bin/startup.sh {} && sleep 0.2'.format(self.name, action), warn_only=True)

    execute(_op, hosts=[self.int_ip])
def sql_exec(self, sql_file):
    def _sql_exec():
        run('pandora --update {} <{}'.format(self.name, sql_file))

    execute(_sql_exec, hosts=[self.int_ip])
def upload_log(self, logtype=None, date=None, logfile=None, ftp_ip=None):
    """
    An example:
    pandora --ftp -r 30 -t 1200 -z -m 42.62.119.164 /tjmob_log/tjmob_37wan_1 /app/tjmob_37wan_1/backend/logs/game/dayreport/dayreport_2015-05-03.log.bz2*
    """
    from bible.utils import BZIP2
    ftp_log_path = '/{}_log/{}'.format(self.game, self.name)
    logtypes = ['dayreport', 'rtreport']
    date = date if date else time.strftime('%Y-%m-%d')
    ftp_ip = ftp_ip if ftp_ip else '42.62.119.164'
    if logfile:
        logfiles = [logfile]
    else:
        if logtype:
            logfiles = ['/app/{0}/backend/logs/game/{1}/{1}_{2}.log'.format(self.name, logtype, date)]
        else:
            logfiles = ['/app/{0}/backend/logs/game/{1}/{1}_{2}.log'.format(self.name, each_logtype, date) for each_logtype in logtypes]

    @hosts(self.int_ip)
    def _upload_log():
        for each_log in logfiles:
            dir, filename = os.path.split(each_log)
            with cd(dir):
                file_bz2 = '{}.bz2'.format(filename)
                file_md5 = '{}.MD5'.format(file_bz2)
                run('[ -f {0} ] && echo "{0} already exists" || {1} {2}'.format(file_bz2, BZIP2, filename))
                run('[ -f {0} ] && echo "{0} already exists" || md5sum {1} >{0}'.format(file_md5, file_bz2))
                run('''pandora --ftp -r 30 -t 1200 -z -m {} {} {}.bz2*'''.format(ftp_ip, ftp_log_path, each_log))

    execute(_upload_log)

#End class GameServer ###############################################
def template_matchServer_running(template_matchServer_ip, template_matchServer_id=1):
    template_matchServer = '{}_match_{}'.format(GAME, template_matchServer_id)

    @hosts(template_matchServer_ip)
    def _template_matchServer_running():
        with quiet():
            running = run('ps x | grep "[j]ava -Dstart.home=/app/{}/backend"'.format(template_matchServer)).succeeded
        if not running:
            raise Exception("Can't find the running java process for {}".format(template_matchServer))

    execute(_template_matchServer_running)
def matchServer_exists(matchServer, ip):
    with quiet():
        exists = local('''grep "\\b{}\\b" /etc/hosts '''.format(matchServer)).succeeded
    if exists:
        raise Exception('''The match server {} already exists in /etc/hosts'''.format(matchServer))
    else:
        matchServer_dir_exists = execute(remote_dir_exists, '/app/{}'.format(matchServer), hosts=[ip])[ip]
        if matchServer_dir_exists:
            raise Exception('''The match dir: /app/{} already exists on {}'''.format(matchServer, ip))

#def create_nginx_conf(remote_dir):
#    server_name = 'match_download_{}'.format(TIME)
#    conf_name = 'download_{}.conf'.format(TIME)
#    with cd('/app/nginx/conf/vhost'):
#        run('''echo -e "server {\\n listen 80;\\n server_name %s;\\n root %s;\\n index Main.html;\\n access_log logs/default.access.log main;\\n location / {\\n expires 0;\\n }\\n\\n error_page 404 500 502 503 504 /404.html;\\n}" >%s''' % (server_name, remote_dir, conf_name))
def job_on_template_server(self):

    @hosts(self.template_matchServer_ip)
    def _job_on_template_server():
        mk_remote_dir(self.remote_dir)
        print('Packaging data from {} into {}/package.tgz ...'.format(self.template_matchServer, self.remote_dir))
        packaging_data(self.template_matchServer, self.remote_dir)
        #print('Creating nginx conf ...')
        #create_nginx_conf(self.remote_dir)
        #reload_nginx()
        #ext_ip = run('''curl -s ipip.net |awk '{split($2,x,"?");print x[2]}' ''')
        #return ext_ip

    #self.template_matchServer_ip = execute(_job_on_template_server)[self.template_matchServer]
    execute(_job_on_template_server)
def add_match_to_gw(self):

    @hosts('{}_gw'.format(GAME))
    def _add_match_to_gw():
        gw_db_name = '{}_gw'.format(GAME)
        mk_remote_dir(self.remote_dir)
        with cd(self.remote_dir):
            run('''pandora --dump --opt -R {0} match_server_info >{0}.match_server_info.sql.rb{1}'''.format(gw_db_name, TIME))
            run('''pandora --update {} -e 'INSERT INTO match_server_info (type,match_adress,match_id,match_name) VALUES (1,"{}:8092",{},"{}_match_{}");' '''.format(gw_db_name, self.dns, self.id, GAME, self.id))
            result = run('''pandora {} -e 'SELECT * FROM match_server_info' '''.format(gw_db_name))
            print('Done.\nCurrent match_server_info records in the GW database:\n{}'.format(result))

    execute(_add_match_to_gw)
def resRsyncResult():
    try:
        print "Waiting 30 seconds before checking the rsync result..."
        sys.stdout.flush()
        time.sleep(30)
        rsync_module = gameOption("rsync_module", default="").strip()
        rsync_root = gameOption("rsync_root", default="").strip()
        rsync_backup_ip = gameOption("rsync_backup_ip", default="").strip()
        rootSshObj = ssh.ssh(ip, port=port, user="root")
        if rsync_module != "" and rsync_root != "" and rsync_backup_ip != "":
            for i in range(6):
                out = execute(fabriccmd, '''cd %s && rsync -art -R --dry-run --delete --out-format="%%n" ./ %s::%s --password-file=/etc/rsyncd.secret''' % (rsync_root, rsync_backup_ip, rsync_module), hosts=[ip])[ip]
                if out == None:
                    break
                elif out.strip() != "":
                    print "Resources are still syncing, waiting another 60s before rechecking..."
                    sys.stdout.flush()
                    time.sleep(60)
                else:
                    print "Resource sync finished"
                    break
        else:
            print "WARNING: rsync settings are not configured; sleeping 30s and then updating version.lua without verifying the sync!!!!!!!"
            sys.stdout.flush()
            time.sleep(30)
    except Exception, e10:
        print "WARNING: failed to check the rsync result! err:%s" % str(e10)
def upload_to_resource_server(game, file):
    dir, filename = os.path.split(file)
    resource_dir = '/app/www/{}/{}/{}'.format(game, RELEASE_TYPE, TIMESTAMP)
    resource_ip = gameOption('www_ssh_ip')
    execute(mk_remote_dir, resource_dir, hosts=[resource_ip])
    with lcd(dir), settings(host_string=resource_ip):
        put(filename, resource_dir)
        put('md5.txt', resource_dir)
    #local('{} {}/{{{},md5.txt}} {}:{}/'.format(RSYNC, dir, filename, resource_ip, resource_dir))
def execute(*args, **kwargs):
    try:
        task, args = args[0], args[1:]
    except IndexError:
        raise TypeError('must provide task to execute')
    default_name = '{command}.{task_name}({id})'.format(
        command=fab.env.command,
        task_name=getattr(task, 'name', task.__name__),
        id=id(task),
    )
    with utils.patch(task, 'name', _uncrawl(task) or default_name):
        return fab.execute(task, *args, **kwargs)
def rollback(self, migrate_back=True):
    """
    rollback service to a previous version (migrate-back -> revert)
    """
    if utils.strtobool(migrate_back):
        execute(self.migrate_back)
    execute(self.revert)
def test_skip_unknown_host(self):
    mocked_task = mock.Mock()

    @fabricio.tasks.skip_unknown_host
    def task():
        mocked_task()

    with fab.settings(fab.hide('everything')):
        fab.execute(task)
        mocked_task.assert_not_called()

        fab.execute(task, host='host')
        mocked_task.assert_called_once()
def test_infrastructure(self):
    class AbortException(Exception):
        pass

    def task():
        pass

    cases = dict(
        default=dict(
            decorator=tasks.infrastructure,
            expected_infrastructure='task',
        ),
        invoked=dict(
            decorator=tasks.infrastructure(),
            expected_infrastructure='task',
        ),
        with_custom_name=dict(
            decorator=tasks.infrastructure(name='infrastructure'),
            expected_infrastructure='infrastructure',
        ),
    )

    with fab.settings(abort_on_prompts=True, abort_exception=AbortException):
        with mock.patch.object(fab, 'abort', side_effect=AbortException):
            for case, data in cases.items():
                with self.subTest(case=case):
                    decorator = data['decorator']
                    infrastructure = decorator(task)

                    self.assertTrue(is_task_object(infrastructure.confirm))
                    self.assertTrue(is_task_object(infrastructure.default))

                    fab.execute(infrastructure.confirm)
                    self.assertEqual(data['expected_infrastructure'], fab.env.infrastructure)

                    fab.env.infrastructure = None
                    with mock.patch.object(console, 'confirm', side_effect=[True, False]):
                        fab.execute(infrastructure.default)
                        self.assertEqual(data['expected_infrastructure'], fab.env.infrastructure)

                        with self.assertRaises(AbortException):
                            fab.execute(infrastructure.default)
def install_deps(packages):
    run('su -c \'apt-get update && apt-get install ' + packages + '\'')
    #execute(su, pwd, 'apt-get update && apt-get install '+packages)
def serial_work(single_node_work, params):
    with settings(**params):
        return execute(serial(single_node_work), hosts=params['public_ips'].values())
def parallel_work(single_node_work, params):
    with settings(**params):
        return execute(parallel(single_node_work), hosts=params['public_ips'].values())
def quiet_ssh(command, hosts, ssh_key):
    """Execute command over SSH on hosts, suppressing all output.

    This is useful for instances where you may only want to see if a
    command succeeds or times out, since stdout is otherwise discarded."""
    return execute(_quiet_task, command=command, hosts=hosts, ssh_key=ssh_key)
def ssh(command, hosts, ssh_key):
    """Execute command over SSH on hosts."""
    return execute(_task, command=command, hosts=hosts, ssh_key=ssh_key)
def transfer(self, files, dest_dir):
    md5 = {}

    def md5sum(file):
        return run("md5sum %s | awk '{print $1}'" % file)

    def wget_files(filenames, dest_dir):
        mk_remote_dir(dest_dir)
        with cd(dest_dir):
            for each_filename in filenames:
                # If the file already exists on the target host, compare its md5 with the
                # md5 recorded on the source host before fetching it, so an up-to-date
                # file is not overwritten by wget -O.
                file_exists = file_exists_check(each_filename)
                if file_exists:
                    if md5[each_filename] == md5sum(each_filename):
                        break
                run('''{0} -O {1} --header="Host:{2}" http://{3}/{1}'''.format(WGET, each_filename, self.server_name, self.source_ext_ip))

    @hosts(self.source_ip)
    def _transfer(files, dest_dir):
        filenames = retrieve_file_names(files)
        root_dir = '/app/opbak/download_{}_{}'.format(TIMESTAMP, self.server_name)
        with setting_trans_env(self.server_name):
            for each_file in files:
                filename = os.path.basename(each_file)
                md5[filename] = md5sum(each_file)
                with cd(root_dir):
                    run('ln -sf {} {}'.format(each_file, filename))
            execute(wget_files, filenames, dest_dir, hosts=[self.target_ip])

    @hosts(self.source_ip)
    def _copy(files, dest_dir):
        for each_file in files:
            run('{} {} {}/'.format(RSYNC, each_file, dest_dir))

    def _execute_transfer():
        if self.source_ip == self.target_ip:
            execute(_copy, files, dest_dir)
        else:
            execute(_transfer, files, dest_dir)

    if self._debug:
        _execute_transfer()
    else:
        with settings(hide('everything')):
            _execute_transfer()

#End class Uploader ########################################################################
def upload_log(self, logtype=None, date=None, logfile=None, ftp_ip=None):
    """
    An example:
    pandora --ftp -r 30 -t 1200 -z -m 42.62.119.164 /tjmob_log/tjmob_37wan_1 /app/tjmob_37wan_1/backend/logs/game/dayreport/dayreport_2015-05-03.log.bz2*
    """
    from bible.utils import BZIP2
    ftp_log_path = '/{}_log/{}'.format(self.game, self.name)
    logtypes = ['dayreport', 'rtreport']
    date = date if date else time.strftime('%Y-%m-%d')
    ftp_ip = ftp_ip if ftp_ip else '42.62.119.164'
    if logfile:
        logfiles = [logfile]
    else:
        if logtype:
            logfiles = ['/app/{0}/backend/logs/game/{1}/{1}_{2}.log'.format(self.name, logtype, date)]
        else:
            logfiles = ['/app/{0}/backend/logs/game/{1}/{1}_{2}.log'.format(self.name, each_logtype, date) for each_logtype in logtypes]

    @hosts(self.int_ip)
    def _upload_log():
        for each_log in logfiles:
            dir, filename = os.path.split(each_log)
            with cd(dir):
                file_bz2 = '{}.bz2'.format(filename)
                file_md5 = '{}.MD5'.format(file_bz2)
                run('[ -f {0} ] && echo "{0} already exists" || {1} {2}'.format(file_bz2, BZIP2, filename))
                run('[ -f {0} ] && echo "{0} already exists" || md5sum {1} >{0}'.format(file_md5, file_bz2))
                run('''pandora --ftp -r 30 -t 1200 -z -m {} {} {}.bz2*'''.format(ftp_ip, ftp_log_path, each_log))

    execute(_upload_log)

#class Resource(object):
#
#    def __init__(self,
#
#    def upload(self, file):
#        dir, filename = os.path.split(file)
#        resource_dir = '/app/www/{}/{}/{}'.format(game, RELEASE_TYPE, TIMESTAMP)
#        resource_ip = gameOption('www_ssh_ip')
#        execute(mk_remote_dir, resource_dir, hosts=[resource_ip])
#        local('rsync -aP {}/{{{},md5.txt}} {}:{}/'.format(dir, filename, resource_ip, resource_dir))
#
#    def download(self, file):
#        remote_dir, filename = os.path.split(file)
#        mk_remote_dir(REMOTE_DIR)
#        with cd(remote_dir):
#            wget = 'wget -c -t 10 -T 10 -q'
#            server_name = gameOption('www_header')
#            for each_file in [filename, 'md5.txt']:
#                run('''{} --header="Host:{}" http://{}/{}/{}/{}/{}'''.format(wget, server_name, gameOption('www_ip'), game, RELEASE_TYPE, TIMESTAMP, each_file))
#            run('dos2unix md5.txt && md5sum -c md5.txt')
def flush(self, gameservers, command, table=None, path=None):
    locate_gameservers = self.game_pj.transform(gameservers)
    ips = locate_gameservers.keys()

    def _flush(gameserver, http_port):
        base_url = 'http://127.0.0.1:{}/root/gateway.action'.format(http_port)
        real_url = '{}?command={}'.format(base_url, command)
        if table is not None:
            real_url += '&tableName={}'.format(table)
        if path is not None:
            real_url += '&path={}'.format(path)
        return run('curl "{}"'.format(real_url))

    def _flush_task():
        if path is not None:
            download_from_resource(self.game, path)
        ret = {}
        for gameserver in locate_gameservers[env.host_string]:
            print('Working on {}...'.format(gameserver))
            http_port = get_http_port(gameserver)
            result = _flush(gameserver, http_port)
            _result = result.lower()
            success_tags = ['succ', 'success']
            if any(each in _result for each in success_tags):
                ret[gameserver] = (True, result)
            else:
                ret[gameserver] = (False, result)
        return ret

    result = execute(_flush_task, hosts=ips)
    total = {}
    for each_result in result:
        for each_gameserver in result[each_result]:
            total[each_gameserver] = result[each_result][each_gameserver]
    return total