我们从Python开源项目中,提取了以下49个代码示例,用于说明如何使用fabric.api.hide()。
def run_script(script, remote=True):
    '''
    Run a script defined in the config on the remote (or local) host.

    Raises RuntimeError if the script name is not configured.
    '''
    scripts_cfg = _get_config()['scripts']

    # Fail fast when the requested script is not configured.
    if not is_script_defined(script):
        raise RuntimeError('Missing script "{}"'.format(script))

    # Resolve the shell command mapped to this script name.
    command = scripts_cfg[script]
    host_info(
        'Running {}\n{}'.format(cyan(script), cyan('> ' + command)),
        remote=remote
    )

    # Execute it, suppressing fabric's "running" echo.
    with hide('running'):
        run(command, remote)
def install_jenkins(*args, **kwargs):
    """Install Jenkins into ~/jenkins and manage it via an ~/init script.

    Keyword args:
        version: Jenkins WAR version to download (default: 'latest').
        port: HTTP port substituted into the init script on first install.
    """
    home = run('echo $HOME')
    version = kwargs.get('version', 'latest')
    init = os.path.join(home,'init')
    jenkins_base_dir = os.path.join(home, 'jenkins')
    jenkins_init = os.path.join(init, 'jenkins')
    port = kwargs.get('port')
    if not exists(jenkins_base_dir):
        run('mkdir ' + jenkins_base_dir)
    if not exists(os.path.join(jenkins_base_dir, 'jenkins.war')):
        # Fetch the WAR quietly so wget progress does not flood the log.
        with hide('output'):
            run('wget http://mirrors.jenkins-ci.org/war/%s/jenkins.war -O ~/jenkins/jenkins.war' % version)
    if not exists(os.path.join(jenkins_base_dir, 'org.jenkinsci.main.modules.sshd.SSHD.xml')):
        with hide('output'):
            run('wget https://templates.wservices.ch/jenkins/org.jenkinsci.main.modules.sshd.SSHD.xml -O ~/jenkins/org.jenkinsci.main.modules.sshd.SSHD.xml')
    if not exists(init):
        run('mkdir ~/init')
    if not exists(jenkins_init):
        # First install: fetch the init script, configure the port, start.
        with hide('output'):
            run('wget https://templates.wservices.ch/jenkins/jenkins.init -O ~/init/jenkins')
        run('chmod 750 ~/init/jenkins')
        sed(jenkins_init, 'PORT=HTTP_PORT', 'PORT=%s' % port)
        run('~/init/jenkins start')
    else:
        # Already installed: just restart the service.
        run('~/init/jenkins restart')
def load_config(conf_file, base_conf=[], spec_conf=[], delimiter=' '):
    """Ensure each 'param<delimiter>value' entry exists in a remote config file.

    Existing entries with different values are rewritten via sed, missing
    entries are appended. Entries listed in spec_conf are never overridden
    once the file already has content.

    NOTE(review): base_conf/spec_conf use mutable default args; they are
    never mutated here, so this is harmless but unidiomatic.
    """
    if exists(conf_file):
        with hide('output'):
            config_data = run('cat %s' % conf_file)
    else:
        config_data = ''
    confs = base_conf + spec_conf
    for conf in confs:
        param, value = conf.split(delimiter, 1)
        value = re.sub(r'#.*$', "", str(value))  # Delete comments
        # Find an existing line for this param (tolerating an optional space
        # around the delimiter).
        match = re.search('^%s[ ]?%s[ ]?(.*)' % (param, delimiter), config_data, re.MULTILINE)
        if match:
            orig_value = match.group(1).strip()
            orig_line = '%s' % match.group(0).strip()
            if orig_value != str(value):
                if config_data and param in spec_conf:
                    continue  # Do not override already existing specific configurations
                print('%s %s change to %s' % (param, orig_value, value))
                sed(conf_file, orig_line, '%s%s%s' % (param, delimiter, value))
            else:
                print('Config OK: %s%s%s' % (param, delimiter, value))
        else:
            # Param absent: append it to the file.
            print('Add config %s%s%s' % (param, delimiter, value))
            append(conf_file, '%s%s%s' % (param, delimiter, value))
def vulture():
    """ try to find dead code paths """
    # NOTE(review): Python 2 only — uses print statements and
    # contextlib.nested, both of which were removed in Python 3.
    with api.quiet():
        # Install vulture on demand if it is not on the PATH.
        if not api.local('which vulture').succeeded:
            print 'vulture not found, installing it'
            api.local('pip install vulture')
    # Filter out whitelisted function names from vulture's report.
    ignore_functions_grep = 'egrep -v "{0}"'.format(
        '|'.join(VULTURE_IGNORE_FUNCTIONS))
    excluded = ",".join(VULTURE_EXCLUDE_PATHS)
    excluded_paths = (' --exclude ' + excluded) if excluded else ''
    vulture_cmd = '\n vulture {pkg_name}{exclude}{pipes}'
    vulture_cmd = vulture_cmd.format(
        pkg_name=PKG_NAME,
        exclude=excluded_paths,
        pipes='|'.join(['', ignore_functions_grep]))
    changedir = api.lcd(os.path.dirname(__file__))
    warn_only = api.settings(warn_only=True)
    be_quit = api.hide('warnings')
    with contextlib.nested(changedir, warn_only, be_quit):
        result = api.local(vulture_cmd, capture=True)
        exit_code = result.return_code
    print result.strip()
    # Propagate vulture's exit status to the caller.
    raise SystemExit(exit_code)
def list_platforms(root_dir):
    """Return platform directory names directly under root_dir.

    A directory counts as a platform when it contains a version.lua file.
    Version-number directories (e.g. 1.2.3.4) and 'lyServers' are excluded.
    """
    def is_platform(dir):
        # Presence of version.lua identifies a platform directory.
        with quiet():
            return run('test -f "{}/{}/version.lua"'.format(root_dir, dir)).succeeded

    with cd(root_dir), hide('stdout'):
        result = run('''find ./ -mindepth 1 -maxdepth 1 -type d -print |grep --color=never -vE '([0-9]+(\.[0-9]+){3}\\b)|(lyServers)' ''')
        # find prints entries as './name'; remove that exact prefix.
        # (lstrip('./') would also eat the leading dot of hidden directories.)
        dirs = [each[2:] if each.startswith('./') else each
                for each in result.splitlines()]
        return [each for each in dirs if is_platform(each)]
def make_diff(remote_script_dir, diff_from_lua, diff_to_lua, resource_dir, dest):
    """Run make_diff.py on the remote host, then write a <name>.lua manifest
    holding the produced zip's size and md5.

    Example:
    /app/opbak/make_diff_3/make_diff.py --resource-dir 3.6.1.0/res --diff-from 3.6.0.9/res/res.lua --diff-to 3.6.1.0/res/res.lua --dest /app/opbak/make_diff_20150909_xxxxx/3.6.1.0,/app/opbak/make_diff_20150909_xxxxx/3.6.1.0.zip
    """
    with hide('running', 'stdout'):
        run('''python {remote_script_dir}/make_diff.py --resource-dir {resource_dir} --diff-from {diff_from_lua} --diff-to {diff_to_lua} --dest {dest}'''.format(remote_script_dir=remote_script_dir, resource_dir=resource_dir, diff_from_lua=diff_from_lua, diff_to_lua=diff_to_lua, dest=dest))
    # Build the .lua manifest path next to the produced archive.
    _zipfile = dest.split(',')[0]
    # Strip an exact '.zip' suffix. The previous rstrip('.zip') removed any
    # trailing run of the characters '.', 'z', 'i', 'p' and could eat too much
    # (e.g. 'a.zip.zip' -> 'a', 'pi.zip' -> '').
    zipfile = _zipfile[:-4] if _zipfile.endswith('.zip') else _zipfile
    zip_lua = '{}.lua'.format(zipfile)
    with hide('running', 'stdout'):
        file_size = run('stat --printf="%s" {}'.format(zipfile))
        md5 = run("md5sum {} | awk '{{print $1}}'".format(zipfile)).strip('\n')
        run('''echo -ne 'local updateZipSize = {{}}\nupdateZipSize.value = {file_size}\nupdateZipSize.md5 = "{md5}"\nreturn updateZipSize' >{zip_lua}'''.format(file_size=file_size, md5=md5, zip_lua=zip_lua))
def build(name, ask=True, **kwargs):
    """
    Build the malicious mote to its target hardware.

    :param name: experiment name (or absolute path to experiment)
    :param ask: ask confirmation
    :param path: expanded path of the experiment (dynamically filled in through
     'command' decorator with 'expand')
    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    def is_device_present():
        # Probe for the mote's character device; warn_only keeps fabric from
        # aborting when the shell test fails.
        with settings(hide(*HIDDEN_ALL), warn_only=True):
            return local("if [ -c /dev/ttyUSB0 ]; then echo 'ok'; else echo 'nok'; fi", capture=True) == 'ok'

    console = kwargs.get('console')
    counter, interval = 0.0, 0.5
    # Poll every 0.5s until the device appears; warn every 5s, give up after
    # ~120s. NOTE(review): the timeout check is an elif of the modulo branch,
    # so at exact multiples of 5 past 120s only the warning fires — confirm
    # this ordering is intended.
    while not is_device_present():
        sleep(interval)
        counter += interval
        if counter % 5 == 0:
            logger.warning("Waiting for mote to be detected...")
        elif counter >= 120:
            logger.error("Something failed with the mote ; check that it mounts to /dev/ttyUSB0")
            return
    # Rebuild either directly or through the interactive console if present.
    remake(name, build=True, **kwargs) if console is None else console.do_remake(name, build=True, **kwargs)
    return "Mote built on /dev/ttyUSB0"
def _command(
    fabric_method,
    command,
    ignore_errors=False,
    quiet=True,
    hide=('running', 'aborts'),
    show=(),
    abort_exception=RuntimeError,
    **kwargs
):
    """Run *command* through the given fabric method with output controls.

    quiet=True additionally hides command output and warnings; ignore_errors
    maps to fabric's warn_only.
    """
    if quiet:
        # Tuples are immutable, so this rebinds the local only.
        hide = hide + ('output', 'warnings')
    log('{method}: {command}'.format(
        method=fabric_method.__name__,
        command=command,
    ))
    context = fab.settings(
        fab.hide(*hide),
        fab.show(*show),
        abort_exception=abort_exception,
        warn_only=ignore_errors,
    )
    with context:
        return fabric_method(command, **kwargs)
def create_db():
    """Drop and recreate the application database (and, when running without
    a 'settings' env, its superuser), bouncing uwsgi around the operation."""
    db = app_config.database
    with settings(warn_only=True), hide('output', 'running'):
        if env.get('settings'):
            execute('servers.stop_service', 'uwsgi')
        with shell_env(**db):
            local('dropdb --if-exists %s' % db['PGDATABASE'])
        if not env.get('settings'):
            # Local run: recreate the database user from scratch.
            local('psql -c "DROP USER IF EXISTS %s;"' % db['PGUSER'])
            local('psql -c "CREATE USER %s WITH SUPERUSER PASSWORD \'%s\';"' % (db['PGUSER'], db['PGPASSWORD']))
        with shell_env(**db):
            local('createdb %s' % db['PGDATABASE'])
        if env.get('settings'):
            execute('servers.start_service', 'uwsgi')
def _monitor_neighbor(self):
    """Poll gobgp once per second and push BGP neighbor state changes onto
    the shared result_queue. Runs forever; intended as a green thread."""
    with hide('running', 'stdout'):
        while True:
            eventlet.sleep(1)
            try:
                results = {}
                cmd = 'gobgp -j neighbor -u {0}'.format(self.mgmt_addr)
                output = local(cmd, capture=True)
                ret = json.loads(output)
                for entry in ret:
                    addr = entry['conf']['remote_ip']
                    state = entry['info']['bgp_state']
                    results[addr] = state
                change_result_list = self._extract_change_state(results)
                if change_result_list != []:
                    result_queue.put(change_result_list)
            except Exception:
                # Best-effort polling: a failed command, bad JSON or missing
                # keys just skips this cycle. 'except Exception' (instead of
                # the original bare 'except:') lets GreenletExit and
                # KeyboardInterrupt still terminate the loop.
                continue
def request_spot_instances( price=0.01, zone='us-west-2a', inst_type=INSTANCE_TYPE, inst_id="ami-9abea4fb"): ' request spot instances ' # ami-9abea4fb - ubuntu-trusty-14.04-amd64-server launch_specification = ''' {{ "ImageId": "{}", "KeyName": "ubuntu_trusty", "SecurityGroupIds": [ "sg-94a671f3" ], "InstanceType": "{}" }} '''.format(inst_id, inst_type) ls_encode = launch_specification.replace('\n', '') ls_encode = ls_encode.replace('"', '\\"') aws_cmd = 'aws ec2 request-spot-instances --spot-price "{}" --instance-count 1 --type "one-time" --availability-zone-group {} --launch-specification "{}"' aws_cmd = aws_cmd.format(price, zone, ls_encode) jq_cmd = 'jq -c -C "."' with hide("running"): local('|'.join([aws_cmd, jq_cmd]))
def tar_archive(name, path, remote=True):
    ''' Compress the source path into a tar archive. '''
    # Hide the (potentially long) file listing tar -v produces.
    with hide('stdout'):
        runner.run('tar -czvf {} {}'.format(name, path), remote=remote)
def glob(path, remote=True):
    ''' Glob a directory path to get the list of files. '''
    listing_cmd = 'ls -1 {}'.format(path)
    with hide('everything'):
        output = runner.run(listing_cmd, remote=remote)
    return output.split()
def load_history():
    ''' Load build history. '''
    with hide('everything'):
        raw = fs.read_remote_file(get_builds_file())
    return json.loads(raw)
def check():
    ''' Check the current remote branch and the last commit. '''
    with hide('running'):
        # Report the branch currently checked out on the remote.
        branch = git.current_branch()
        remote_print('Branch: {}'.format(branch))
        # Then show the most recent commit.
        git.show_last_commit()
def last_commit(remote=True, short=False):
    '''
    Get the last commit of the git repository.

    Note: assumes the current working directory (remote or local) is a git
    repository, so set the directory before calling this.
    '''
    flag = ' --short ' if short else ' '
    cmd = 'git rev-parse{}HEAD'.format(flag)
    with hide('everything'):
        if remote:
            result = run(cmd)
        else:
            result = local(cmd, capture=True)
    return result.strip()
def host_print(msg, remote=True, leading_chars='\n'):
    ''' Print a raw message on the host. '''
    echo_cmd = 'echo "{0}{1}"'.format(leading_chars, msg)
    with hide('running'):
        executor = _run if remote else _local
        executor(echo_cmd)
def stop_mysql(self):
    """Stop the local MySQL service; return (success, action_name)."""
    with settings(hide('running', 'stdout')):
        outcome = local('service mysql stop')
    return outcome.return_code == 0, "stop_mysql"
def start_mysql(self):
    """Start MySQL with replication paused; return (success, action_name)."""
    with settings(hide('running', 'stdout')):
        outcome = local('service mysql start --skip-slave-start')
    return outcome.return_code == 0, "start_mysql"
def failover(self, *args, **kwargs):
    """Switch to a new MySQL master via mysqlmaster.py.

    Returns (success, message); fails early when no master_host is given.
    """
    cred_file = self.config.get('failover_creds', '/etc/mysql/failover.cnf')
    master = kwargs.get('master_host')
    if not master:
        return False, "No master_host given"
    switch_cmd = ("/usr/bin/mysqlmaster.py switch --new-master {} --defaults-extra-file={} "
                  "--dead-master --assume-yes").format(master, cred_file)
    with settings(hide('running')):
        return local(switch_cmd).return_code == 0, ""
def run_remote_command(host_string, command, timeout=NODE_COMMAND_TIMEOUT,
                       jsonresult=False, catch_exitcodes=None):
    """Executes command on remote host via fabric run.
    Optionally timeout may be specified.
    If result of execution is expected in json format, then the output will
    be treated as json.
    """
    silenced = hide(NODE_STATUSES.running, 'warnings', 'stdout', 'stderr')
    with settings(silenced, host_string=host_string, warn_only=True):
        return execute_run(command, timeout=timeout,
                           jsonresult=jsonresult,
                           catch_exitcodes=catch_exitcodes)
def compare_tar_against_git():
    """
    Compare the contents of the tarball against git ls-files
    """
    with hide("commands"):
        with cd("/home/vagrant/repos/sympy"):
            git_lsfiles = set([i.strip() for i in run("git ls-files").split("\n")])
        tar_output_orig = set(show_files('source', print_=False).split("\n"))
        tar_output = set()
    for file in tar_output_orig:
        # The tar files are like sympy-0.7.3/sympy/__init__.py, and the git
        # files are like sympy/__init__.py.
        split_path = full_path_split(file)
        if split_path[-1]:
            # Exclude directories, as git ls-files does not include them
            tar_output.add(os.path.join(*split_path[1:]))
    # print tar_output
    # print git_lsfiles
    fail = False
    print()
    # Files that are both in the tarball and on the whitelist should have
    # been excluded from the tarball.
    print(blue("Files in the tarball from git that should not be there:", bold=True))
    print()
    for line in sorted(tar_output.intersection(git_whitelist)):
        fail = True
        print(line)
    print()
    print(blue("Files in git but not in the tarball:", bold=True))
    print()
    for line in sorted(git_lsfiles - tar_output - git_whitelist):
        fail = True
        print(line)
    print()
    print(blue("Files in the tarball but not in git:", bold=True))
    print()
    for line in sorted(tar_output - git_lsfiles - tarball_whitelist):
        fail = True
        print(line)
    if fail:
        error("Non-whitelisted files found or not found in the tarball")
def ssh(self, command, use_sudo=False, quiet=False, **kwargs):
    """
    Executes a shell command through ssh

    :param command: The command to be executed
    :param use_sudo: If True, it runs as sudo
    :param quiet: Whether to hide the stdout/stderr output or not

    :return: The fabric equivalent of run and sudo

    :raise: SshError: If the command fails
    """
    if self._vm_object:
        # Block until the guest's SSH service accepts connections.
        self._wait_for_ssh_service(
            kwargs['vcdriver_vm_ssh_username'],
            kwargs['vcdriver_vm_ssh_password']
        )
        with fabric_context(
                self.ip(),
                kwargs['vcdriver_vm_ssh_username'],
                kwargs['vcdriver_vm_ssh_password']
        ):
            # Pick the fabric runner matching the privilege level.
            if use_sudo:
                runner = sudo
            else:
                runner = run
            if quiet:
                with hide('everything'):
                    result = runner(command)
            else:
                result = runner(command)
            # Surface failures as a typed exception carrying the output.
            if result.failed:
                raise SshError(command, result.return_code, result.stdout)
            return result
def winrm(self, script, winrm_kwargs=dict(), quiet=False, **kwargs):
    """
    Executes a remote windows powershell script

    :param script: A string with the powershell script
    :param winrm_kwargs: The pywinrm Protocol class kwargs
    :param quiet: Whether to hide the stdout/stderr output or not

    :return: A tuple with the status code, the stdout and the stderr

    :raise: WinRmError: If the command fails
    """
    if self._vm_object:
        # Block until the guest's WinRM service accepts connections.
        self._wait_for_winrm_service(
            kwargs['vcdriver_vm_winrm_username'],
            kwargs['vcdriver_vm_winrm_password'],
            **winrm_kwargs
        )
        winrm_session = self._open_winrm_session(
            kwargs['vcdriver_vm_winrm_username'],
            kwargs['vcdriver_vm_winrm_password'],
            winrm_kwargs
        )
        if not quiet:
            print('Executing remotely on {} ...'.format(self.ip()))
            styled_print(Style.DIM)(script)
        status, stdout, stderr = self._run_winrm_ps(winrm_session, script)
        if not quiet:
            styled_print(Style.BRIGHT)('CODE: {}'.format(status))
            styled_print(Fore.GREEN)(stdout)
        # Non-zero status raises with the full transcript attached.
        if status != 0:
            if not quiet:
                styled_print(Fore.RED)(stderr)
            raise WinRmError(script, status, stdout, stderr)
        else:
            return status, stdout, stderr
def list_inner_scopes(root_dir, version):
    """List the immediate subdirectory names of {root_dir}/{version}."""
    with cd('{}/{}'.format(root_dir, version)), hide('running', 'stdout'):
        result = run('''find ./ -mindepth 1 -maxdepth 1 -type d -print''')
        # find prints entries as './name'; remove that exact prefix.
        # (lstrip('./') would also eat the leading dot of hidden directories.)
        return [each[2:] if each.startswith('./') else each
                for each in result.splitlines()]
def list_existed_versions(root_dir):
    """Return version-numbered directory names (e.g. 1.2.3.4) under root_dir,
    or [] when none exist."""
    with cd(root_dir), hide('running', 'stdout'):
        # The '|| echo' fallback keeps the command's exit status zero when
        # grep matches nothing, so fabric does not abort.
        result = run('''( find ./ -mindepth 1 -maxdepth 1 -type d -print |grep --color=never -E '[0-9]+(\.[0-9]+){3}\\b' ) || echo "no_version_found"''')
        if result == "no_version_found":
            return []
        # find prints entries as './name'; remove that exact prefix.
        # (lstrip('./') is a char-set strip and would be wrong for names
        # beginning with '.' or '/'.)
        return [each[2:] if each.startswith('./') else each
                for each in result.splitlines()]
def list_existed_diff_packages(version_dir):
    """Return diff-package file names (ending in a version number) under
    version_dir, or [] when none exist."""
    with cd(version_dir), hide('running', 'stdout'):
        # The '|| echo' fallback keeps the exit status zero when grep finds
        # nothing, so fabric does not abort.
        result = run('''( find ./ -mindepth 1 -maxdepth 1 -type f -print |grep --color=never -E '[0-9]+(\.[0-9]+){3}$' ) || echo "no_diff_package_found"''')
        if result == "no_diff_package_found":
            return []
        # find prints entries as './name'; remove that exact prefix.
        # (lstrip('./') is a char-set strip and would be wrong for names
        # beginning with '.' or '/'.)
        return [each[2:] if each.startswith('./') else each
                for each in result.splitlines()]
def check_local_merge_scripts(local_dir):
    """Verify the merge-script bundle in local_dir is complete and contains
    the expected placeholders; any failed check aborts via fabric."""
    required_files = ['clear_small_user.sql', 'db.yml', 'forceId.sql',
                      'hf.py', 'hf_reward.sql', 'table.yml']
    placeholders = ['first_force_id', 'second_force_id', 'third_force_id']
    with settings(hide('everything')):
        with lcd(local_dir):
            # Every required file must exist.
            for filename in required_files:
                local('test -f {}'.format(filename))
            # forceId.sql must contain each placeholder token.
            for token in placeholders:
                local('grep {} forceId.sql >/dev/null'.format(token))
            # db.yml must reference both databases.
            local('grep "db: db1" db.yml')
            local('grep "db: db2" db.yml')
def rsync_to_backup(game, region):
    """Mirror a game/region's rsync tree to its configured backup server."""
    print("??????????????...")
    sys.stdout.flush()
    time.sleep(20)
    cfg = ConfigReader(game, region)
    rsync_module = cfg.get("rsync_module")
    rsync_root = cfg.get("rsync_root")
    rsync_backup_ip = cfg.get("rsync_backup_ip")
    # All three settings are mandatory.
    if "" in (rsync_module, rsync_root, rsync_backup_ip):
        raise Exception('rsync config is not proper in the game config file')
    with cd(rsync_root), settings(user='root'), hide("stdout"):
        run('''rsync -art -R --delete --out-format="%n" --password-file=/etc/rsyncd.secret ./ {}::{}'''.format(rsync_backup_ip, rsync_module))
def rsync_to_backup(game, region):
    """Mirror a game/region's rsync tree to its configured backup server
    after a fixed settle delay."""
    print("??????????????...")
    sys.stdout.flush()
    time.sleep(30)
    cfg = ConfigReader(game, region)
    rsync_module = cfg.get("rsync_module")
    rsync_root = cfg.get("rsync_root")
    rsync_backup_ip = cfg.get("rsync_backup_ip")
    # All three settings are mandatory.
    if "" in (rsync_module, rsync_root, rsync_backup_ip):
        raise Exception('rsync config is not proper in the game config file')
    with cd(rsync_root), settings(user='root'), hide("stdout"):
        run('''rsync -art -R --delete --out-format="%n" --password-file=/etc/rsyncd.secret ./ {}::{}'''.format(rsync_backup_ip, rsync_module))
def exec_sql(game, language, backstage_db, backstageip, sql):
    """Execute a SQL statement against the backstage DB via the pandora CLI
    on the given host and return its output."""
    with settings(hide('running', 'stdout', 'stderr'), host_string=backstageip):
        command = 'pandora -e "use {};{}"'.format(backstage_db, sql)
        return run(command)
def clean(name, ask=True, **kwargs):
    """
    Remove an experiment.

    :param name: experiment name (or absolute path to experiment)
    :param ask: ask confirmation
    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    path = kwargs.get('path')
    console = kwargs.get('console')
    # Skip deletion while a PENDING task for this experiment exists.
    pending = console is not None and any(
        task['name'] == name and task['status'] == 'PENDING'
        for task in console.tasklist.values()
    )
    if not pending:
        logger.debug(" > Cleaning folder...")
        with hide(*HIDDEN_ALL):
            local("rm -rf {}".format(path))
    return "Cleaned"
def cooja(name, with_malicious=True, **kwargs):
    """
    Start an experiment in Cooja with/without the malicious mote and updates the experiment if
     motes' positions were changed.

    :param name: experiment name
    :param with_malicious: use the simulation WITH the malicious mote or not
    :param path: expanded path of the experiment (dynamically filled in through 'command' decorator with 'expand')
    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    sim_path = join(kwargs['path'], 'with{}-malicious'.format(['out', ''][with_malicious is True]))
    motes_before = get_motes_from_simulation(join(sim_path, 'simulation.csc'), as_dictionary=True)
    with hide(*HIDDEN_ALL):
        with lcd(sim_path):
            local("make cooja TASK={}".format(kwargs.get('task', "cooja")))
    motes_after = get_motes_from_simulation(join(sim_path, 'simulation.csc'), as_dictionary=True)
    # if there was a change, update the other simulation in this experiment
    # NOTE(review): this condition actually tests that the before/after mote
    # sets SHARE at least one entry (non-empty intersection), not that they
    # differ — confirm whether a set difference was intended.
    if len(set(motes_before.items()) & set(motes_after.items())) > 0:
        other_sim_path = join(kwargs['path'], 'with{}-malicious'.format(['', 'out'][with_malicious is True]))
        set_motes_to_simulation(join(other_sim_path, 'simulation.csc'), motes_after)
    # if this experiment is part of a campaign, update this
    campaign = read_config(kwargs['path']).get('campaign')
    if campaign is not None:
        for experiment in get_experiments(campaign):
            if experiment in ['BASE', name]:
                continue
            exp_path = join(EXPERIMENT_FOLDER, experiment)
            set_motes_to_simulation(join(exp_path, 'with-malicious', 'simulation.csc'), motes_after)
            set_motes_to_simulation(join(exp_path, 'without-malicious', 'simulation.csc'), motes_after)
def update(silent=False, **kwargs):
    """
    Update Contiki-OS and RPL Attacks Framework.

    :param silent: run command silently
    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    for folder, repository in zip([CONTIKI_FOLDER, FRAMEWORK_FOLDER], ["Contiki-OS", "RPL Attacks Framework"]):
        with hide(*HIDDEN_ALL):
            with lcd(folder):
                # A proxy failure aborts the whole update loop.
                if "Could not resolve proxy" in local('git fetch --all', capture=True):
                    logger.error("Update failed ; please check your proxy settings")
                    break
                uptodate = "branch is up-to-date" in local('git checkout master', capture=True).strip().split('\n')[-1]
                if not uptodate:
                    req_exists = exists("requirements.txt")
                    if req_exists:
                        # Remember the hash so we only reinstall deps on change.
                        req_md5 = hash_file("requirements.txt")
                    logger.warn("You are about to loose any custom change made to {} ;".format(repository))
                    if silent or std_input("Proceed anyway ? (yes|no) [default: no] ", 'yellow') == 'yes':
                        # Hard-reset the working tree onto origin/master.
                        local('git submodule update --init')
                        local('git fetch --all')
                        local('git reset --hard origin/master')
                        local('git pull')
                        if req_exists and hash_file("requirements.txt") != req_md5:
                            local('pip install -r requirements.txt')
                        if repository == "RPL Attacks Framework":
                            # Drop provisioning artifacts not needed in place.
                            remove_files(folder, "Vagrantfile")
                            remove_folder(join(folder, "provisioning"))
                logger.debug(" > {} {}".format(repository, ["updated", "already up-to-date"][uptodate]))
    if not silent:
        logger.warn("Restarting the framework...")
        restart(PIDFILE)
def make_incremental_build(name, basis="live"):
    """
    Archive a build, hard-linking unchanged files from the "basis" build (default live)

    This can significantly reduce the disk space used by multiple builds.
    On mac, requires ``brew install coreutils``
    """
    # Prefer GNU cp ('gcp', from coreutils on mac), which supports -l;
    # fall back to plain 'cp' and verify -l support below.
    # (The original assigned find_executable('gcp') to cp_bin and then
    # immediately overwrote it — one call suffices.)
    cp_bin = "gcp" if find_executable('gcp') else "cp"
    live_name = dealias_build("_live")
    stage_name = dealias_build("_stage")
    if name in (live_name, stage_name):
        abort("Cannot turn the live or stage build into an incremental build")
    basis = dealias_build(basis)
    stop(name)
    with lcd("%s/deploy/builds" % ROOT_DIR):
        # Move the build aside, hard-link-copy the basis into its place,
        # then rsync the differences back from the original.
        local("mv %(name)s %(name)s~" % {'name': name})
        with settings(hide("stderr"), warn_only=True):
            cp_ret = local("%(cp_bin)s -al %(basis)s %(name)s" % {
                'basis': basis,
                'name': name,
                'cp_bin': cp_bin,
            })
        if not cp_ret.succeeded:
            # Roll back the rename before aborting.
            local("mv %(name)s~ %(name)s" % {'name': name})
            abort("Local cp bin does not support -l flag (on mac: brew install coreutils)")
        local("rsync -acH --delete %(name)s~/ %(name)s" % {'name': name})
        local("rm -rf %(name)s~" % {'name': name})
def exist(self):
    """Return True when a systemd unit file named after self.name exists."""
    with settings(warn_only=True):
        with hide('commands'):
            unit_listing = local('systemctl list-unit-files', capture=True)
    # Each output line begins with the unit file name.
    return any(line.startswith(self.name)
               for line in unit_listing.split('\n'))
def install_mysql():
    """Install mysql-server non-interactively, preseeding the root password
    through debconf; no-op with a warning when already installed."""
    with settings(hide('warnings', 'stderr'), warn_only=True):
        check = sudo('dpkg-query --show mysql-server')
        if check.failed is False:
            # Package already present — nothing to do.
            warn('MySQL is already installed')
            return
        mysql_password = prompt('Please enter MySQL root password:')
        # Preseed both password questions so apt installs without prompting.
        for selection in ('root_password', 'root_password_again'):
            sudo('echo "mysql-server-5.5 mysql-server/%s password ' \
                 '%s" | debconf-set-selections' % (selection, mysql_password))
        apt_get_install('mysql-server')
def spot_price_history(inst_type=INSTANCE_TYPE, region='us-west-2'):
    ' spot price history '
    # Query from the top of the current UTC hour.
    start_time = '{:%Y-%m-%dT%H:00:00}'.format(datetime.datetime.utcnow())
    aws_cmd = ('aws ec2 describe-spot-price-history --start-time {} '
               '--product "Linux/UNIX" --instance-type "{}"').format(start_time, inst_type)
    jq_cmd = 'jq -c -C ".SpotPriceHistory[] | {SpotPrice, AvailabilityZone, InstanceType }"'
    with hide("running"):
        os.environ['AWS_DEFAULT_REGION'] = region
        local('|'.join([aws_cmd, jq_cmd]))
def regions():
    ' get list of regions '
    pipeline = '|'.join(['aws ec2 describe-regions',
                         'jq -c -C ".Regions[]"'])
    with hide("running"):
        local(pipeline)
def run_instances():
    ' run instances '
    pipeline = '|'.join([
        'aws ec2 run-instances --image-id ami-9abea4fb --count 1 --instance-type m3.medium --key-name ubuntu_trusty --security-group-ids ssh-ip-only',
        'jq -c -C "."',
    ])
    with hide("running"):
        local(pipeline)
def describe_instances():
    ' describe instances '
    pipeline = '|'.join([
        'aws ec2 describe-instances',
        'jq -c -C ".Reservations[].Instances[]| {InstanceType, KeyName, State: .State.Name, PublicIpAddress, InstanceId, ImageId}"',
    ])
    with hide("running"):
        local(pipeline)
def cancel_spot_instance_requests(request_id):
    ' cancel spot instance requests '
    cancel_cmd = 'aws ec2 cancel-spot-instance-requests --spot-instance-request-ids {}'.format(request_id)
    jq_filter = 'jq -c -C ".CancelledSpotInstanceRequests[] | {State, RequestId:.SpotInstanceRequestId}"'
    with hide("running"):
        local('|'.join([cancel_cmd, jq_filter]))
def terminate_instances(inst_id):
    ' terminate instances '
    with hide("running"):
        local('aws ec2 terminate-instances --instance-ids {}'.format(inst_id))
def storm_list():
    ' list ssh config '
    with hide("running"):
        local('storm list')
def storm_delete(name):
    ' delete ssh config by name '
    delete_cmd = 'storm delete {}'.format(name)
    with hide("running"):
        local(delete_cmd)
def install_mongodb(*args, **kwargs):
    """Install MongoDB under ~/mongodb, preserving existing data, and start it.

    Keyword args:
        version: MongoDB version to install (default '3.2.6').
        port: required local port for the server.
        base_dir: install prefix (default ~/mongodb).

    Returns 1 (shell-style failure) on missing port or existing backup dir.
    """
    version = kwargs.get('version') or '3.2.6'
    home = run('echo $HOME')
    port = kwargs.get('port')
    base_dir = kwargs.get('base_dir') or os.path.join(home, 'mongodb')
    data_dir = os.path.join(base_dir, 'data')
    data_backup_dir = os.path.join(home, 'mongodb_data_bak')
    if not port:
        print('Enter a local port for the mongodb server')
        return 1
    # Move existing data aside so a re-install keeps the database files.
    if exists(data_dir):
        if exists(data_backup_dir):
            print('Backup directory %s already exists.' % data_backup_dir)
            return 1
        else:
            run('mv %s %s' % (data_dir, data_backup_dir))
    if exists(base_dir):
        run('rm -rf %s' % (base_dir))
    # Pick the 64- or 32-bit tarball based on the remote architecture.
    LONG_BIT = run('getconf LONG_BIT')
    if LONG_BIT == '64':
        package_name = 'mongodb-linux-x86_64-%s.tgz' % version
    else:
        package_name = 'mongodb-linux-i686-%s.tgz' % version
    package_path = os.path.join(home, package_name)
    if not exists(package_path):
        with hide('output'):
            run('wget https://fastdl.mongodb.org/linux/%s -O %s' % (package_name, package_path))
    if not exists(package_path.replace('.tgz', '')):
        run('tar xzf ' + package_path)
    run('mv %s %s' % (package_path.replace('.tgz', ''), base_dir))
    # Restore the backed-up data, or create a fresh db directory.
    if exists(data_backup_dir):
        run('mv %s %s' % (data_backup_dir, data_dir))
    else:
        run('mkdir -p %s/db' % data_dir)
    base_conf = [
        'port = %s' % port,
    ]
    spec_conf = [
        'bind_ip = 127.0.0.1',
        'logappend = True',
        'journal = true',
        'nohttpinterface = true',
    ]
    # Merge the settings into mongodb.conf (see load_config semantics).
    load_config(
        os.path.join(base_dir, 'mongodb.conf'),
        base_conf=base_conf,
        spec_conf=spec_conf,
        delimiter='= '
    )
    if not exists('~/init'):
        run('mkdir ~/init')
    # Install the architecture-matching init script and start the server.
    run('wget https://templates.wservices.ch/mongodb/init.%s -O ~/init/mongodb' % LONG_BIT)
    sed('~/init/mongodb', 'DAEMON=/usr/bin/mongod', 'DAEMON=$HOME/mongodb/bin/mongod')
    run('chmod 750 ~/init/mongodb')
    run('~/init/mongodb start')