我们从 Python 开源项目中提取了以下 50 个代码示例,用于说明如何使用 fabric.api.lcd()。
def style():
    """Run flake8 (PEP 8 + McCabe complexity) over the ``rq_dashboard`` package.

    See http://pypi.python.org/pypi/flake8/

    .. note::

        * Files with the header ``# flake8: noqa`` are skipped.
        * Lines ending with a ``# NOQA`` comment issue no warning.
    """
    # Build the command once so the option string is easy to scan.
    flake8_cmd = (
        'flake8 '
        '--exclude=".svn,CVS,.bzr,.hg,.git,__pycache__,._*" '
        '--max-complexity=9 .')
    with lcd(_relative_to_fabfile('rq_dashboard')):
        local(flake8_cmd)
def check_config(use_config=None):
    """Validate the local ``config.py`` against ``config.example.py``.

    :param use_config: optional config-name suffix; when given,
        ``config.<use_config>.py`` is copied over ``config.py`` first.
    :return: the loaded ``config`` module.

    NOTE(review): ``error()`` is assumed to abort execution — TODO confirm;
    if it merely prints, the later attribute accesses could fail.
    """
    if use_config:
        with lcd(codedir):
            local('cp config.{}.py config.py'.format(use_config))
    try:
        config = imp.load_source('config', os.path.join(codedir, 'config.py'))
    except IOError:
        error('config.py not found. Did you create it by copying config.example.py?')
    try:
        config_example = imp.load_source('config_example', os.path.join(codedir, 'config.example.py'))
    except IOError:
        error('config.example.py not found. Did you remove it?')
    # Refuse values copied verbatim from the example file.
    if config.signing_key == config_example.signing_key:
        error('You need to change the signing key to your own unique text.')
    if config.s3_bucket == config_example.s3_bucket:
        error('You need to change the s3 bucket name to a bucket you control.')
    puts('Your config.py appears to be set up.')
    return config
def prebuild(build_dir='/tmp/build_spacy'):
    """Build and test spaCy from a fresh clone in an isolated virtualenv.

    :param build_dir: scratch directory; wiped and recreated on each run.

    The clone source is this checkout (``path.dirname(__file__)``), not a
    remote URL, so uncommitted-but-committed local history is what gets built.
    """
    # Start from a clean build directory every time.
    if file_exists(build_dir):
        shutil.rmtree(build_dir)
    os.mkdir(build_dir)
    spacy_dir = path.dirname(__file__)
    # NOTE(review): wn_url is computed but never used below — possibly
    # leftover; confirm before removing.
    wn_url = 'http://wordnetcode.princeton.edu/3.0/WordNet-3.0.tar.gz'
    build_venv = path.join(build_dir, '.env')
    with lcd(build_dir):
        local('git clone %s .' % spacy_dir)
        local('virtualenv ' + build_venv)
        # Every command below runs inside the fresh virtualenv.
        with prefix('cd %s && PYTHONPATH=`pwd` && . %s/bin/activate' % (build_dir, build_venv)):
            local('pip install cython fabric fabtools pytest')
            local('pip install --no-cache-dir -r requirements.txt')
            local('fab clean make')
            local('cp -r %s/corpora/en/wordnet corpora/en/' % spacy_dir)
            local('PYTHONPATH=`pwd` python bin/init_model.py en lang_data corpora spacy/en/data')
            local('PYTHONPATH=`pwd` fab test')
            local('PYTHONPATH=`pwd` python -m spacy.en.download --force all')
            local('PYTHONPATH=`pwd` py.test --models spacy/tests/')
def vulture():
    """Try to find dead code paths with ``vulture`` and exit with its status.

    Python 2 code (print statements, ``contextlib.nested``).  Installs
    vulture on the fly if it is missing, filters out names listed in
    VULTURE_IGNORE_FUNCTIONS, and raises SystemExit with vulture's
    return code.
    """
    # Install vulture quietly if it is not on PATH.
    with api.quiet():
        if not api.local('which vulture').succeeded:
            print 'vulture not found, installing it'
            api.local('pip install vulture')
    # egrep -v filter that drops known-false-positive function names.
    ignore_functions_grep = 'egrep -v "{0}"'.format(
        '|'.join(VULTURE_IGNORE_FUNCTIONS))
    excluded = ",".join(VULTURE_EXCLUDE_PATHS)
    excluded_paths = (' --exclude ' + excluded) if excluded else ''
    vulture_cmd = '\n vulture {pkg_name}{exclude}{pipes}'
    vulture_cmd = vulture_cmd.format(
        pkg_name=PKG_NAME,
        exclude=excluded_paths,
        pipes='|'.join(['', ignore_functions_grep]))
    changedir = api.lcd(os.path.dirname(__file__))
    warn_only = api.settings(warn_only=True)
    be_quit = api.hide('warnings')
    with contextlib.nested(changedir, warn_only, be_quit):
        result = api.local(vulture_cmd, capture=True)
        exit_code = result.return_code
        print result.strip()
        raise SystemExit(exit_code)
def versions(**kwargs):
    """
    Check versions of Contiki-OS and RPL Attacks Framework.

    Reports ``git describe --tags --always`` for each repository at WARN
    level so it shows even with quiet logging.

    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    with hide(*HIDDEN_ALL):
        with lcd(CONTIKI_FOLDER):
            cversion = local('git --git-dir .git describe --tags --always', capture=True)
            logger.warn("Contiki-OS: {}".format(cversion))
        with lcd(FRAMEWORK_FOLDER):
            cversion = local('git --git-dir .git describe --tags --always', capture=True)
            logger.warn("RPL Attacks Framework: {}".format(cversion))


# **************************************** MAGICAL COMMANDS ***************************************
def lcd_task(task, cd_to):
    """
    Wrap ``task`` so it always runs from directory ``cd_to`` on the host
    machine (via ``fabric.api.lcd``).

    The wrapper keeps the original ``__name__`` and ``__doc__`` so fab's
    task listing still shows the right name and help text.

    :param task: the function to execute
    :param cd_to: the directory in which to execute it
    :return: the wrapping function
    """
    def wrapped(*args, **kwargs):
        with lcd(cd_to):
            task(*args, **kwargs)

    wrapped.__name__ = task.__name__
    wrapped.__doc__ = task.__doc__
    return wrapped
def sync_src():
    """Rsync the project tree (one level up) to the build user's home dir.

    Excludes VCS files, egg-info, caches and build/dev artefacts; creates
    the destination directory as the build user first.
    """
    get_vars()
    with fab.lcd('..'):
        destination = '/home/%s/senic-hub' % AV['build_user']
        fab.sudo('mkdir -p %s' % destination, user=AV['build_user'])
        # -rlptvD: recurse, keep links/perms/times/devices — like -a minus -go.
        rsync(
            '-rlptvD',
            '--exclude', '.*',
            '--exclude', '*.egg-info',
            '--exclude', '__pycache__',
            '--exclude', 'node_modules',
            '--exclude', '/build',
            '--exclude', '/development',
            '--exclude', '/dist',
            '--exclude', '/docs',
            '--exclude', '/venv',
            '.',
            '{host_string}:%s' % destination)
def deploy():
    """Deploys the latest rev for use.

    Names the release ``<APP3>-<rev12>``, verifies required system
    packages, then builds the virtualenv and symlinks the release inside
    ``env.deploydir``.  Python 2 (print statements).
    """
    env.currentrev = _release_name()
    env.app_shortname = env.appname[:3].upper()
    env.releasepath = env.app_shortname + "-" + env.currentrev[:12]
    print cyan("Release: %s" % env.releasepath)
    print red("Deployment user: %s" % env.user)
    # NOTE(review): truthy result aborts — _pkg_check() presumably returns
    # the missing/unhealthy packages; confirm against its definition.
    pkg_health = _pkg_check()
    if pkg_health:
        abort("One or more required system packages are unavailable.\n\nTry 'fab bootstrap'ing this system or installing the required package(s) by hand.")
    _prep_deploy_dest()
    with lcd(env.deploydir):
        _setup_local_virtualenv(env.virtualenv_dir)
        _copy_release(env.releasepath)
        _deploy_appconf()
        _symlink_release(env.releasepath, env.app_shortname)
def configure_nginx(): """Configure nginx. Installs nginx config for the application. """ # copy configuration with lcd(local_config_dir): with cd('/etc/nginx/sites-available'): put('./nginx.conf', './{}.conf'.format(app_name), use_sudo=True) # enable configuration if exists('/etc/nginx/sites-enabled/{}.conf'.format(app_name)) is False: sudo('ln -s /etc/nginx/sites-available/{}.conf'.format(app_name) + ' /etc/nginx/sites-enabled/{}.conf'.format(app_name)) # reload configuration sudo('service nginx reload') # END BOOTSTRAPING HELPERS # DEPLOYMENT HELPERS
def todo(*args):
    """List the TODOs and FIXMEs in the code and documentation."""
    # grin scans the source tree; grind feeds *.feature files to grin.
    source_scan = 'grin -e ".pyc,.pyo" "FIXME|TODO" *'
    feature_scan = (
        'grind -0 \'*.feature\' | '
        'grin -I \'*.feature\' -0 -f - "FIXME|TODO"')
    with lcd(_relative_to_fabfile()):
        local(source_scan)
        local(feature_scan)
def isort():
    """Automatically (re-)order import statements with isort."""
    project_root = _relative_to_fabfile()
    with lcd(project_root):
        local('isort **/*.py')
def clean():
    """Remove all generated files (.pyc, .coverage, .egg, etc)."""
    commands = (
        'find -name "*.pyc" | xargs rm -f',
        'find -name .coverage | xargs rm -f',
        'find -name .DS_Store | xargs rm -f',     # OSX artefact
        'find -name ._DS_Store | xargs rm -f',    # OSX artefact
        'find -name "._*.*" | xargs rm -f',       # e.g. Caret artefact
        'rm -f .coverage.*',
        'rm -rf build',
        'rm -rf dist',
    )
    with lcd(_relative_to_fabfile()):
        for command in commands:
            local(command)
def upload(index_server='pypitest'):
    """Submit build package to index server as found in `~/.pypirc`.

    The default is to PyPI test. Typically `~/.pypirc` will contain::

        [distutils]
        index-servers=
            pypi
            pypitest

        [pypitest]
        repository = https://testpypi.python.org/pypi
        username = <username>
        password = <password>

        [pypi]
        repository = https://pypi.python.org/pypi
        username = <username>
        password = <password>

    :param index_server: section name in ``~/.pypirc`` to upload to.
    """
    # Refuse to upload anything that is not an exact release tag.
    _abort_if_tag_is_not_at_head()
    with lcd(_relative_to_fabfile()):
        # TODO switch to twine once the following bug has been fixed:
        # https://bugs.launchpad.net/pkginfo/+bug/1437570
        local(
            'python setup.py sdist bdist_wheel upload '
            ' -r {} --show-response'.format(index_server)
        )
def local_git_clone(repo_url):
    """Clone master of *repo_url* into LOGDIR and archive it as le.tar.gz."""
    steps = (
        'if [ -d letsencrypt ]; then rm -rf letsencrypt; fi',
        'git clone %s letsencrypt' % repo_url,
        'tar czf le.tar.gz letsencrypt',
    )
    with lcd(LOGDIR):
        for step in steps:
            local(step)
def local_git_branch(repo_url, branch_name):
    """Clone branch *branch_name* of *repo_url* and archive it as le.tar.gz."""
    clone_cmd = ('git clone %s letsencrypt --branch %s --single-branch'
                 % (repo_url, branch_name))
    with lcd(LOGDIR):
        local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi')
        local(clone_cmd)
        local('tar czf le.tar.gz letsencrypt')
def local_git_PR(repo_url, PRnumstr, merge_master=True):
    """Clone pull request *PRnumstr* from *repo_url*, optionally merge master.

    The PR head is fetched into a local ``lePRtest`` branch; the result is
    archived as ``le.tar.gz``.
    """
    with lcd(LOGDIR):
        local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi')
        local('git clone %s letsencrypt'% repo_url)
        # Fetch the PR head into its own branch, then check it out.
        local('cd letsencrypt && git fetch origin pull/%s/head:lePRtest'%PRnumstr)
        local('cd letsencrypt && git checkout lePRtest')
        if merge_master:
            local('cd letsencrypt && git remote update origin')
            local('cd letsencrypt && git merge origin/master -m "testmerge"')
        local('tar czf le.tar.gz letsencrypt')
def local_repo_to_remote():
    """Copy the local repo tarball to the remote host and unpack it there.

    Expects ``le.tar.gz`` to exist in LOGDIR (see the clone helpers).
    """
    with lcd(LOGDIR):
        # Empty remote_path means the remote user's home directory.
        put(local_path='le.tar.gz', remote_path='')
        run('tar xzf le.tar.gz')
def local_repo_clean():
    """Delete the local repo tarball created by the clone helpers."""
    with lcd(LOGDIR):
        local('rm le.tar.gz')
def setup_virtualenv():
    """Create a virtualenv (.env) in ``basepath`` and install requirements.

    Fix: the original chained ``.format(basepath=basepath)`` onto a command
    string that contains no ``{basepath}`` placeholder — a confusing no-op —
    so the dead call has been removed.
    """
    with lcd(basepath):
        local('''
            virtualenv --no-site-packages --distribute .env \
            && source .env/bin/activate \
            && pip install -r requirements.txt
        ''')
def download(corpus_dir, out_file, lang, date=DEFAULT_DATE):
    """Download a wiki dump into *corpus_dir* as *out_file*, unless present."""
    target_dir = corpus_dir
    if exists(join(target_dir, out_file)):
        return
    dump_name = DUMP_FILE.format(lang=lang, date=date)
    local("mkdir -p {dir}".format(dir=target_dir))
    with lcd(target_dir):
        local("wget {}".format(DUMP_URL.format(lang=lang, date=date)))
        local("mv {} {}".format(dump_name, out_file))


# @task
def install_brown(path=BROWN_DIR):
    """Clone Percy Liang's brown-cluster tool into *path* and build it."""
    setup_steps = (
        "mkdir -p {}",
        "git clone https://github.com/percyliang/brown-cluster.git ./{}",
    )
    for template in setup_steps:
        local(template.format(path))
    with lcd(path):
        local("make")
def make():
    """Install build deps and compile the extension modules in-place."""
    build_steps = (
        'pip install cython',
        'pip install murmurhash',
        'pip install -r requirements.txt',
        'python setup.py build_ext --inplace',
    )
    with virtualenv(VENV_DIR):
        with lcd(path.dirname(__file__)):
            for step in build_steps:
                local(step)
def clean():
    """Run setuptools' clean step for this package from the project root."""
    project_root = path.dirname(__file__)
    with lcd(project_root):
        local('python setup.py clean --all')
def train(json_dir=None, dev_loc=None, model_dir=None):
    """Initialise and train the English parser model inside the virtualenv.

    :param json_dir: training corpus dir, defaults to 'corpora/en/json'.
    :param dev_loc: NOTE(review): accepted but never used here — confirm
        whether callers rely on it before removing.
    :param model_dir: output model dir, defaults to 'models/en/'.
    """
    if json_dir is None:
        json_dir = 'corpora/en/json'
    if model_dir is None:
        model_dir = 'models/en/'
    with virtualenv(VENV_DIR):
        with lcd(path.dirname(__file__)):
            local('python bin/init_model.py en lang_data/ corpora/ ' + model_dir)
            local('python bin/parser/train.py -p en %s/train/ %s/development %s' % (json_dir, json_dir, model_dir))
def wheels():
    """
    Remotely build python binaries on image-factory server.

    Uploads both requirements files, clears the remote wheelhouse, then
    runs the wheel-factory compose service to rebuild the wheels.
    """
    with lcd(env.local_path):
        put('./requirements.txt', '/srv/build/wheel_requirements.txt')
        put('./etc/base_image/image_requirements.txt', '/srv/build/requirements.txt')
        with cd('/srv/build/wheelhouse'):
            run('rm -rf *.whl')
        compose(cmd='-f service.yml -p %s run --rm wheel-factory' % env.project_name, path='/srv/build')
def build_base():
    """
    Remotely build base python image with all installed packages on
    image-factory server.

    Uploads the current requirements.txt and docker-builds the image named
    ``env.base_image_name`` from /srv/build.
    """
    with lcd(env.local_path):
        put('./requirements.txt', '/srv/build/requirements.txt')
        with cd('/srv/build'):
            run('docker build -t {base_image_name} .'.format(
                base_image_name=env.base_image_name,
            ))
def update_css():
    """ [dev] Recompile CSS """
    css_steps = (
        'pwd',
        'sass theme.scss > compiled/endaga.css',
        'cp compiled/endaga.css ../../endagaweb/static/css/endaga.css',
    )
    with lcd('react-theme/css'):
        for step in css_steps:
            local(step)
def prepdeploy():
    """ [deploy] Create deploy package and push to S3.

    Builds the .deb, bundles it with the CodeDeploy appspec and scripts
    into a zip, uploads the zip to S3, and cleans up.

    :return: the uploaded bundle's filename, e.g. ``endagaweb_<ver>.zip``.
    """
    local('mkdir -p /tmp/deploydir')
    pkg_version = package()
    pkg_file = "endagaweb_%s_all.deb" % pkg_version
    local('mv %s /tmp/deploydir/endagaweb_all.deb' % pkg_file)
    local('cp -pr configs/deployment/scripts /tmp/deploydir/.')
    local('cp -pr configs/deployment/appspec.yml /tmp/deploydir/.')
    with lcd('/tmp/deploydir'):
        # zip appends the .zip extension itself.
        local('zip endagaweb_%s appspec.yml endagaweb_all.deb scripts/*' % (pkg_version))
        local('aws s3 cp endagaweb_%s.zip s3://endagaweb-deployment/' % pkg_version)
    local('rm -r /tmp/deploydir')
    puts("Deployment bundle: s3://endagaweb-deployment/endagaweb_%s.zip" % pkg_version)
    return "endagaweb_%s.zip" % pkg_version
def dev():
    """Run the client fabfile's dev task from the client's own directory."""
    client_dir = '../client'
    with api.lcd(client_dir):
        client_fabfile.dev()
def integrate():
    """Run unit tests, commit, pull, run full tests, then push.

    NOTE(review): the exact nesting under ``settings(warn_only=True)`` was
    ambiguous in the original formatting — here all git/test steps after
    the unit tests run with warn_only; confirm against the upstream source.
    """
    with lcd("../django_ecommerce/"):
        local("pwd")
        local("./manage.py test ../tests/unit")
        with settings(warn_only=True):
            local("git add -p && git commit")
            local("git pull")
            local("./manage.py test ../tests")
            local("git push")
def release(force=False):
    """ releases the master branch at the current version to pypi

    :param force: passed through to ``_release``; presumably skips
        safety checks — confirm against ``_release``'s definition.
    :return: whatever ``_release`` returns.
    """
    with api.lcd(THIS_DIR):
        return _release(force=force)
def _hotswap(file, type, keywords):
    """Hot-swap code on every game server located on the current host.

    :param file: path fragment under ``/app/online/<GAME>`` whose directory
        contains ``hotswap.zip`` and ``md5.txt``  (name shadows the builtin
        but is kept for caller compatibility).
    :param type: hotswap script to run — one of ``attach``/``remote``/
        ``update`` per the chmod below (also shadows a builtin).
    :param keywords: comma-separated patterns to confirm reload success in
        the server log.
    :return: dict mapping gameServer -> True/False hotswap success.
    """
    file_with_full_path = '/app/online/{}{}'.format(GAME, file)
    file_path = os.path.dirname(file_with_full_path)
    #local('rsync -aqP {}/{{hotswap.zip,md5.txt}} {}@{}:{}/'.format(file_path, env.user, env.host_string, REMOTE_DIR))
    run('mkdir -p {}'.format(REMOTE_DIR))
    # Upload the archive plus its checksum file.
    with lcd(file_path):
        put('hotswap.zip', REMOTE_DIR)
        put('md5.txt', REMOTE_DIR)
    # Verify integrity, unpack, and make the hotswap scripts executable.
    with cd(REMOTE_DIR):
        run('dos2unix md5.txt && md5sum -c md5.txt')
        run('unzip -q hotswap.zip')
        run('cd hotswap && chmod +x attach remote update')
    ret_value = {}
    for gameServer in LOCATE_GAME_SRVS[env.host_string]:
        # Truncate start.out so only fresh reload output is grepped below.
        with cd('/app/{}/backend/logs'.format(gameServer)):
            run('echo >start.out')
        with cd('{}/hotswap'.format(REMOTE_DIR)):
            pid = gameServer_pid(gameServer)
            run('./{} {}'.format(type, pid))
        with cd('/app/{}/backend/logs'.format(gameServer)):
            # All keywords must show a "reload succ" line; first miss fails
            # the server (for/else: else runs only without a break).
            for each_keyword in keywords.split(','):
                with quiet():
                    do_hotswap_success = run('grep --color=never -E -A 20 "reload.*{}" {} | grep --color=never "reload succ"'.format(each_keyword, 'start.out')).succeeded
                if not do_hotswap_success:
                    ret_value[gameServer] = False
                    break
            else:
                ret_value[gameServer] = True
    return ret_value
def check_local_merge_scripts(local_dir):
    """Sanity-check that *local_dir* holds all required merge scripts.

    Each ``local(...)`` raises on a non-zero exit, so any missing file or
    missing marker aborts the task.
    """
    scripts = ['clear_small_user.sql', 'db.yml', 'forceId.sql', 'hf.py', 'hf_reward.sql', 'table.yml']
    with settings(hide('everything')):
        with lcd(local_dir):
            # Every expected script must exist...
            for each_file in scripts:
                local('test -f {}'.format(each_file))
            # ...forceId.sql must contain all three placeholder ids...
            for replace_str in ['first_force_id', 'second_force_id', 'third_force_id']:
                local('grep {} forceId.sql >/dev/null'.format(replace_str))
            # ...and db.yml must reference both databases.
            local('grep "db: db1" db.yml')
            local('grep "db: db2" db.yml')
def add_base_hosts_for_ssh(self):
    """Insert this match server's host entry into base_hosts.

    Looks up the line for the previous match server and inserts the new
    ``<ip> <matchServer>`` entry right after it.  Refuses to edit when the
    anchor entry is not unique.

    NOTE(review): ``local`` here is used without capture, yet the result
    is splitlines()'d — this likely relies on a capturing wrapper; confirm.
    """
    with lcd('/app/opbin/krb_hosts'):
        result = local('''grep -nE '[0-9]{1,3}(\.[0-9]{1,3}){3} +%s_match_%s' base_hosts''' % (GAME, self.id - 1))
        lines = result.splitlines()
        if len(lines) == 1:
            rowNum, line = lines[0].split(':', 1)
            # Back up base_hosts before editing (original comment was garbled).
            run('cp base_hosts bak/base_hosts.rb{}'.format(TIME))
            # sed '<N>a ...' appends the new entry after line N.
            run("sed -i '{}a {} {}' base_hosts".format(rowNum, self.ip, self.matchServer))
        else:
            print('[WARNING] Failed to add_base_hosts_for_ssh, can NOT locate a proper postion to add the new match entry, because there are more than one entry for {}_match_{}'.format(GAME, self.id - 1))
def upload_to_resource_server(game, file):
    """Upload *file* and its md5.txt to the www resource server.

    :param game: game name used to build the remote resource path.
    :param file: local path of the artefact to upload (parameter name
        shadows the builtin but is kept for caller compatibility).

    Fix: the local variable ``dir`` shadowed the builtin; renamed to
    ``src_dir``.
    """
    src_dir, filename = os.path.split(file)
    resource_dir = '/app/www/{}/{}/{}'.format(game, RELEASE_TYPE, TIMESTAMP)
    resource_ip = gameOption('www_ssh_ip')
    # Create the timestamped directory on the resource host first.
    execute(mk_remote_dir, resource_dir, hosts=[resource_ip])
    with lcd(src_dir), settings(host_string=resource_ip):
        put(filename, resource_dir)
        put('md5.txt', resource_dir)
    #local('{} {}/{{{},md5.txt}} {}:{}/'.format(RSYNC, src_dir, filename, resource_ip, resource_dir))
def clean():
    """Remove bytecode caches and generated documentation artefacts."""
    cleanup_steps = (
        'find . | grep -E "(__pycache__|\.pyc$)" | xargs rm -rf',
        'rm -rf ./docs/build || true',
        'rm -rf ./docs/source/reference/_autosummary || true',
    )
    with lcd(os.path.dirname(__file__)):
        for step in cleanup_steps:
            local(step)
def docs():
    """Rebuild the HTML documentation from scratch."""
    with lcd("./docs"):
        # Clean first so stale pages never survive a rebuild.
        local("make clean")
        local("make html")
def cooja(name, with_malicious=True, **kwargs):
    """
    Start an experiment in Cooja with/without the malicious mote and updates the experiment if motes' positions were changed.

    :param name: experiment name
    :param with_malicious: use the simulation WITH the malicious mote or not
    :param path: expanded path of the experiment (dynamically filled in through 'command' decorator with 'expand')
    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    sim_path = join(kwargs['path'], 'with{}-malicious'.format(['out', ''][with_malicious is True]))
    # Snapshot mote positions before launching Cooja so edits can be detected.
    motes_before = get_motes_from_simulation(join(sim_path, 'simulation.csc'), as_dictionary=True)
    with hide(*HIDDEN_ALL):
        with lcd(sim_path):
            local("make cooja TASK={}".format(kwargs.get('task', "cooja")))
    motes_after = get_motes_from_simulation(join(sim_path, 'simulation.csc'), as_dictionary=True)
    # if there was a change, update the other simulation in this experiment
    # NOTE(review): the condition tests the UNCHANGED intersection being
    # non-empty, not an actual difference — confirm intent upstream.
    if len(set(motes_before.items()) & set(motes_after.items())) > 0:
        other_sim_path = join(kwargs['path'], 'with{}-malicious'.format(['', 'out'][with_malicious is True]))
        set_motes_to_simulation(join(other_sim_path, 'simulation.csc'), motes_after)
    # if this experiment is part of a campaign, update this
    campaign = read_config(kwargs['path']).get('campaign')
    if campaign is not None:
        for experiment in get_experiments(campaign):
            if experiment in ['BASE', name]:
                continue
            exp_path = join(EXPERIMENT_FOLDER, experiment)
            set_motes_to_simulation(join(exp_path, 'with-malicious', 'simulation.csc'), motes_after)
            set_motes_to_simulation(join(exp_path, 'without-malicious', 'simulation.csc'), motes_after)
def test(**kwargs):
    """
    Run framework's tests.

    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    # warn_only keeps the fab task alive even if the test run fails.
    with settings(warn_only=True):
        print(FRAMEWORK_FOLDER)  # show which checkout the tests run from
        with lcd(FRAMEWORK_FOLDER):
            local("python -m unittest -v tests")
def update(silent=False, **kwargs):
    """
    Update Contiki-OS and RPL Attacks Framework.

    Hard-resets each repository to origin/master (after optional user
    confirmation), re-installs requirements when they changed, and finally
    restarts the framework.

    :param silent: run command silently (also skips the confirmation prompt)
    :param kwargs: simulation keyword arguments (see the documentation for more information)
    """
    for folder, repository in zip([CONTIKI_FOLDER, FRAMEWORK_FOLDER], ["Contiki-OS", "RPL Attacks Framework"]):
        with hide(*HIDDEN_ALL):
            with lcd(folder):
                # A proxy failure aborts the whole update loop.
                if "Could not resolve proxy" in local('git fetch --all', capture=True):
                    logger.error("Update failed ; please check your proxy settings")
                    break
                uptodate = "branch is up-to-date" in local('git checkout master', capture=True).strip().split('\n')[-1]
                if not uptodate:
                    # Remember the requirements hash so we can detect changes
                    # after the hard reset.
                    req_exists = exists("requirements.txt")
                    if req_exists:
                        req_md5 = hash_file("requirements.txt")
                    logger.warn("You are about to loose any custom change made to {} ;".format(repository))
                    if silent or std_input("Proceed anyway ? (yes|no) [default: no] ", 'yellow') == 'yes':
                        local('git submodule update --init')
                        local('git fetch --all')
                        local('git reset --hard origin/master')
                        local('git pull')
                        if req_exists and hash_file("requirements.txt") != req_md5:
                            local('pip install -r requirements.txt')
                        # The framework repo ships Vagrant files we don't want
                        # in a provisioned install.
                        if repository == "RPL Attacks Framework":
                            remove_files(folder, "Vagrantfile")
                            remove_folder(join(folder, "provisioning"))
                logger.debug(" > {} {}".format(repository, ["updated", "already up-to-date"][uptodate]))
    if not silent:
        logger.warn("Restarting the framework...")
        restart(PIDFILE)
def make_incremental_build(name, basis="live"):
    """
    Archive a build, hard-linking unchanged files from the "basis" build (default live)

    This can significantly reduce the disk space used by multiple builds.
    On mac, requires ``brew install coreutils``

    Fix: the original called ``find_executable('gcp')`` twice and threw the
    first result away; the lookup is now done once.
    """
    # Prefer GNU cp (gcp) because we need the -l (hard-link) flag.
    cp_bin = "gcp" if find_executable('gcp') else "cp"
    live_name = dealias_build("_live")
    stage_name = dealias_build("_stage")
    if name in (live_name, stage_name):
        abort("Cannot turn the live or stage build into an incremental build")
    basis = dealias_build(basis)
    stop(name)
    with lcd("%s/deploy/builds" % ROOT_DIR):
        # Keep the original build aside until the hard-link copy succeeds.
        local("mv %(name)s %(name)s~" % {'name': name})
        with settings(hide("stderr"), warn_only=True):
            cp_ret = local("%(cp_bin)s -al %(basis)s %(name)s" % {
                'basis': basis,
                'name': name,
                'cp_bin': cp_bin,
            })
        if not cp_ret.succeeded:
            # Roll back the rename before aborting.
            local("mv %(name)s~ %(name)s" % {'name': name})
            abort("Local cp bin does not support -l flag (on mac: brew install coreutils)")
        # Sync real differences over the hard-linked skeleton, then drop the
        # saved original.
        local("rsync -acH --delete %(name)s~/ %(name)s" % {'name': name})
        local("rm -rf %(name)s~" % {'name': name})
def make_knnaas():
    """Rebuild the knnaas worker from a fresh cmake build directory."""
    local('rm -rf knnaas/worker/build')
    local('mkdir -p knnaas/worker/build')
    with lcd('knnaas/worker/build'):
        for step in ('ls', 'pwd', 'cmake ..', 'make'):
            local(step)
def make_caffe():
    """Clone caffe under ./build and compile it, including pycaffe."""
    with lcd('build'):
        local('git clone https://github.com/nikhilketkar/caffe.git')
    with lcd('build/caffe'):
        local('rm -rf build')
        local('mkdir build')
    with lcd('build/caffe/build'):
        for step in ('cmake ..', 'make', 'make pycaffe'):
            local(step)