我们从Python开源项目中,提取了以下49个代码示例,用于说明如何使用fabric.api.cd()。
def deploy(target='dev', sha1=None):
    """Deploy pyconkr-2016 to the given target ('dev' or production).

    When *sha1* is omitted, the currently checked-out local commit is
    deployed. Runs every remote step as the ``pyconkr`` user and finally
    pokes the worker FIFO to trigger a graceful reload.
    """
    if sha1 is None:
        # Use the local working tree's HEAD commit.
        sha1 = local('git rev-parse HEAD', capture=True)
    home_dir = '/home/pyconkr/{target}.pycon.kr/pyconkr-2016'.format(target=target)
    if target == 'dev':
        python_env = '/home/pyconkr/.pyenv/versions/pyconkr-2016-dev'
    else:
        python_env = '/home/pyconkr/.pyenv/versions/pyconkr-2016'
    with settings(cd(home_dir), shell_env(DJANGO_SETTINGS_MODULE='pyconkr.settings_prod')):
        sudo('git fetch --all -p', user='pyconkr')
        sudo('git reset --hard ' + sha1, user='pyconkr')
        sudo('bower install', user='pyconkr')
        sudo('%s/bin/pip install -r requirements.txt' % python_env, user='pyconkr')
        sudo('%s/bin/python manage.py compilemessages' % python_env, user='pyconkr')
        sudo('%s/bin/python manage.py migrate' % python_env, user='pyconkr')
        sudo('%s/bin/python manage.py collectstatic --noinput' % python_env, user='pyconkr')
    # Ask the running worker to reload the new code.
    run('echo r > /var/run/pyconkr-2016-%s.fifo' % target)
def delete_old_builds(history):
    '''
    Auto delete unnecessary build directories from the filesystem.

    Keeps only the builds listed in ``history['builds']`` and removes the
    rest from the remote release directory.
    '''
    build_path = get_release_dir()
    # FIX: the original built kept_builds with map(); under Python 3 a map
    # object is a one-shot iterator, so repeated `x not in kept_builds`
    # membership tests silently consume/miss entries. A list is safe on
    # both Python 2 and 3.
    kept_builds = [get_build_name(build['id']) for build in history['builds']]
    found_builds = fs.glob(build_path)
    to_be_deleted_builds = [x for x in found_builds if x not in kept_builds]
    deletion_count = len(to_be_deleted_builds)
    # Skip, if there are no builds to be deleted.
    if deletion_count == 0:
        return
    # Remove directories to be deleted.
    with cd(build_path):
        fs.rm_rf(to_be_deleted_builds)
        remote_info(
            'Deleted {} old build(s) from the remote'.format(deletion_count)
        )
def update(version=None):
    """Check out and pull the appropriate git branch on the server.

    Preference order: an explicitly requested *version*, then 'develop'
    for local/vagrant/dev servers, otherwise 'master'.
    """
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    if version:
        # An explicit version always wins.
        to_version = version
    elif env.srvr in ['local', 'vagrant', 'dev']:
        # Development-style servers track the develop branch.
        to_version = 'develop'
    else:
        to_version = 'master'
    with cd(env.path), prefix(env.within_virtualenv):
        run('git pull')
        run('git checkout {}'.format(to_version))
def flatpages_mig(direction='www'):
    """Copy Django flatpages between the dev and www deployments.

    direction='www' migrates dev -> www; any other value migrates
    www -> dev. Dumps fixtures from the source env straight into the
    destination tree, then loads them there.
    """
    dev_env = '/home/pyconkr/.pyenv/versions/pyconkr-2016-dev/bin/python'
    www_env = '/home/pyconkr/.pyenv/versions/pyconkr-2016/bin/python'
    dev_dir = '/home/pyconkr/dev.pycon.kr/pyconkr-2016'
    www_dir = '/home/pyconkr/www.pycon.kr/pyconkr-2016'
    if direction == 'www':
        from_env, to_env = dev_env, www_env
        from_dir, to_dir = dev_dir, www_dir
    else:
        from_env, to_env = www_env, dev_env
        from_dir, to_dir = www_dir, dev_dir
    with settings(cd(from_dir), shell_env(DJANGO_SETTINGS_MODULE='pyconkr.settings_prod')):
        # Dump directly into the destination project's fixtures directory.
        sudo('{python} manage.py dumpdata --indent 2 flatpages -o {fixture_to}'.format(
            fixture_to=os.path.join(to_dir, 'pyconkr', 'fixtures', 'flatpages.json'),
            python=from_env))
    with settings(cd(to_dir), shell_env(DJANGO_SETTINGS_MODULE='pyconkr.settings_prod')):
        sudo('{python} manage.py loaddata flatpages'.format(python=to_env))
def package_install_requirements(path):
    """Use FPM's setup metadata library to package dependencies."""
    # Locate FPM's bundled python metadata packages so setup.py can use them.
    fpm_path = os.path.split(run('gem which fpm'))
    metadata_pkg_path = os.path.join(fpm_path[0], 'fpm/package')
    with cd(path):
        with shell_env(PYTHONPATH=metadata_pkg_path):
            run('python3 setup.py --command-packages=pyfpm get_metadata '
                '--output=package_metadata.json')
        get(remote_path='package_metadata.json',
            local_path='/tmp/package_metadata.json')
        with open('/tmp/package_metadata.json') as metadata_file:
            package_metadata = json.load(metadata_file)
        for dependency in package_metadata['dependencies']:
            if _run_fpm_python('\'%s\'' % (dependency), warn_only=True).failed:
                # If this fails, it is likely that this is an Endaga python
                # package and will be fulfilled from the Endaga repo.
                print('Ignoring dependency %s' % dependency)
        # We don't want to clobber dependencies built previously.
        run('mv -n *.%s %s' % (env.pkgfmt, PKG_DIR), quiet=True)
        run('rm *.%s' % (env.pkgfmt, ), quiet=True)
        run('rm package_metadata.json')
def _package_external(directory, package_name, make_clean):
    """Builds packages with mk-build-deps and dpkg-buildpackage.

    Args:
        directory: the path to a repo synced on the VM via vagrant
        package_name: the name of the debian package that will be created
        make_clean: 'yes' to run a full clean build, anything else skips it
    """
    if env.pkgfmt != "deb":
        print("External packages only support deb, not building.")
        return
    if not exists(directory):
        print('path %s does not exist, cannot package' % directory)
        return
    print('packaging %s as %s' % (directory, package_name))
    run('mkdir -p ~/endaga-packages')
    with cd('/home/vagrant/'):
        with cd(directory):
            # Install build dependencies, then build the package itself.
            run('echo y | sudo mk-build-deps')
            run('sudo gdebi --n %s-build-deps*.deb' % package_name)
            run('rm -f %s-build-deps*.deb' % package_name)
            clean_arg = '' if make_clean == 'yes' else '-nc'
            run('dpkg-buildpackage -b -uc -us %s' % clean_arg)
        # dpkg-buildpackage drops artifacts in the parent directory.
        run('mv %s_*.deb ~/endaga-packages/.' % package_name)
        run('rm %s_*' % package_name)
def gitrepos(branch=None, fork='sympy'):
    """
    Clone the repo

    fab vagrant prepare (namely, checkout_cache()) must be run first. By
    default, the branch checked out is the same one as the one checked out
    locally. The master branch is not allowed--use a release branch (see the
    README). No naming convention is put on the release branch.

    To test the release, create a branch in your fork, and set the fork
    option.
    """
    with cd("/home/vagrant"):
        if not exists("sympy-cache.git"):
            error("Run fab vagrant prepare first")
    if not branch:
        # Use the current branch (of this git repo, not the one in Vagrant)
        branch = local("git rev-parse --abbrev-ref HEAD", capture=True)
    if branch == "master":
        raise Exception("Cannot release from master")
    run("mkdir -p repos")
    with cd("/home/vagrant/repos"):
        run("git clone --reference ../sympy-cache.git https://github.com/{fork}/sympy.git".format(fork=fork))
    with cd("/home/vagrant/repos/sympy"):
        run("git checkout -t origin/%s" % branch)
def test_tarball(release='2'):
    """
    Test that the tarball can be unpacked and installed, and that sympy
    imports in the install.
    """
    if release not in {'2', '3'}:  # TODO: Add win32
        raise ValueError("release must be one of '2', '3', not %s" % release)

    venv = "/home/vagrant/repos/test-{release}-virtualenv".format(release=release)
    tarball_formatter_dict = tarball_formatter()

    with use_venv(release):
        make_virtualenv(venv)
        with virtualenv(venv):
            # Unpack the release tarball and install it into the fresh venv.
            run("cp /vagrant/release/{source} releasetar.tar".format(**tarball_formatter_dict))
            run("tar xvf releasetar.tar")
            with cd("/home/vagrant/{source-orig-notar}".format(**tarball_formatter_dict)):
                run("python setup.py install")
                run('python -c "import sympy; print(sympy.__version__)"')
def start_service():
    """Start all local backing services (ejabberd, ES, mongo, nsqd, connector).

    Creates a local ``workspace`` directory next to this file and uses it
    as the working directory / nsqd data path.
    """
    path = os.path.join(os.path.dirname(__file__), "workspace")
    if not os.path.exists(path):
        os.mkdir(path)
    with cd(path):
        try:
            # Best-effort: ejabberd may already be running.
            local("ejabberdctl start")
        # FIX: the original used a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; narrow to Exception while keeping
        # the deliberate best-effort behavior.
        except Exception:
            pass
        local("service elasticsearch start")
        local("service mongod start")
        local("nohup nsqd -data-path=%s &" % path)
        local("/bin/bash -l -c 'source envs/mongo/bin/activate && nohup "
              "mongo-connector -m localhost:27017 -t localhost:9200 -d "
              "elastic2_doc_manager --continue-on-error &'")
def deploy():
    """Run the test suite, then pull and redeploy the webapp on the server."""
    test()
    with cd('/home/deploy/webapp'):
        run("git pull")
        run("pip install -r requirements.txt")
        # Refresh process-manager and web-server configuration.
        sudo("cp supervisord.conf /etc/supervisor/conf.d/webapp.conf")
        sudo("cp nginx.conf /etc/nginx/sites-available/your_domain")
        sudo("ln -sf /etc/nginx/sites-available/your_domain "
             "/etc/nginx/sites-enabled/your_domain")
        sudo("cp apache.conf /etc/apache2/sites-available/your_domain")
        sudo("ln -sf /etc/apache2/sites-available/your_domain "
             "/etc/apache2/sites-enabled/your_domain")
        sudo("service nginx restart")
        sudo("service apache2 restart")
def setting_trans_env(server_name):
    """Temporarily expose a download vhost for *server_name* via nginx.

    Generator intended for context-manager use: creates a download root
    plus an nginx vhost, reloads nginx, yields, and always tears both
    down again in the ``finally`` clause.
    """
    conf_name = 'download_{}.conf'.format(server_name)
    root_dir = '/app/opbak/download_{}_{}'.format(TIMESTAMP, server_name)
    mk_remote_dir(root_dir)
    try:
        with cd('/app/nginx/conf/vhost'):
            run('''echo -e "server {\\n    listen 80;\\n    server_name %s;\\n    root %s;\\n    index Main.html;\\n    access_log logs/default.access.log main;\\n    location / {\\n        expires 0;\\n    }\\n\\n    error_page 404 500 502 503 504 /404.html;\\n}" >%s''' % (server_name, root_dir, conf_name))
        reload_nginx()
        yield
    finally:
        with cd('/app/nginx/conf/vhost'):
            run('rm -f {}'.format(conf_name))
        reload_nginx()
        run('rm -rf {}'.format(root_dir))
def tcp_port(self):
    """
    The tcp port used for the game server.
    Will try to get only once and save to self._tcp_port for later use.
    """
    def get_tcp_port():
        # Extract the port value from conf.xml on the game host.
        cmd = '''grep 'name="port" type="int"' conf.xml |awk -F[\<\>] '{print $3}' '''
        with settings(host_string=self.int_ip), cd('/app/{}/backend/apps'.format(self.name)):
            result = run(cmd)
            lines = result.splitlines()
            if len(lines) != 1:
                raise Exception("Can't get tcp port using cmd: {}".format(cmd))
            return int(lines[0])

    if not self._tcp_port:
        self._tcp_port = get_tcp_port()
    return self._tcp_port
def dns(self):
    """
    The dns for the game server.
    Will try to get only once and save to self._dns for later use.
    """
    def get_dns():
        cmd = '''grep server_name %s.conf | awk '{print $2}' | tr -d ";" ''' % self.name
        # FIX: the original wrote '/app/nginx/conf/vhost'.format(self.name) —
        # a no-op .format() on a literal with no placeholders; removed.
        with settings(host_string=self.int_ip), cd('/app/nginx/conf/vhost'):
            result = run(cmd)
            lines = result.splitlines()
            if len(lines) == 1:
                return lines[0]
            else:
                raise Exception("Can't get dns using cmd: {}".format(cmd))

    if not self._dns:
        self._dns = get_dns()
    return self._dns
def tcp_port(self):
    """
    The tcp port used for the game server.
    Will try to get only once and save to self._tcp_port for later use.
    """
    def get_tcp_port():
        # Extract the port value from conf.xml on the game host.
        cmd = '''grep 'name="port" type="int"' conf.xml |awk -F[\<\>] '{print $3}' '''

        @hosts(self.int_ip)
        def _get_tcp_port():
            with cd('/app/{}/backend/apps'.format(self.name)):
                result = run(cmd)
                lines = result.splitlines()
                if len(lines) != 1:
                    raise Exception("Can't get tcp port using cmd: {}".format(cmd))
                return int(lines[0])

        result = execute(_get_tcp_port)
        return result[self.int_ip]

    if not self._tcp_port:
        self._tcp_port = get_tcp_port()
    return self._tcp_port
def dns(self):
    """
    The dns for the game server.
    Will try to get only once and save to self._dns for later use.
    """
    def get_dns():
        cmd = '''grep server_name %s.conf | awk '{print $2}' | tr -d ";" ''' % self.name

        @hosts(self.int_ip)
        def _get_dns():
            # FIX: the original wrote '/app/nginx/conf/vhost'.format(self.name),
            # a no-op .format() on a literal with no placeholders; removed.
            with cd('/app/nginx/conf/vhost'):
                result = run(cmd)
                lines = result.splitlines()
                if len(lines) == 1:
                    return lines[0]
                else:
                    raise Exception("Can't get dns using cmd: {}".format(cmd))

        result = execute(_get_dns)
        return result[self.int_ip]

    if not self._dns:
        self._dns = get_dns()
    return self._dns
def list_platforms(root_dir):
    """Return the platform directories under *root_dir*.

    (Original docstring was mojibake; presumably: list sub-directories
    that contain a version.lua, skipping version-numbered dirs and
    lyServers — TODO confirm intent.)
    """
    def is_platform(dir):
        # A directory counts as a platform iff it holds a version.lua file.
        with quiet():
            return run('test -f "{}/{}/version.lua"'.format(root_dir, dir)).succeeded

    with cd(root_dir), hide('stdout'):
        result = run('''find ./ -mindepth 1 -maxdepth 1 -type d -print |grep --color=never -vE '([0-9]+(\.[0-9]+){3}\\b)|(lyServers)' ''')
    # NOTE(review): lstrip('./') strips a character class, not the './'
    # prefix — fine for plain names, but would also eat a leading '.' of
    # a hidden directory name; preserved as-is.
    dirs = [each.lstrip('./') for each in result.splitlines()]
    return [each for each in dirs if is_platform(each)]
def make_diff(remote_script_dir, diff_from_lua, diff_to_lua, resource_dir, dest):
    """Run the remote make_diff.py and write a companion size/md5 .lua file.

    *dest* is a comma-separated pair "<out_dir_or_name>,<zip_path>".
    (Original docstring was mojibake; example preserved:)

    Example:
        /app/opbak/make_diff_3/make_diff.py --resource-dir 3.6.1.0/res
        --diff-from 3.6.0.9/res/res.lua --diff-to 3.6.1.0/res/res.lua
        --dest /app/opbak/make_diff_20150909_xxxxx/3.6.1.0,/app/opbak/make_diff_20150909_xxxxx/3.6.1.0.zip
    """
    with hide('running', 'stdout'):
        run('''python {remote_script_dir}/make_diff.py --resource-dir {resource_dir} --diff-from {diff_from_lua} --diff-to {diff_to_lua} --dest {dest}'''.format(remote_script_dir=remote_script_dir, resource_dir=resource_dir, diff_from_lua=diff_from_lua, diff_to_lua=diff_to_lua, dest=dest))

    # Generate the .lua metadata file describing the produced zip.
    _zipfile = dest.split(',')[0]
    # FIX: the original used _zipfile.rstrip('.zip'), which strips any run of
    # the characters '.', 'z', 'i', 'p' from the end (e.g. 'drop.zip' ->
    # 'dro'), not the '.zip' suffix. Strip the suffix explicitly instead.
    if _zipfile.endswith('.zip'):
        zipfile = _zipfile[:-len('.zip')]
    else:
        zipfile = _zipfile
    zip_lua = '{}.lua'.format(zipfile)
    with hide('running', 'stdout'):
        file_size = run('stat --printf="%s" {}'.format(zipfile))
        md5 = run("md5sum {} | awk '{{print $1}}'".format(zipfile)).strip('\n')
        run('''echo -ne 'local updateZipSize = {{}}\nupdateZipSize.value = {file_size}\nupdateZipSize.md5 = "{md5}"\nreturn updateZipSize' >{zip_lua}'''.format(file_size=file_size, md5=md5, zip_lua=zip_lua))
def fetch_and_inflat_package(template_matchServer_ip, remote_dir, matchServer):
    """Unpack a matchServer package and install its nginx conf, DB and files."""
    mk_remote_dir(remote_dir)
    with cd(remote_dir):
        run('tar zxf package.tgz')
        # Back up any pre-existing vhost config before replacing it.
        with quiet():
            nginx_conf_exsits = run('test -f /app/nginx/conf/vhost/{}.conf'.format(matchServer)).succeeded
        if nginx_conf_exsits:
            run('mkdir backup')
            run('mv /app/nginx/conf/vhost/{}.conf backup/'.format(matchServer))
        run('cp nginx_matchServer.conf /app/nginx/conf/vhost/{}.conf'.format(matchServer))
        # Create and initialise the matchServer database.
        run('''pandora --update -e 'create database {}' '''.format(matchServer))
        run('''pandora --update {} <matchServer_init.sql'''.format(matchServer))
        run('''mkdir -p /app/{}'''.format(matchServer))
        run('''tar zxf matchServer.tgz -C /app/{}'''.format(matchServer))
def add_match_dns(self):
    """Register the match server's DNS record on the 'dns' host.

    Prints a warning (does not raise) if the dnsapi call does not report
    success, so the operator can retry manually.
    """
    @hosts('dns')
    def _add_match_dns(id, ip):
        dns_add_cmd = '/app/opbin/dns/dnsapi -g {0} -a add -d match{1}.{0} -l 1 -i {2}'.format(GAME, id, ip)
        print('??????????????: {}'.format(dns_add_cmd))
        ret_value = run(dns_add_cmd)
        if ret_value != 'Record add success':
            print('[WARNING] Failed to add dns, you can try again manually: {}'.format(dns_add_cmd))

    execute(_add_match_dns, self.id, self.ip)
def update_backend(gameServer, version, mainland=True):
    """Point a game server's backend config files at a new version.

    Backs up app.properties / plugins.xml, then rewrites the versioned
    URL path inside each via sed. The mainland flag selects the URL
    pattern variant (domestic vs. overseas naming).
    """
    backup_dir = '/app/opbak/{}/{}'.format(TIME, gameServer)
    run(''' [ -d {0} ] || mkdir -p {0} '''.format(backup_dir))
    with cd('/app/{}/backend/apps'.format(gameServer)):
        for conf_file in ['app.properties', 'plugins.xml']:
            # Check if the config exists before touching it.
            with quiet():
                conf_exists = run('test -f {}'.format(conf_file)).succeeded
            if conf_exists:
                run('cp {} {}/'.format(conf_file, backup_dir))
                if mainland:
                    cmd = ''' sed -i '/http:\/\/.*\/%s/s/%stest_[0-9]\{1,3\}-[0-9]\{1,3\}-[0-9]\{1,3\}/%s/g' %s ''' % (GAME, GAME, version, conf_file)
                else:
                    cmd = ''' sed -i '/http:\/\/.*\/%s/s/%stest_[a-z]\{2,5\}_[0-9]\{1,3\}-[0-9]\{1,3\}-[0-9]\{1,3\}/%s/g' %s ''' % (GAME, GAME, version, conf_file)
                run(cmd)
def setup_supervisor():
    """Install supervisord and export Crestify's Procfile into it.

    We use supervisord to keep Crestify running in the background,
    recover from crashes, and start automatically on bootup. Only one
    gunicorn worker is used (more left the socket unreleased). The
    honcho-generated config is then patched: PATH, run-as user, and a
    TERM stop signal (graceful shutdown for gunicorn and celery).
    """
    sudo('apt-get -y install supervisor')
    sudo('mkdir /var/log/crestify/')
    sudo(
        'cd /home/crestify/crestify && ../crestifyenv/bin/honcho export -s /bin/sh -a crestify supervisord /etc/supervisor/conf.d')
    buf = StringIO()
    get('/etc/supervisor/conf.d/crestify.conf', buf)
    lines = buf.getvalue().splitlines()
    for idx, line in enumerate(lines):
        if line.startswith("environment="):
            lines[idx] = line + ",PATH=/home/crestify/crestifyenv/bin:%(ENV_PATH)s"
        if line.startswith("user="):
            lines[idx] = "user=crestify"
        if line.startswith("stopsignal="):
            lines[idx] = "stopsignal=TERM"
    patched = StringIO("\n".join(lines))
    put(patched, "/etc/supervisor/conf.d/crestify.conf", use_sudo=True)
    sudo('supervisorctl reread')
    sudo('supervisorctl update')
def download_artifact(**kwargs):
    """Fetch the release artifact with curl, per env.curl_* settings.

    Optionally pipes straight into tar and runs a post-install script.
    """
    with cd(env.release_path), credential_output():
        cmd = 'curl --max-time 30 --retry 3 %(curl_url)s' % env
        if env.curl_user:
            cmd += ' --user %(curl_user)s' % env
        if env.curl_output:
            cmd += ' -o %(curl_output)s' % env
        if env.curl_options:
            cmd += ' %(curl_options)s' % env
        # Extract on the fly when the artifact is a tarball.
        if env.curl_extract_tar:
            cmd += ' | tar -x'
        elif env.curl_extract_tgz:
            cmd += ' | tar -xz'
        run(cmd)
        if env.curl_postinstall_script:
            output = show if env.curl_postinstall_output else hide
            with output('output'):
                run(env.curl_postinstall_script)
def build_fis_assets():
    """Run a FIS release build in the release path, per env.fis_* settings."""
    output = show if env.fis_output else hide
    with output('output'):
        with cd('%(release_path)s/%(fis_source)s' % env):
            cmd = (
                'fis release '
                '--file %(fis_conf)s '
                '--dest %(fis_dest)s '
            ) % env
            # Optional build flags.
            if env.fis_md5:
                cmd += '--md5 '
            if env.fis_optimize:
                cmd += '--optimize '
            if env.fis_pack:
                cmd += '--pack '
            if env.fis_domains:
                cmd += '--domains '
            run(cmd)
def _update(self, compose_file, new_settings, force=False):
    """Deploy the Docker stack; skip when nothing changed unless *force*.

    Returns False when settings and image digests are unchanged,
    True after a successful `docker stack deploy`.
    """
    if not force:
        settings, digests = self.current_settings
        digests = digests and json.loads(b64decode(digests).decode())
        if settings == new_settings and digests is not None:
            # Same settings — only redeploy if any image digest moved.
            new_digests = self._get_digests(digests)
            if digests == new_digests:
                return False
    with fab.cd(self.temp_dir):
        fab.put(six.BytesIO(compose_file), self.actual_compose_file)
        fabricio.run('docker stack deploy {options} {name}'.format(
            options=utils.Options(self.options),
            name=self.name,
        ))
    self.stack_updated.set()
    return True
def setupServers():
    """Provision a server with Python 2.7, pip deps, and the loopix repo."""
    sudo('yes | add-apt-repository ppa:fkrull/deadsnakes-python2.7 -y')
    sudo('apt-get -y update')
    sudo('apt-get -y install python2.7')
    sudo('apt-get -y dist-upgrade')
    sudo('apt-get -y install python-pip python-dev build-essential')
    sudo('apt-get -y install libssl-dev libffi-dev git-all')
    sudo('yes | pip install --upgrade pip')
    sudo('yes | pip install --upgrade virtualenv')
    sudo('yes | pip install --upgrade petlib')
    sudo('yes | pip install twisted==16.6.0')
    sudo('yes | pip install numpy')
    sudo('yes | pip install service_identity')
    sudo('yes | pip install sphinxmix')
    sudo('apt-get -y install htop')
    #sudo('apt-get -y install tshark')
    # Clone loopix, or update an existing checkout to BRANCH.
    if fabric.contrib.files.exists("loopix"):
        with cd("loopix"):
            run("git pull")
            run("git checkout %s" % BRANCH)
    else:
        run("git clone https://github.com/UCL-InfoSec/loopix.git")
def deployMultiClient(num): local('rm -f testMap.csv') for i in range(int(num)): dirc = 'client%s' % i with cd(dirc): with cd('loopix'): run("git pull") run("git checkout %s" % BRANCH) with cd('loopix/loopix'): N = hexlify(os.urandom(8)) providers = getProvidersNames() prvName = random.choice(providers) port = int(9999 - i) print "CLIENT: Client%s" % N run("python setup_client.py %d %s Client%s %s" % (port, str(env.host), N, prvName)) get('publicClient.bin', 'publicClient-%d-%s.bin'%(port, env.host)) with open('testMap.csv', 'a') as outfile: csvW = csv.writer(outfile) csvW.writerow(['Client%s'%N, dirc])
def deploy(target='dev', sha1=None):
    """Deploy pyconkr-2017 to the given target ('dev' or production).

    Defaults to the local working tree's HEAD commit, then resets the
    remote checkout, rebuilds assets/deps, migrates, and reloads the
    worker via its FIFO.
    """
    if sha1 is None:
        # get current working git sha1
        sha1 = local('git rev-parse HEAD', capture=True)
    home_dir = '/home/pyconkr/{target}.pycon.kr/pyconkr-2017'.format(target=target)
    if target == 'dev':
        python_env = '/home/pyconkr/.pyenv/versions/pyconkr-2017-dev'
    else:
        python_env = '/home/pyconkr/.pyenv/versions/pyconkr-2017'
    with settings(cd(home_dir), shell_env(DJANGO_SETTINGS_MODULE='pyconkr.settings_prod')):
        run('git fetch --all -p')
        run('git reset --hard ' + sha1)
        run('bower install')
        run('%s/bin/pip install -r requirements.txt' % python_env)
        run('%s/bin/python manage.py compilemessages' % python_env)
        run('%s/bin/python manage.py migrate' % python_env)
        run('%s/bin/python manage.py collectstatic --noinput' % python_env)
    # worker reload
    run('echo r > /var/run/pyconkr-2017-%s.fifo' % target)
def create_app_dir():
    """Create the application directory and setup a virtualenv."""
    # Application directory.
    if exists(remote_app_dir) is False:
        sudo('mkdir -p ' + remote_app_dir)
    # Virtualenv inside it.
    with cd(remote_app_dir):
        if exists(remote_app_dir + '/env') is False:
            sudo('virtualenv env')
    # Hand ownership to the deploy user.
    sudo('chown {}:{} {} -R'.format(env.user, env.user, remote_app_dir))
    # Log directory.
    if exists(remote_log_dir) is False:
        sudo('mkdir {}'.format(remote_log_dir))
def deploy():
    """Stop apache, update stregsystem as its service user, then restart."""
    with cd("/data/stregsystem"):
        sudo("systemctl stop apache2.service")
        with settings(sudo_user='stregsystem'):
            sudo("git pull --ff-only")
            with prefix("source /data/stregsystem/env/bin/activate"):
                sudo("pip install -rrequirements.txt")
                sudo("python manage.py collectstatic --noinput")
                sudo("python manage.py migrate")
        sudo("systemctl start apache2.service")
def reinstall_venv():
    """Delete and recreate the project's virtualenv from scratch."""
    with cd(PERMANENT_PROJECT_FOLDER):
        sudo('rm -rf %s' % VENV_FOLDER)
        sudo('python3 -m venv %s' % VENV_FOLDER)
def start_letsencrypt_setup():
    """Clone letsencrypt into /tmp, obtain a standalone cert, clean up."""
    sudo("mkdir -p /tmp/git")
    sudo("rm -rf /tmp/git/letsencrypt")
    with cd("/tmp/git"):
        sudo("git clone https://github.com/letsencrypt/letsencrypt")
    with cd("/tmp/git/letsencrypt"):
        # Standalone mode binds its own temporary webserver on :80/:443.
        sudo('./letsencrypt-auto certonly --standalone')
    sudo('rm -rf /tmp/git')
def run_setup_script(script_name, context):
    """Run *script_name* inside the project venv with *context* as env vars."""
    activate_path = os.path.join(VENV_BIN_DIRECTORY, 'activate')
    activate_cmd = 'source %s' % activate_path
    with cd(PROJECT_FOLDER), shell_env(**context), prefix(activate_cmd):
        run('python3 %s' % os.path.join(PROJECT_FOLDER, script_name))
def start_or_reload_service(has_started=False):
    '''
    Start the application service, or reload it if already running.
    Prefers a combined start-or-reload script, then reload, then start.
    '''
    with cd(buildman.get_deploy_dir()):
        if runner.is_script_defined(constants.SCRIPT_START_OR_RELOAD):
            remote_info('Starting/Reloading the service.')
            runner.run_script(constants.SCRIPT_START_OR_RELOAD)
            return
        if has_started and runner.is_script_defined(constants.SCRIPT_RELOAD):
            remote_info('Reloading the service.')
            runner.run_script_safely(constants.SCRIPT_RELOAD)
            return
        if runner.is_script_defined(constants.SCRIPT_START):
            remote_info('Starting the service.')
            runner.run_script(constants.SCRIPT_START)
def reload_service():
    '''
    Reload the application service via its deploy-dir reload script.
    '''
    with cd(buildman.get_deploy_dir()):
        remote_info('Reloading the service.')
        runner.run_script_safely(constants.SCRIPT_RELOAD)
def stop_service():
    '''
    Stop the application service via its deploy-dir stop script.
    '''
    with cd(buildman.get_deploy_dir()):
        remote_info('Stopping the service.')
        runner.run_script_safely(constants.SCRIPT_STOP)
def status():
    '''
    Report the service status by running the status-check script.
    '''
    with cd(buildman.get_current_path()):
        runner.run_script(constants.SCRIPT_STATUS_CHECK)
def services():
    '''
    List the services running for the application.
    '''
    with cd(buildman.get_current_path()):
        runner.run_script(constants.SCRIPT_LIST_SERVICES)
def unlock():
    """Unlock the login keychain (OS X servers need this before builds)."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    with cd(env.path):
        run('security unlock-keychain')
def makemigrations(app=None):
    """Run Django makemigrations (optionally for one app) — local only."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    # Migrations must be created locally and committed, never on servers.
    if env.srvr in ['dev', 'stg', 'liv']:
        print(yellow('Do not run makemigrations on the servers'))
        return
    with cd(env.path), prefix(env.within_virtualenv):
        run('./manage.py makemigrations {}'.format(app if app else ''))
def migrate(app=None):
    """Apply Django migrations, optionally restricted to one app."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    with cd(env.path), prefix(env.within_virtualenv):
        run('./manage.py migrate {}'.format(app if app else ''))
def update_index():
    """Regenerate the Solr schema, install it, restart tomcat, reindex."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    with cd(env.path), prefix(env.within_virtualenv):
        run('./manage.py build_solr_schema > schema.xml')
        run('mv schema.xml ../../solr/collection1/conf/')
        # Tomcat must pick up the new schema before reindexing.
        sudo('service tomcat7-{} restart'.format(env.srvr))
        run('./manage.py update_index')
def collect_static(process=False):
    """Run collectstatic on a remote server (skipped for local/vagrant).

    Pass process=True to enable static-file post-processing.
    """
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    if env.srvr in ['local', 'vagrant']:
        print(yellow('Do not run collect_static on local servers'))
        return
    with cd(env.path), prefix(env.within_virtualenv):
        run('./manage.py collectstatic {process} --noinput'.format(
            process=('--no-post-process' if not process else '')))
def install_requirements():
    """Install the server-specific requirements file into the virtualenv.

    Prefers requirements-<srvr>.txt when it exists locally, falling back
    to requirements.txt.
    """
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    reqs = 'requirements-{}.txt'.format(env.srvr)
    # FIX: the original used try/assert/except AssertionError for this
    # fallback; `assert` is stripped under `python -O`, which would make
    # the fallback silently disappear. A plain check is equivalent and safe.
    if not os.path.exists(reqs):
        reqs = 'requirements.txt'
    with cd(env.path), prefix(env.within_virtualenv):
        run('pip install -U -r {}'.format(reqs))
def reinstall_requirement(which):
    """Force-reinstall a single pip package (no dependency changes)."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    with cd(env.path), prefix(env.within_virtualenv):
        run('pip uninstall {0} && pip install --no-deps {0}'.format(which))
def touch_wsgi():
    """Touch wsgi.py so the application server reloads the code."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    with cd(os.path.join(env.path, 'dprr')), prefix(env.within_virtualenv):
        run('touch wsgi.py')
def runserver(port='8000'):
    """Run the Django development server — local/vagrant only."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    if env.srvr not in ['local', 'vagrant']:
        print(yellow('this server only runs for development purposes'))
        return
    with cd(env.path), prefix(env.within_virtualenv):
        run('./manage.py runserver 0.0.0.0:{}'.format(port))
def manage_run(command, sudo=False):
    """Run a Django management command on the remote server.

    Args:
        command: the management command (with arguments) to execute.
        sudo: when True, run the command via sudo instead of run.

    FIX: the boolean parameter ``sudo`` shadows Fabric's ``sudo()``
    function, so the original's ``sudo(u'...')`` call invoked the bool and
    raised TypeError whenever sudo=True. The signature is kept for
    backward compatibility; Fabric's function is re-imported under an
    alias inside the body.
    """
    from fabric.api import sudo as sudo_run  # un-shadow fabric's sudo()
    require('environment')
    manage_base = u"%(virtualenv_root)s/bin/python %(code_root)s/manage.py " % env
    with cd(env.code_root):
        if sudo:
            sudo_run(u'%s %s' % (manage_base, command))
        else:
            run(u'%s %s' % (manage_base, command))
def create_virtualenv():
    """Create the project's 'venv' virtual environment on the server."""
    with cd(PROJECT_DIR):
        run('python3 -m venv venv')
def pull_repository():
    """Pull the latest master branch into the project directory."""
    with cd(PROJECT_DIR):
        run('git pull origin master')