We have extracted the following 38 code examples from Python open source projects to demonstrate how to use sh.cp().
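Before the project examples, here is a minimal sketch of the basic call pattern, assuming the `sh` package is installed and a POSIX `cp` is on the PATH; the paths below are hypothetical and only illustrate the argument shape:

import sh

# Copy a single file; each flag and path is passed as a separate string argument.
sh.cp('/tmp/example/source.txt', '/tmp/example/dest.txt')

# Copy a directory recursively, exactly as on the command line: cp -r SRC DST
sh.cp('-r', '/tmp/example/src_dir', '/tmp/example/dst_dir')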
def copyToFilesystem(data_path, session_path, session_id):
    """
    copies: data_path/session_path
                wav1.wav
                wav1.txt
                wav2.wav
                wav2.txt
                ..
    to:     /data/eyra/recordings/session_<session_id>/wav*.{wav,txt}
    """
    recPath = os.path.join(EYRA_ROOT, 'session_{}'.format(session_id))
    os.makedirs(recPath, exist_ok=True)
    full_session_path = os.path.join(data_path, session_path)
    log('Copying all files from {} to {}'.format(full_session_path, recPath))
    sh.cp(sh.glob(full_session_path + '/*'), recPath)
def copyToFilesystem(data_path, rec_name, session_id, prompt):
    """
    copies: data_path/rec_name
    to:     /data/eyra/recordings/session_<session_id>/wav*.{wav,txt}
            where *.txt contains the corresponding prompt.
    """
    recPath = os.path.join(EYRA_ROOT, 'session_{}'.format(session_id))
    os.makedirs(recPath, exist_ok=True)
    full_rec_name = os.path.join(data_path, rec_name)
    log('Copying file from {} to {}'.format(full_rec_name, recPath))
    sh.cp(full_rec_name, recPath)
    promptPath = os.path.join(recPath, rec_name[:-4] + '.txt')
    log('Writing prompt to file: {}'.format(promptPath))
    with open(promptPath, 'w') as f:
        f.write(prompt)
def copy_files(src_path, dst_path, *files):
    """
    This helper function is aimed to copy files from a source path to a
    destination path.

    :param src_path: absolute or relative source path
    :param dst_path: absolute or relative destination path
    :param files: tuples with the following format (source_filename, destination_filename)
    """
    src_path, dst_path = __expand_folders(src_path, dst_path)
    for file in files:
        if isinstance(file, tuple):
            src, dst = file
        elif isinstance(file, string_types):
            src, dst = 2 * [file]
        else:
            continue
        src, dst = join(src_path, src), join(dst_path, dst)
        if src != dst:
            sh.cp(src, dst)
def copy_folder(src_path, dst_path, includes=None):
    """
    This helper function is aimed to copy an entire folder from a source path
    to a destination path.

    :param src_path: absolute or relative source path
    :param dst_path: absolute or relative destination path
    :param includes: list of sub-folders and files to be included from the
                     src_path and to be copied into dst_path
    """
    src_path, dst_path = __expand_folders(src_path, dst_path)
    if src_path != dst_path:
        if includes is not None:
            dst_path = join(dst_path, split(src_path)[-1])
            if not exists(dst_path):
                makedirs(dst_path)
            for include in includes:
                head, tail = split(include)
                sub_dst_path = join(dst_path, head)
                if not exists(sub_dst_path):
                    makedirs(sub_dst_path)
                sh.cp('-R', join(src_path, include), sub_dst_path)
        else:
            sh.cp('-R', src_path, dst_path)
def build_arch(self, arch):
    super(VlcRecipe, self).build_arch(arch)
    build_dir = self.get_build_dir(arch.arch)
    port_dir = join(build_dir, 'vlc-port-android')
    aar = self.aars[arch]
    if not exists(aar):
        with current_directory(port_dir):
            env = dict(environ)
            env.update({
                'ANDROID_ABI': arch.arch,
                'ANDROID_NDK': self.ctx.ndk_dir,
                'ANDROID_SDK': self.ctx.sdk_dir,
            })
            info("compiling vlc from sources")
            debug("environment: {}".format(env))
            if not exists(join('bin', 'VLC-debug.apk')):
                shprint(sh.Command('./compile.sh'), _env=env,
                        _tail=50, _critical=True)
            shprint(sh.Command('./compile-libvlc.sh'), _env=env,
                    _tail=50, _critical=True)
    shprint(sh.cp, '-a', aar, self.ctx.aars_dir)
def build_arch(self, arch):
    env = self.get_recipe_env(arch)

    harfbuzz_recipe = Recipe.get_recipe('harfbuzz', self.ctx)
    env['LDFLAGS'] = ' '.join(
        [env['LDFLAGS'],
         '-L{}'.format(join(harfbuzz_recipe.get_build_dir(arch.arch),
                            'src', '.libs'))])

    with current_directory(self.get_build_dir(arch.arch)):
        configure = sh.Command('./configure')
        shprint(configure, '--host=arm-linux-androideabi',
                '--prefix={}'.format(realpath('.')),
                '--without-zlib', '--with-png=no', '--enable-shared',
                _env=env)
        shprint(sh.make, '-j5', _env=env)
        shprint(sh.cp, 'objs/.libs/libfreetype.so', self.ctx.libs_dir)
def build_arch(self, arch):
    if exists(join(self.ctx.libs_dir, 'libsdl.so')):
        info('libsdl.so already exists, skipping sdl build.')
        return
    env = self.get_recipe_env(arch)
    with current_directory(self.get_jni_dir()):
        shprint(sh.ndk_build, 'V=1', _env=env, _tail=20, _critical=True)
    libs_dir = join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)
    import os
    contents = list(os.walk(libs_dir))[0][-1]
    for content in contents:
        shprint(sh.cp, '-a', join(self.ctx.bootstrap.build_dir, 'libs',
                                  arch.arch, content),
                self.ctx.libs_dir)
def addrtab(env, output):
    '''Copy address table files into addrtab subfolder'''
    try:
        os.mkdir(output)
    except OSError:
        pass

    import sh
    for addrtab in env.depParser.CommandList["addrtab"]:
        print(sh.cp('-av', addrtab.FilePath,
                    join(output, basename(addrtab.FilePath))
                    ))
# ------------------------------------------------------------------------------


# ------------------------------------------------------------------------------
def copy_addons(branches, addonsdir, reposdir):
    for branch in branches:
        _logger.debug('copying addons for branch {}'.format(
            branch.get('gitproject')))
        for addon in branch.get('enabled_modules'):
            addon_path = os.path.join(
                reposdir, branch.get('gitproject'), addon)
            sh.cp('-r', addon_path, addonsdir)
def copySessionsToFilesystem(db, path) -> None:
    """
    Uses the info in idHash[db]['session'] to locate files in path/session_X
    and copy them to their new location EYRA_ROOT/session_Y
    """
    for session in os.listdir(path):
        oldSessionId = session.split('_')[1]
        newSessionId = idHash[db]['session'][int(oldSessionId)]
        newSessionPath = os.path.join(EYRA_ROOT, 'session_{}'.format(newSessionId))
        sh.cp('-r', os.path.join(path, session), newSessionPath)
def prebuild_arch(self, arch):
    # Override hostpython Setup?
    shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
            join(self.get_build_dir(), 'Modules', 'Setup'))
def postbuild_arch(self, arch):
    super(PyjniusRecipe, self).postbuild_arch(arch)
    info('Copying pyjnius java class to classes build dir')
    with current_directory(self.get_build_dir(arch.arch)):
        shprint(sh.cp, '-a', join('jnius', 'src', 'org'), self.ctx.javaclass_dir)
def build_arch(self, arch):
    if not exists(join(self.get_build_dir(arch.arch), 'libpython2.7.so')):
        self.do_python_build(arch)

    if not exists(self.ctx.get_python_install_dir()):
        shprint(sh.cp, '-a', join(self.get_build_dir(arch.arch), 'python-install'),
                self.ctx.get_python_install_dir())

    # This should be safe to run every time
    info('Copying hostpython binary to targetpython folder')
    shprint(sh.cp, self.ctx.hostpython,
            join(self.ctx.get_python_install_dir(), 'bin', 'python.host'))
    self.ctx.hostpython = join(self.ctx.get_python_install_dir(), 'bin', 'python.host')

    if not exists(join(self.ctx.get_libs_dir(arch.arch), 'libpython2.7.so')):
        shprint(sh.cp, join(self.get_build_dir(arch.arch), 'libpython2.7.so'),
                self.ctx.get_libs_dir(arch.arch))

    # # if exists(join(self.get_build_dir(arch.arch), 'libpython2.7.so')):
    # if exists(join(self.ctx.libs_dir, 'libpython2.7.so')):
    #     info('libpython2.7.so already exists, skipping python build.')
    #     if not exists(join(self.ctx.get_python_install_dir(), 'libpython2.7.so')):
    #         info('Copying python-install to dist-dependent location')
    #         shprint(sh.cp, '-a', 'python-install', self.ctx.get_python_install_dir())
    #     self.ctx.hostpython = join(self.ctx.get_python_install_dir(), 'bin', 'python.host')
    #     return
def build_arch(self, arch):
    with current_directory(self.get_build_dir(arch.arch)):
        env = self.get_recipe_env(arch)
        configure = sh.Command('./configure')
        shprint(configure, '--host=arm-linux', _env=env)
        shprint(sh.make, '-j4', _env=env)
        shprint(sh.mkdir, 'include')
        shprint(sh.cp, '-a', 'gmp.h', 'include/gmp.h')
        shprint(sh.cp, '-a', '.libs/libgmp.so',
                join(self.ctx.get_libs_dir(arch.arch), 'libgmp.so'))
        # also copy to libs_collections/<package_name>
        shprint(sh.cp, '-a', '.libs/libgmp.so',
                join(self.ctx.get_libs_dir(''), 'libgmp.so'))
def build_arch(self, arch):
    env = self.get_recipe_env(arch)
    with current_directory(join(self.get_build_dir(arch.arch), 'libmysqlclient')):
        shprint(sh.cp, '-t', '.', join(self.get_recipe_dir(), 'p4a.cmake'))
        # shprint(sh.mkdir, 'Platform')
        # shprint(sh.cp, '-t', 'Platform', join(self.get_recipe_dir(), 'Linux.cmake'))
        shprint(sh.rm, '-f', 'CMakeCache.txt')
        shprint(sh.cmake, '-G', 'Unix Makefiles',
                # '-DCMAKE_MODULE_PATH=' + join(self.get_build_dir(arch.arch), 'libmysqlclient'),
                '-DCMAKE_INSTALL_PREFIX=./install',
                '-DCMAKE_TOOLCHAIN_FILE=p4a.cmake',
                _env=env)
        shprint(sh.make, _env=env)

        self.install_libs(arch, join('libmysql', 'libmysql.so'))

# def get_recipe_env(self, arch=None):
#     env = super(LibmysqlclientRecipe, self).get_recipe_env(arch)
#     env['WITHOUT_SERVER'] = 'ON'
#     ncurses = self.get_recipe('ncurses', self)
#     # env['CFLAGS'] += ' -I' + join(ncurses.get_build_dir(arch.arch),
#     #                               'include')
#     env['CURSES_LIBRARY'] = join(self.ctx.get_libs_dir(arch.arch), 'libncurses.so')
#     env['CURSES_INCLUDE_PATH'] = join(ncurses.get_build_dir(arch.arch),
#                                       'include')
#     return env
#
# def build_arch(self, arch):
#     env = self.get_recipe_env(arch)
#     with current_directory(self.get_build_dir(arch.arch)):
#         # configure = sh.Command('./configure')
#         # TODO: should add openssl as an optional dep and compile support
#         # shprint(configure, '--enable-shared', '--enable-assembler',
#         #         '--enable-thread-safe-client', '--with-innodb',
#         #         '--without-server', _env=env)
#         # shprint(sh.make, _env=env)
#
#         shprint(sh.cmake, '.', '-DCURSES_LIBRARY=' + env['CURSES_LIBRARY'],
#                 '-DCURSES_INCLUDE_PATH=' + env['CURSES_INCLUDE_PATH'], _env=env)
#         shprint(sh.make, _env=env)
#
#         self.install_libs(arch, 'libmysqlclient.so')
def cythonize_build(self, env, build_dir='.'):
    super(KivyRecipe, self).cythonize_build(env, build_dir=build_dir)

    if not exists(join(build_dir, 'kivy', 'include')):
        return

    # If kivy is new enough to use the include dir, copy it
    # manually to the right location as we bypass this stage of
    # the build
    with current_directory(build_dir):
        build_libs_dirs = glob.glob(join('build', 'lib.*'))

        for dirn in build_libs_dirs:
            shprint(sh.cp, '-r', join('kivy', 'include'),
                    join(dirn, 'kivy'))
def build_arch(self, arch):
    env = self.get_recipe_env(arch)

    # Build libproto.a
    with current_directory(self.get_build_dir(arch.arch)):
        env['HOSTARCH'] = 'arm-eabi'
        env['BUILDARCH'] = shprint(sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]

        if not exists('configure'):
            shprint(sh.Command('./autogen.sh'), _env=env)

        shprint(sh.Command('./configure'),
                '--host={}'.format(env['HOSTARCH']),
                '--enable-shared',
                _env=env)

        with current_directory(join(self.get_build_dir(arch.arch), 'src')):
            shprint(sh.make, 'libprotobuf.la', '-j' + str(cpu_count()), _env=env)
            shprint(sh.cp, '.libs/libprotobuf.a',
                    join(self.ctx.get_libs_dir(arch.arch), 'libprotobuf.a'))

            # Copy stl library
            shutil.copyfile(
                self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' +
                self.ctx.toolchain_version + '/libs/' + arch.arch + '/libgnustl_shared.so',
                join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so'))

    # Build python bindings and _message.so
    with current_directory(join(self.get_build_dir(arch.arch), 'python')):
        hostpython = sh.Command(self.hostpython_location)
        shprint(hostpython, 'setup.py', 'build_ext',
                '--cpp_implementation', _env=env)

    # Install python bindings
    self.install_python_package(arch)
def prebuild_arch(self, arch):
    if self.is_patched(arch):
        return
    shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
            join(self.get_build_dir(arch.arch), 'Setup'))
def build_arch(self, arch):
    env = self.get_recipe_env(arch)
    with current_directory(self.get_build_dir(arch.arch)):
        if not exists('configure'):
            shprint(sh.Command('./autogen.sh'), _env=env)
        shprint(sh.Command('autoreconf'), '-vif', _env=env)
        shprint(sh.Command('./configure'),
                '--host=' + arch.toolchain_prefix,
                '--prefix=' + self.ctx.get_python_install_dir(),
                '--enable-shared', _env=env)
        shprint(sh.make, '-j5', 'libffi.la', _env=env)

        # dlname = None
        # with open(join(host, 'libffi.la')) as f:
        #     for line in f:
        #         if line.startswith('dlname='):
        #             dlname = line.strip()[8:-1]
        #             break
        #
        # if not dlname or not exists(join(host, '.libs', dlname)):
        #     raise RuntimeError('failed to locate shared object! ({})'
        #                        .format(dlname))

        # shprint(sh.sed, '-i', 's/^dlname=.*$/dlname=\'libffi.so\'/', join(host, 'libffi.la'))

        shprint(sh.cp, '-t', self.ctx.get_libs_dir(arch.arch),
                join(self.get_host(arch), '.libs', 'libffi.so'))  #,
                # join(host, 'libffi.la'))
def prebuild_arch(self, arch):
    super(JpegRecipe, self).prebuild_arch(arch)
    build_dir = self.get_build_dir(arch.arch)

    app_mk = join(build_dir, 'Application.mk')
    if not exists(app_mk):
        shprint(sh.cp, join(self.get_recipe_dir(), 'Application.mk'), app_mk)

    jni_ln = join(build_dir, 'jni')
    if not exists(jni_ln):
        shprint(sh.ln, '-s', build_dir, jni_ln)
def copy_files(self, arch):
    env = self.get_recipe_env(arch)

    lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}"
    lib = lib.format(ndk=self.ctx.ndk_dir,
                     version=env["TOOLCHAIN_VERSION"],
                     arch=arch.arch)
    stl_lib = join(lib, "libgnustl_shared.so")
    dst_dir = join(self.ctx.get_site_packages_dir(), "..", "lib-dynload")
    shprint(sh.cp, stl_lib, dst_dir)

    src_lib = join(self.get_build_dir(arch.arch), "icu_build", "lib")
    dst_lib = self.get_lib_dir(arch)

    src_suffix = "." + self.version
    dst_suffix = "." + self.version.split(".")[0]  # main version
    for lib in self.generated_libraries:
        shprint(sh.cp, join(src_lib, lib + src_suffix),
                join(dst_lib, lib + dst_suffix))

    src_include = join(
        self.get_build_dir(arch.arch), "icu_build", "include")
    dst_include = join(
        self.ctx.get_python_install_dir(), "include", "icu")
    ensure_dir(dst_include)
    shprint(sh.cp, "-r", join(src_include, "layout"), dst_include)
    shprint(sh.cp, "-r", join(src_include, "unicode"), dst_include)

    # copy stl library
    lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}"
    lib = lib.format(ndk=self.ctx.ndk_dir,
                     version=env["TOOLCHAIN_VERSION"],
                     arch=arch.arch)
    stl_lib = join(lib, "libgnustl_shared.so")

    dst_dir = join(self.ctx.get_python_install_dir(), "lib")
    ensure_dir(dst_dir)
    shprint(sh.cp, stl_lib, dst_dir)
def build_arch(self, arch):
    super(LevelDBRecipe, self).build_arch(arch)
    env = self.get_recipe_env(arch)
    with current_directory(self.get_build_dir(arch.arch)):
        if 'snappy' in recipe.ctx.recipe_build_order:
            # Copy source from snappy recipe
            sh.cp('-rf', self.get_recipe('snappy', self.ctx).get_build_dir(arch.arch), 'snappy')
        # Build
        shprint(sh.make, _env=env)
        # Copy the shared library
        shutil.copyfile('libleveldb.so',
                        join(self.ctx.get_libs_dir(arch.arch), 'libleveldb.so'))
        # Copy stl
        shutil.copyfile(self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' +
                        self.ctx.toolchain_version + '/libs/' + arch.arch + '/libgnustl_shared.so',
                        join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so'))
def build_arch(self, arch):
    env = self.get_recipe_env(arch)
    with current_directory(self.get_build_dir(arch.arch)):
        configure = sh.Command('./configure')
        shprint(configure, '--without-readline', '--host=arm-linux', _env=env)
        shprint(sh.make, 'submake-libpq', _env=env)
        shprint(sh.cp, '-a', 'src/interfaces/libpq/libpq.a',
                self.ctx.get_libs_dir(arch.arch))
def build_arch(self, arch):
    super(LibZMQRecipe, self).build_arch(arch)
    env = self.get_recipe_env(arch)
    #
    # libsodium_recipe = Recipe.get_recipe('libsodium', self.ctx)
    # libsodium_dir = libsodium_recipe.get_build_dir(arch.arch)
    # env['sodium_CFLAGS'] = '-I{}'.format(join(
    #     libsodium_dir, 'src'))
    # env['sodium_LDLAGS'] = '-L{}'.format(join(
    #     libsodium_dir, 'src', 'libsodium', '.libs'))

    curdir = self.get_build_dir(arch.arch)
    prefix = join(curdir, "install")
    with current_directory(curdir):
        bash = sh.Command('sh')
        shprint(
            bash, './configure',
            '--host=arm-linux-androideabi',
            '--without-documentation',
            '--prefix={}'.format(prefix),
            '--with-libsodium=no',
            _env=env)
        shprint(sh.make, _env=env)
        shprint(sh.make, 'install', _env=env)
        shutil.copyfile('.libs/libzmq.so', join(
            self.ctx.get_libs_dir(arch.arch), 'libzmq.so'))

        bootstrap_obj_dir = join(self.ctx.bootstrap.build_dir, 'obj', 'local', arch.arch)
        ensure_dir(bootstrap_obj_dir)
        shutil.copyfile(
            '{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}/libgnustl_shared.so'.format(
                self.ctx.ndk_dir, self.ctx.toolchain_version, arch),
            join(bootstrap_obj_dir, 'libgnustl_shared.so'))

        # Copy libgnustl_shared.so
        with current_directory(self.get_build_dir(arch.arch)):
            sh.cp(
                "{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx, arch=arch),
                self.ctx.get_libs_dir(arch.arch)
            )
def biglink(ctx, arch):
    # First, collate object files from each recipe
    info('Collating object files from each recipe')
    obj_dir = join(ctx.bootstrap.build_dir, 'collated_objects')
    ensure_dir(obj_dir)
    recipes = [Recipe.get_recipe(name, ctx) for name in ctx.recipe_build_order]
    for recipe in recipes:
        recipe_obj_dir = join(recipe.get_build_container_dir(arch.arch),
                              'objects_{}'.format(recipe.name))
        if not exists(recipe_obj_dir):
            info('{} recipe has no biglinkable files dir, skipping'
                 .format(recipe.name))
            continue
        files = glob.glob(join(recipe_obj_dir, '*'))
        if not len(files):
            info('{} recipe has no biglinkable files, skipping'
                 .format(recipe.name))
            continue
        info('{} recipe has object files, copying'.format(recipe.name))
        files.append(obj_dir)
        shprint(sh.cp, '-r', *files)

    env = arch.get_env()
    env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format(
        join(ctx.bootstrap.build_dir, 'obj', 'local', arch.arch))

    if not len(glob.glob(join(obj_dir, '*'))):
        info('There seem to be no libraries to biglink, skipping.')
        return
    info('Biglinking')
    info('target {}'.format(join(ctx.get_libs_dir(arch.arch),
                                 'libpymodules.so')))
    do_biglink = copylibs_function if ctx.copy_libs else biglink_function

    do_biglink(
        join(ctx.get_libs_dir(arch.arch), 'libpymodules.so'),
        obj_dir.split(' '),
        extra_link_dirs=[join(ctx.bootstrap.build_dir,
                              'obj', 'local', arch.arch)],
        env=env)
def install_libs(self, arch, *libs):
    libs_dir = self.ctx.get_libs_dir(arch.arch)
    if not libs:
        warning('install_libs called with no libraries to install!')
        return
    args = libs + (libs_dir,)
    shprint(sh.cp, *args)
def prepare_build_dir(self, arch):
    if self.src_filename is None:
        print('IncludedFilesBehaviour failed: no src_filename specified')
        exit(1)
    shprint(sh.rm, '-rf', self.get_build_dir(arch))
    shprint(sh.cp, '-a', join(self.get_recipe_dir(), self.src_filename),
            self.get_build_dir(arch))
def build_compiled_components(self, arch):
    super(CppCompiledComponentsPythonRecipe, self).build_compiled_components(arch)

    # Copy libgnustl_shared.so
    with current_directory(self.get_build_dir(arch.arch)):
        sh.cp(
            "{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx, arch=arch),
            self.ctx.get_libs_dir(arch.arch)
        )
def distribute_libs(self, arch, src_dirs, wildcard='*', dest_dir="libs"):
    '''Copy existing arch libs from build dirs to current dist dir.'''
    info('Copying libs')
    tgt_dir = join(dest_dir, arch.arch)
    ensure_dir(tgt_dir)
    for src_dir in src_dirs:
        for lib in glob.glob(join(src_dir, wildcard)):
            shprint(sh.cp, '-a', lib, tgt_dir)
def distribute_javaclasses(self, javaclass_dir, dest_dir="src"):
    '''Copy existing javaclasses from build dir to current dist dir.'''
    info('Copying java files')
    ensure_dir(dest_dir)
    for filename in glob.glob(javaclass_dir):
        shprint(sh.cp, '-a', filename, dest_dir)
def _unpack_aar(self, aar, arch):
    '''Unpack content of .aar bundle and copy to current dist dir.'''
    with temp_directory() as temp_dir:
        name = splitext(basename(aar))[0]
        jar_name = name + '.jar'
        info("unpack {} aar".format(name))
        debug("  from {}".format(aar))
        debug("  to {}".format(temp_dir))
        shprint(sh.unzip, '-o', aar, '-d', temp_dir)

        jar_src = join(temp_dir, 'classes.jar')
        jar_tgt = join('libs', jar_name)
        debug("copy {} jar".format(name))
        debug("  from {}".format(jar_src))
        debug("  to {}".format(jar_tgt))
        ensure_dir('libs')
        shprint(sh.cp, '-a', jar_src, jar_tgt)

        so_src_dir = join(temp_dir, 'jni', arch.arch)
        so_tgt_dir = join('libs', arch.arch)
        debug("copy {} .so".format(name))
        debug("  from {}".format(so_src_dir))
        debug("  to {}".format(so_tgt_dir))
        ensure_dir(so_tgt_dir)
        so_files = glob.glob(join(so_src_dir, '*.so'))
        for f in so_files:
            shprint(sh.cp, '-a', f, so_tgt_dir)
def update_versioning(daily_file):
    """Update the version-controlled folders based on the differences
    between them and the given daily file.

    - If the file does not exist under version control, add it and commit
      with a relevant message.
    - If the versioned file and the daily file differ, overwrite the
      versioned one with the daily file and commit the changes with a
      relevant message.
    - If both files are identical, do nothing.

    NOTE: This function must be run from the repository root.
    """
    # Make sure we are located at the repository root.
    os.chdir(ROOT_DIR)

    version_file, _, file_date = versioning_assistant(daily_file)

    if not os.path.isfile(version_file):
        # File is not on version control.
        commit_msg = 'Agrego archivo {} encontrado el {}'.format(
            version_file, file_date)
    elif not filecmp.cmp(daily_file, version_file):
        # daily_file and version_file differ.
        commit_msg = 'Modifico archivo {} según cambios del {}'.format(
            version_file, file_date)
    else:
        # No changes between daily_file and version_file.
        commit_msg = None

    # Commit if appropriate.
    if commit_msg:
        sh.cp(daily_file, version_file)
        GIT.add(version_file)
        GIT.commit(m=commit_msg)
def write_docker_file(workdir_path, instance_data, devel_mode=False):
    pycharm_debug_egg = False
    if os.path.isfile(PYCHARM_DEBUG_EGG):
        sh.cp(PYCHARM_DEBUG_EGG, workdir_path)
        pycharm_debug_egg = os.path.basename(PYCHARM_DEBUG_EGG)
    elif devel_mode:
        print "Warning: Pycharm debug egg not found at {}, " \
              "debugging with pycharm will not work".format(PYCHARM_DEBUG_EGG)

    parent_docker_image = instance_data.get_parent_docker_image()
    parent_docker_image_tag = instance_data.get_parent_docker_image_tag()
    apt_packages = instance_data.get_data('apt_package_ids')
    pip_modules = instance_data.get_data('pip_module_ids')

    with open(os.path.join(workdir_path, 'Dockerfile'), 'w') as dockerfile:
        dockerfile.write(
            '# This file will be overwritten on the next ProjectSetup run \n')
        if parent_docker_image_tag:
            dockerfile.write('FROM {}:{}\n\n'.format(parent_docker_image,
                                                     parent_docker_image_tag))
        else:
            dockerfile.write('FROM {}\n\n'.format(parent_docker_image))
        dockerfile.write('USER root\n'.format(parent_docker_image))
        if apt_packages:
            dockerfile.write('{}\n'.format(get_extra_deps_apt(apt_packages)))
        if pip_modules:
            dockerfile.write('{}\n'.format(get_extra_deps_pip(pip_modules)))
        # add pycharm debug egg and debug start script
        if pycharm_debug_egg:
            dockerfile.write('ADD {} /opt/odoo/odoo/\n'.format(
                pycharm_debug_egg)
            )
            dockerfile.write(
                'RUN set -x; easy_install /opt/odoo/odoo/{}\n'.format(
                    pycharm_debug_egg)
            )
        dockerfile.write('USER odoo\n'.format(parent_docker_image))
        if not devel_mode:
            dockerfile.write('ADD addons-extra /opt/odoo/addons-extra\n')

    with open(os.path.join(workdir_path, '.dockerignore'), 'w') as docker_ignore_file:
        docker_ignore_file.write(
            '# This file will be overwritten on the next ProjectSetup run\n')
        for line in [
            '*.zip',
            '*.dump',
            'repos',
            'odoo',
            'data',
            'odoo-enterprise',
        ]:
            docker_ignore_file.write('{}\n'.format(line))
def extract_source(self, source, cwd):
    """
    (internal) Extract the `source` into the directory `cwd`.
    """
    if not source:
        return
    if isfile(source):
        info("Extract {} into {}".format(source, cwd))

        if source.endswith(".tgz") or source.endswith(".tar.gz"):
            shprint(sh.tar, "-C", cwd, "-xvzf", source)

        elif source.endswith(".tbz2") or source.endswith(".tar.bz2"):
            shprint(sh.tar, "-C", cwd, "-xvjf", source)

        elif source.endswith(".zip"):
            zf = zipfile.ZipFile(source)
            zf.extractall(path=cwd)
            zf.close()

        else:
            warning(
                "Error: cannot extract, unrecognized extension for {}"
                .format(source))
            raise Exception()

    elif isdir(source):
        info("Copying {} into {}".format(source, cwd))
        shprint(sh.cp, '-a', source, cwd)
    else:
        warning(
            "Error: cannot extract or copy, unrecognized path {}"
            .format(source))
        raise Exception()

# def get_archive_rootdir(self, filename):
#     if filename.endswith(".tgz") or filename.endswith(".tar.gz") or \
#             filename.endswith(".tbz2") or filename.endswith(".tar.bz2"):
#         archive = tarfile.open(filename)
#         root = archive.next().path.split("/")
#         return root[0]
#     elif filename.endswith(".zip"):
#         with zipfile.ZipFile(filename) as zf:
#             return dirname(zf.namelist()[0])
#     else:
#         print("Error: cannot detect root directory")
#         print("Unrecognized extension for {}".format(filename))
#         raise Exception()
def install_python_package(self, arch, name=None, env=None, is_dir=True):
    '''Automate the installation of a Python package (or a cython
    package where the cython components are pre-built).'''
    # arch = self.filtered_archs[0]  # old kivy-ios way
    if name is None:
        name = self.name
    if env is None:
        env = self.get_recipe_env(arch)

    info('Installing {} into site-packages'.format(self.name))

    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.hostpython_location)

        if self.ctx.python_recipe.from_crystax:
            # hppath = join(dirname(self.hostpython_location), 'Lib',
            #               'site-packages')
            hpenv = env.copy()
            # if 'PYTHONPATH' in hpenv:
            #     hpenv['PYTHONPATH'] = ':'.join([hppath] +
            #                                    hpenv['PYTHONPATH'].split(':'))
            # else:
            #     hpenv['PYTHONPATH'] = hppath
            # hpenv['PYTHONHOME'] = self.ctx.get_python_install_dir()
            # shprint(hostpython, 'setup.py', 'build',
            #         _env=hpenv, *self.setup_extra_args)
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=.',
                    # AND: will need to unhardcode the 3.5 when adding 2.7 (and other crystax supported versions)
                    _env=hpenv, *self.setup_extra_args)

            # site_packages_dir = self.ctx.get_site_packages_dir()
            # built_files = glob.glob(join('build', 'lib*', '*'))
            # for filen in built_files:
            #     shprint(sh.cp, '-r', filen,
            #             join(site_packages_dir, split(filen)[-1]))
        elif self.call_hostpython_via_targetpython:
            shprint(hostpython, 'setup.py', 'install', '-O2', _env=env,
                    *self.setup_extra_args)
        else:
            hppath = join(dirname(self.hostpython_location), 'Lib',
                          'site-packages')
            hpenv = env.copy()
            if 'PYTHONPATH' in hpenv:
                hpenv['PYTHONPATH'] = ':'.join([hppath] +
                                               hpenv['PYTHONPATH'].split(':'))
            else:
                hpenv['PYTHONPATH'] = hppath
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=lib/python2.7/site-packages',
                    _env=hpenv, *self.setup_extra_args)
            # AND: Hardcoded python2.7 needs fixing

        # If asked, also install in the hostpython build dir
        if self.install_in_hostpython:
            self.install_hostpython_package(arch)
def fli(env, dev, ipbuspkg):
    # -------------------------------------------------------------------------
    # Must be in a build area
    if env.project is None:
        raise click.ClickException(
            'Project area not defined. Move into a project area and try again')

    if env.projectConfig['toolset'] != 'sim':
        raise click.ClickException(
            "Work area toolset mismatch. Expected 'sim', found '%s'" % env.projectConfig['toolset'])
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    if not which('vsim'):
        raise click.ClickException(
            "ModelSim is not available. Have you sourced the environment script?")
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    if ipbuspkg not in env.getSources():
        raise click.ClickException(
            "Package %s not found in source/. The FLI cannot be built." % ipbuspkg)
    # -------------------------------------------------------------------------

    # Set ModelSim root based on vsim's path
    os.environ['MODELSIM_ROOT'] = (dirname(dirname(which('vsim'))))

    # Apply set
    # os.environ['MTI_VCO_MODE']='64'

    lFliSrc = join(env.src, ipbuspkg, 'components', 'ipbus_eth',
                   'firmware', 'sim', 'modelsim_fli')

    import sh
    # Clean-up
    sh.rm('-rf', 'modelsim_fli', 'mac_fli.so', _out=sys.stdout)

    # Copy
    sh.cp('-a', lFliSrc, './', _out=sys.stdout)

    # Make
    sh.make('-C', 'modelsim_fli', 'TAP_DEV={0}'.format(dev), _out=sys.stdout)

    # Link
    sh.ln('-s', 'modelsim_fli/mac_fli.so', '.', _out=sys.stdout)
# ------------------------------------------------------------------------------


# ------------------------------------------------------------------------------