The following 48 code examples, extracted from open-source Python projects, illustrate how to use setuptools.command.install.install().
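All of the snippets below build on the same basic pattern: subclass setuptools.command.install.install, override run(), and register the subclass through setup()'s cmdclass argument. A minimal sketch of that wiring (the CustomInstall name and the post-install message are illustrative, not taken from any of the projects below):

from setuptools import setup
from setuptools.command.install import install


class CustomInstall(install):
    """Run an extra step after the stock setuptools install."""

    def run(self):
        # Delegate to the standard install logic first.
        install.run(self)
        # Then perform project-specific post-install work.
        print("custom post-install step")


setup(
    name='example-package',   # placeholder metadata
    version='0.1.0',
    cmdclass={'install': CustomInstall},
)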
def run_cmake():
    """ Runs CMake to determine configuration for this build. """
    if _spawn.find_executable('cmake') is None:
        print("CMake is required to build this package.")
        print("Please install/load CMake and re-run setup.")
        sys.exit(-1)

    _build_dir = os.path.join(os.path.split(__file__)[0], 'build')
    _dir_util.mkpath(_build_dir)
    os.chdir(_build_dir)

    try:
        _spawn.spawn(['cmake', '-DCMAKE_BUILD_TYPE=release',
                      '-DENABLE_OPENMP=True', '..'])
    except _spawn.DistutilsExecError:
        print("Error while running CMake")
        sys.exit(-1)
def build_extension(self, ext):
    """ Compile manually the py_mini_racer extension, bypass setuptools """
    try:
        if not is_v8_built():
            self.run_command('build_v8')

        self.debug = True

        if V8_PATH:
            dest_filename = join(self.build_lib, "py_mini_racer")
            copy_file(V8_PATH, dest_filename,
                      verbose=self.verbose, dry_run=self.dry_run)
        else:
            build_ext.build_extension(self, ext)
    except Exception as e:
        # Alter message
        err_msg = """py_mini_racer failed to build, ensure you have an
        up-to-date pip (>= 8.1) to use the wheel instead
        To update pip: 'pip install -U pip'
        See also: https://github.com/sqreen/PyMiniRacer#binary-builds-availability

        Original error: %s"""
        raise Exception(err_msg % repr(e))
def get_requires():
    """
    DEPRECATED: dependency_links doesn't work

    Enables both "pytorch>=0.2" and "git+ssh://..." style links to work.
    You can list both in requirements.txt, which is not supposed to be
    the same as listing requires in setup.py.
    Don't forget to append "#egg=pytorch-0.2" to the end of the github src link.

    Turns out that as well as a dependency_links line, we also need to add the
    name of the package in the install_requires line.
    https://stackoverflow.com/a/33685899/3453033
    https://mike.zwobble.org/2013/05/adding-git-or-hg-or-svn-dependencies-in-setup-py/
    https://stackoverflow.com/questions/3472430/how-can-i-make-setuptools-install-a-package-thats-not-on-pypi
    https://stackoverflow.com/questions/19738085/why-isnt-setup-py-dependency-links-doing-anything
    """
    reqs = read('requirements.txt').splitlines()
    install_requires = []
    dependency_links = []
    for req in reqs:
        if 'git+' in req or '://' in req:
            dependency_links.append(req)
        else:
            install_requires.append(req)
    return install_requires, dependency_links
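The pair returned above is meant to be passed straight to setup(); a minimal sketch of that call, with placeholder metadata, might be:

from setuptools import setup

install_requires, dependency_links = get_requires()

setup(
    name='example-package',              # placeholder
    version='0.1.0',
    install_requires=install_requires,
    dependency_links=dependency_links,   # deprecated, as the docstring warns
)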
def run_cmake(arg=""):
    """
    Forcing to run cmake
    """
    if ds.find_executable('cmake') is None:
        print("CMake is required to build zql")
        print("Please install cmake version >= 2.8 and re-run setup")
        sys.exit(-1)

    print("Configuring zql build with CMake....")
    cmake_args = arg
    try:
        build_dir = op.join(op.split(__file__)[0], 'build')
        dd.mkpath(build_dir)
        os.chdir("build")
        ds.spawn(['cmake', '..'] + cmake_args.split())
        ds.spawn(['make', 'clean'])
        ds.spawn(['make'])
        os.chdir("..")
    except ds.DistutilsExecError:
        print("Error while running cmake")
        print("run 'setup.py build --help' for build options")
        print("You may also try editing the settings in CMakeLists.txt file "
              "and re-running setup")
        sys.exit(-1)
def run(self):
    """Install the package in install mode.

    super().run() does not install dependencies when running
    ``python setup.py install`` (pypa/setuptools#456).
    """
    if 'bdist_wheel' in sys.argv:
        # do not use eggs, but wheels
        super().run()
    else:
        # force install of deps' eggs during setup.py install
        self.do_egg_install()
    self.generate_file_from_template(TEMPLATE_FILES, BASE_ENV,
                                     prefix=BASE_ENV,
                                     syslog_args=SYSLOG_ARGS)
    # data_files is not enough when installing from PyPI
    for file in ETC_FILES:
        shutil.copy2(file, Path(BASE_ENV) / file)
    self.create_pid_folder()
def run(self):
    if sys.platform.startswith("linux"):
        cmd = "which scrot"
        proc = subprocess.Popen(cmd, shell=True,
                                stdout=subprocess.DEVNULL,
                                stderr=subprocess.STDOUT)
        proc.wait()
        if proc.returncode != 0:
            print("Did not find scrot! You might have to install it "
                  "yourself to satisfy pyscreenshot.")

    if not sys.platform.startswith("win32"):
        # We defer to pip for *nix platforms because it actually works on
        # them.
        print("This can take a while.")
        pip.main(["install", "--user", "."])
        return

    for mod in manual_install_modules:
        self.install_manually(mod)
def run(self):
    global path, version, initVersion, forcedVersion, installVersion

    name = self.config_vars['dist_name']
    print(name)
    path = os.path.join(self.install_libbase, 'NeoAnalysis')
    if os.path.exists(path):
        raise Exception("It appears another version of %s is already "
                        "installed at %s; remove this before installing."
                        % (name, path))
    print("Installing to %s" % path)
    rval = install.install.run(self)

    # If the version in __init__ is different from the automatically-generated
    # version string, then we will update __init__ in the install directory
    if initVersion == version:
        return rval

    try:
        initfile = os.path.join(path, '__init__.py')
        data = open(initfile, 'r').read()
        open(initfile, 'w').write(re.sub(r"__version__ = .*",
                                         "__version__ = '%s'" % version,
                                         data))
        installVersion = version
    except:
        sys.stderr.write("Warning: Error occurred while setting version string in build path. "
                         "Installation will use the original version string "
                         "%s instead.\n" % (initVersion))
        if forcedVersion:
            raise
        installVersion = initVersion
        sys.excepthook(*sys.exc_info())
    return rval
def run(self):
    _from_git(self.distribution)
    return install.install.run(self)
def run(self):
    _from_git(self.distribution)
    return du_install.install.run(self)
def run(self):
    import distutils.command.install_scripts

    self.run_command("egg_info")
    if self.distribution.scripts:
        # run first to set up self.outfiles
        distutils.command.install_scripts.install_scripts.run(self)
    else:
        self.outfiles = []
    if self.no_ep:
        # don't install entry point scripts into .egg file!
        return

    ei_cmd = self.get_finalized_command("egg_info")
    dist = pkg_resources.Distribution(
        ei_cmd.egg_base,
        pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
        ei_cmd.egg_name, ei_cmd.egg_version,
    )
    bs_cmd = self.get_finalized_command('build_scripts')
    executable = getattr(
        bs_cmd, 'executable', easy_install.sys_executable)
    is_wininst = getattr(
        self.get_finalized_command("bdist_wininst"), '_is_running', False
    )

    if os.name != 'nt':
        get_script_args = override_get_script_args
    else:
        get_script_args = easy_install.get_script_args
        executable = '"%s"' % executable

    for args in get_script_args(dist, executable, is_wininst):
        self.write_script(*args)
def get_version(package_name, pre_version=None):
    """Get the version of the project.

    First, try getting it from PKG-INFO or METADATA, if it exists. If it does,
    that means we're in a distribution tarball or that install has happened.
    Otherwise, if there is no PKG-INFO or METADATA file, pull the version
    from git.

    We do not support setup.py version sanity in git archive tarballs, nor do
    we support packagers directly sucking our git repo into theirs. We expect
    that a source tarball be made from our git repo - or that if someone wants
    to make a source tarball from a fork of our repo with additional tags in
    it that they understand and desire the results of doing that.

    :param pre_version: The version field from setup.cfg - if set then this
        version will be the next release.
    """
    version = os.environ.get(
        "PBR_VERSION",
        os.environ.get("OSLO_PACKAGE_VERSION", None))
    if version:
        return version
    version = _get_version_from_pkg_metadata(package_name)
    if version:
        return version
    version = _get_version_from_git(pre_version)
    # Handle http://bugs.python.org/issue11638
    # version will either be an empty unicode string or a valid
    # unicode version string, but either way it's unicode and needs to
    # be encoded.
    if sys.version_info[0] == 2:
        version = version.encode('utf-8')
    if version:
        return version
    raise Exception("Versioning for this project requires either an sdist"
                    " tarball, or access to an upstream git repository."
                    " Are you sure that git is installed?")


# This is added because pbr uses pbr to install itself. That means that
# any changes to the egg info writer entrypoints must be forward and
# backward compatible. This maintains the pbr.packaging.write_pbr_json
# path.
def keywords_with_side_effects(argv, **kwargs):
    def is_short_option(argument):
        """Check whether a command line argument is a short option."""
        return len(argument) >= 2 and argument[0] == '-' and argument[1] != '-'

    def expand_short_options(argument):
        """Expand combined short options into canonical short options."""
        return ('-' + char for char in argument[1:])

    def argument_without_setup_requirements(argv, i):
        """Check whether a command line argument needs setup requirements."""
        if argv[i] in NO_SETUP_REQUIRES_ARGUMENTS:
            # Simple case: An argument which is either an option or a command
            # which doesn't need setup requirements.
            return True
        elif (is_short_option(argv[i]) and
              all(option in NO_SETUP_REQUIRES_ARGUMENTS
                  for option in expand_short_options(argv[i]))):
            # Not so simple case: Combined short options none of which need
            # setup requirements.
            return True
        elif argv[i - 1:i] == ['--egg-base']:
            # Tricky case: --egg-info takes an argument which should not make
            # us use setup_requires (defeating the purpose of this code).
            return True
        else:
            return False

    if all(argument_without_setup_requirements(argv, i)
           for i in range(1, len(argv))):
        try:
            cmdclass = kwargs["cmdclass"]
        except KeyError:
            cmdclass = kwargs["cmdclass"] = {}
        cmdclass["build"] = DummyCFFIBuild
        cmdclass["install"] = DummyCFFIInstall
        return kwargs
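The snippet above appears to be cut off before the branch that handles a full build; assuming the complete helper also returns the keyword arguments in that case, a typical call site would unpack its result straight into setup(). A hedged sketch with placeholder metadata:

import sys
from setuptools import setup

setup(**keywords_with_side_effects(
    sys.argv,
    name='example-package',   # placeholder metadata
    version='0.1.0',
))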
def setuptools_run(self):
    """The setuptools version of the .run() method.

    We must pull in the entire code so we can override the level used in the
    _getframe() call since we wrap this call by one more level.
    """
    from distutils.command.install import install as distutils_install

    # Explicit request for old-style install? Just do it
    if self.old_and_unmanageable or self.single_version_externally_managed:
        return distutils_install.run(self)

    # Attempt to detect whether we were called from setup() or by another
    # command. If we were called by setup(), our caller will be the
    # 'run_command' method in 'distutils.dist', and *its* caller will be
    # the 'run_commands' method. If we were called any other way, our
    # immediate caller *might* be 'run_command', but it won't have been
    # called by 'run_commands'. This is slightly kludgy, but seems to
    # work.
    #
    caller = sys._getframe(3)
    caller_module = caller.f_globals.get('__name__', '')
    caller_name = caller.f_code.co_name

    if caller_module != 'distutils.dist' or caller_name != 'run_commands':
        # We weren't called from the command line or setup(), so we
        # should run in backward-compatibility mode to support bdist_*
        # commands.
        distutils_install.run(self)
    else:
        self.do_egg_install()
def run(self):
    reqs = " ".join(["'%s'" % r for r in PKG_INFO["install_requires"]])
    os.system("pip install " + reqs)  # XXX: py27 compatible
    return super(PipInstallCommand, self).run()
def run(self):
    cwd = os.getcwd()
    _install.install.run(self)
    _target_path = os.path.join(get_python_lib(), 'flanders')
    for f in [os.path.join('build', 'lib', 'libflanders.so'),
              os.path.join('build', 'include', 'flanders_export.h'),
              os.path.join('flanders', 'flanders.h')]:
        copy2(os.path.join(cwd, f), _target_path)
def get_setuptools_script_dir():
    # Run the above class just to get paths
    dist = Distribution({'cmdclass': {'install': GetPaths}})
    dist.dry_run = True
    dist.parse_config_files()
    command = dist.get_command_obj('install')
    command.ensure_finalized()
    command.run()
    print(dist.install_libbase)

    src_dir = glob(os.path.join(dist.install_libbase, 'medaka-*', 'exes'))[0]
    for exe in (os.path.join(src_dir, x) for x in os.listdir(src_dir)):
        print("Copying", os.path.basename(exe), '->', dist.install_scripts)
        shutil.copy(exe, dist.install_scripts)
    return dist.install_libbase, dist.install_scripts
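The GetPaths command referenced here is not shown in the snippet. A hypothetical minimal version, inferred from the attributes the function reads back off the Distribution object, could look like this (not the project's actual code):

from setuptools.command.install import install


class GetPaths(install):
    """Hypothetical install subclass that records paths instead of installing."""

    def run(self):
        # Expose the computed install locations on the distribution object
        # so get_setuptools_script_dir() can read them afterwards.
        self.distribution.install_scripts = self.install_scripts
        self.distribution.install_libbase = self.install_libbase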
def install_lxml():
    try:
        import cython  # noqa: F401
    except ImportError:
        cython_was_installed = False
        pip(['install', '-v', 'cython'])
    else:
        cython_was_installed = True

    pip(['install', '-v',
         'https://github.com/funkyfuture/lxml/tarball/smart_xpath#egg=lxml'])

    if not cython_was_installed:
        pip(['uninstall', '--yes', '-v', 'cython'])
def run_my_command():
    global install_requires, dependency_links
    if dependency_links:
        for link in dependency_links:
            print('[custom] Install from CVS:', dependency_links)
            pc.check_output(['pip', 'install', '--upgrade', link])
def post_install(func, **kwargs):
    def command_wrapper(command_subclass):
        # Keep a reference to the command subclass's 'run' function
        _run = command_subclass.run

        def run(self):
            _run(self)
            log.info("running post install function {}".format(func.__name__))
            func(self, **kwargs)

        command_subclass.run = run
        return command_subclass

    return command_wrapper
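A sketch of how such a decorator factory would typically be applied; the announce hook and InstallWithHook class here are illustrative, not part of the original snippet:

from setuptools.command.install import install


def announce(cmd, message="done"):
    # Hypothetical post-install hook; receives the command instance.
    print(message)


@post_install(announce, message="example package installed")
class InstallWithHook(install):
    """Install command whose run() gets wrapped by post_install()."""


# Registered as usual: setup(..., cmdclass={'install': InstallWithHook})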
def install_kernel(cmd):
    # Install the kernel spec when we install the package
    from ipykernel import kernelspec
    from jupyter_client.kernelspec import KernelSpecManager

    kernel_name = 'geonotebook%i' % sys.version_info[0]

    path = os.path.join(tempfile.mkdtemp(suffix='_kernels'), kernel_name)
    try:
        os.makedirs(path)
    except OSError:
        pass

    kernel_dict = {
        'argv': kernelspec.make_ipkernel_cmd(mod='geonotebook'),
        'display_name': 'Geonotebook (Python %i)' % sys.version_info[0],
        'language': 'python',
    }
    with open(os.path.join(path, 'kernel.json'), 'w') as fh:
        json.dump(kernel_dict, fh, indent=1)

    ksm = KernelSpecManager()
    ksm.install_kernel_spec(
        path, kernel_name=kernel_name, user=False, prefix=sys.prefix)

    shutil.rmtree(path)


# shamelessly taken from ipyleaflet: https://github.com/ellisonbg/ipyleaflet
# Copyright (c) 2014 Brian E. Granger
def run(self):
    has_npm = self.has_npm()
    if not has_npm:
        log.error(
            "`npm` unavailable. If you're running this command using "
            "sudo, make sure `npm` is available to sudo"
        )

    env = os.environ.copy()
    env['PATH'] = npm_path

    if self.should_run_npm_install():
        log.info(
            "Installing build dependencies with npm. "
            "This may take a while..."
        )
        check_call(
            ['npm', 'install'], cwd=node_root,
            stdout=sys.stdout, stderr=sys.stderr
        )
        log.info("Building static assets.")
        check_call(
            ['npm', 'run', 'build'], cwd=node_root,
            stdout=sys.stdout, stderr=sys.stderr
        )
        os.utime(self.node_modules, None)

    for t in self.targets:
        if not os.path.exists(t):
            msg = 'Missing file: %s' % t
            if not has_npm:
                msg += '\nnpm is required to build a development version'
            raise ValueError(msg)

    # update package data in case this created new files
    update_package_data(self.distribution)
def _pip_install(links, requires, root=None, option_dict=dict()):
    if options.get_boolean_option(
            option_dict, 'skip_pip_install', 'SKIP_PIP_INSTALL'):
        return
    cmd = [sys.executable, '-m', 'pip.__init__', 'install']
    if root:
        cmd.append("--root=%s" % root)
    for link in links:
        cmd.append("-f")
        cmd.append(link)

    # NOTE(ociuhandu): popen on Windows does not accept unicode strings
    git._run_shell_command(
        cmd + requires, throw_on_error=True, buffer=False,
        env=dict(PIP_USE_WHEEL=b"true"))
def run(self):
    option_dict = self.distribution.get_option_dict('pbr')
    if (not self.single_version_externally_managed
            and self.distribution.install_requires):
        _pip_install(
            self.distribution.dependency_links,
            self.distribution.install_requires, self.root,
            option_dict=option_dict)

    return du_install.install.run(self)
def run(self):
    if os.name != 'nt':
        get_script_args = override_get_script_args
    else:
        get_script_args = easy_install.get_script_args

    import distutils.command.install_scripts

    self.run_command("egg_info")
    if self.distribution.scripts:
        # run first to set up self.outfiles
        distutils.command.install_scripts.install_scripts.run(self)
    else:
        self.outfiles = []
    if self.no_ep:
        # don't install entry point scripts into .egg file!
        return

    ei_cmd = self.get_finalized_command("egg_info")
    dist = pkg_resources.Distribution(
        ei_cmd.egg_base,
        pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
        ei_cmd.egg_name, ei_cmd.egg_version,
    )
    bs_cmd = self.get_finalized_command('build_scripts')
    executable = getattr(
        bs_cmd, 'executable', easy_install.sys_executable)
    is_wininst = getattr(
        self.get_finalized_command("bdist_wininst"), '_is_running', False
    )

    for args in get_script_args(dist, executable, is_wininst):
        self.write_script(*args)
def get_version(package_name, pre_version=None):
    """Get the version of the project.

    First, try getting it from PKG-INFO, if it exists. If it does, that means
    we're in a distribution tarball or that install has happened. Otherwise,
    if there is no PKG-INFO file, pull the version from git.

    We do not support setup.py version sanity in git archive tarballs, nor do
    we support packagers directly sucking our git repo into theirs. We expect
    that a source tarball be made from our git repo - or that if someone wants
    to make a source tarball from a fork of our repo with additional tags in
    it that they understand and desire the results of doing that.
    """
    version = os.environ.get(
        "PBR_VERSION",
        os.environ.get("OSLO_PACKAGE_VERSION", None))
    if version:
        return version
    version = _get_version_from_pkg_info(package_name)
    if version:
        return version
    version = _get_version_from_git(pre_version)
    # Handle http://bugs.python.org/issue11638
    # version will either be an empty unicode string or a valid
    # unicode version string, but either way it's unicode and needs to
    # be encoded.
    if sys.version_info[0] == 2:
        version = version.encode('utf-8')
    if version:
        return version
    raise Exception("Versioning for this project requires either an sdist"
                    " tarball, or access to an upstream git repository."
                    " Are you sure that git is installed?")


# This is added because pbr uses pbr to install itself. That means that
# any changes to the egg info writer entrypoints must be forward and
# backward compatible. This maintains the pbr.packaging.write_pbr_json
# path.
def run(self):
    if not posixpath.exists("src/zq.so"):
        run_cmake()
        ds.spawn(['make', 'install'])
    #self.distribution.ext_modules = get_ext_modules()
    self.do_egg_install()
def get_setuptools_script_dir():
    # Run the above class just to get paths
    dist = Distribution({'cmdclass': {'install': GetPaths}})
    dist.dry_run = True
    dist.parse_config_files()
    command = dist.get_command_obj('install')
    command.ensure_finalized()
    command.run()

    src_dir = glob(os.path.join(dist.install_libbase, 'pomoxis-*', 'exes'))[0]
    for exe in (os.path.join(src_dir, x) for x in os.listdir(src_dir)):
        print("Copying", os.path.basename(exe), '->', dist.install_scripts)
        shutil.copy(exe, dist.install_scripts)
    return dist.install_libbase, dist.install_scripts
def run(self):
    subprocess.check_call(["make"])
    subprocess.check_call(["cp", "build/plasma_store", "plasma/plasma_store"])
    subprocess.check_call(["cp", "build/plasma_manager", "plasma/plasma_manager"])
    subprocess.check_call(["cmake", ".."], cwd="./build")
    subprocess.check_call(["make", "install"], cwd="./build")
    # Calling _install.install.run(self) does not fetch required packages
    # and instead performs an old-style install. See command/install.py in
    # setuptools. So, calling do_egg_install() manually here.
    self.do_egg_install()
def _install_deps_wheels():
    """Python wheels are much faster (no compiling)."""
    print('Installing dependencies...')
    check_call([sys.executable, '-m', 'pip', 'install', '-r',
                'requirements/run.in'])
def create_pid_folder():
    """Create the folder in /var/run to hold the pidfile."""
    pid_folder = os.path.join(BASE_ENV, 'var/run/kytos')
    os.makedirs(pid_folder, exist_ok=True)
    if BASE_ENV == '/':  # system install
        os.chmod(pid_folder, 0o1777)  # permissions like /tmp
def run(self, *args, **kwargs):
    _generate_json_schemas()
    # install is not a new class object, we can't use super
    return install.install.run(self, *args, **kwargs)
def get_version(package_name, pre_version=None):
    """Get the version of the project.

    First, try getting it from PKG-INFO or METADATA, if it exists. If it does,
    that means we're in a distribution tarball or that install has happened.
    Otherwise, if there is no PKG-INFO or METADATA file, pull the version
    from git.

    We do not support setup.py version sanity in git archive tarballs, nor do
    we support packagers directly sucking our git repo into theirs. We expect
    that a source tarball be made from our git repo - or that if someone wants
    to make a source tarball from a fork of our repo with additional tags in
    it that they understand and desire the results of doing that.

    :param pre_version: The version field from setup.cfg - if set then this
        version will be the next release.
    """
    version = os.environ.get(
        "PBR_VERSION",
        os.environ.get("OSLO_PACKAGE_VERSION", None))
    if version:
        return version
    version = _get_version_from_pkg_metadata(package_name)
    if version:
        return version
    version = _get_version_from_git(pre_version)
    # Handle http://bugs.python.org/issue11638
    # version will either be an empty unicode string or a valid
    # unicode version string, but either way it's unicode and needs to
    # be encoded.
    if sys.version_info[0] == 2:
        version = version.encode('utf-8')
    if version:
        return version
    raise Exception("Versioning for this project requires either an sdist"
                    " tarball, or access to an upstream git repository."
                    " It's also possible that there is a mismatch between"
                    " the package name in setup.cfg and the argument given"
                    " to pbr.version.VersionInfo. Project name {name} was"
                    " given, but was not able to be found.".format(
                        name=package_name))


# This is added because pbr uses pbr to install itself. That means that
# any changes to the egg info writer entrypoints must be forward and
# backward compatible. This maintains the pbr.packaging.write_pbr_json
# path.