def _find_libpy3_windows(self, env):
    '''
    Find python3 libraries on Windows and also verify that the arch matches
    what we are building for.
    '''
    # e.g. 'win32' or 'win-amd64' -- the suffix encodes the bitness.
    pyarch = sysconfig.get_platform()
    arch = detect_cpu_family(env.coredata.compilers)
    if arch == 'x86':
        arch = '32'
    elif arch == 'x86_64':
        arch = '64'
    else:
        # We can't cross-compile Python 3 dependencies on Windows yet
        mlog.log('Unknown architecture {!r} for'.format(arch),
                 mlog.bold(self.name))
        self.is_found = False
        return
    # Pyarch ends in '32' or '64'
    if arch != pyarch[-2:]:
        mlog.log('Need', mlog.bold(self.name),
                 'for {}-bit, but found {}-bit'.format(arch, pyarch[-2:]))
        self.is_found = False
        return
    # Use both generic and platform-specific include dirs when they differ.
    inc = sysconfig.get_path('include')
    platinc = sysconfig.get_path('platinclude')
    self.compile_args = ['-I' + inc]
    if inc != platinc:
        self.compile_args.append('-I' + platinc)
    # Nothing exposes this directly that I could find
    basedir = sysconfig.get_config_var('base')
    vernum = sysconfig.get_config_var('py_version_nodot')
    self.link_args = ['-L{}/libs'.format(basedir),
                      '-lpython{}'.format(vernum)]
    self.version = sysconfig.get_config_var('py_version_short')
    self.is_found = True
def getusersitepackages():
    """Returns the user-specific site-packages directory path.

    If the global variable ``USER_SITE`` is not initialized yet, this
    function will also set it.
    """
    global USER_SITE
    user_base = getuserbase() # this will also set USER_BASE
    if USER_SITE is not None:
        # Already computed on a previous call.
        return USER_SITE
    from sysconfig import get_path
    if sys.platform == 'darwin':
        from sysconfig import get_config_var
        # Framework builds of Python on macOS use a dedicated user scheme.
        if get_config_var('PYTHONFRAMEWORK'):
            USER_SITE = get_path('purelib', 'osx_framework_user')
            return USER_SITE
    if sys.platform == 'win32':
        # NOTE: _POSIX_BUILD is a private sysconfig attribute -- this covers
        # POSIX-layout builds (e.g. cygwin-style) running on Windows.
        from sysconfig import _POSIX_BUILD
        if _POSIX_BUILD:
            USER_SITE = get_path('purelib', 'posix_user')
            return USER_SITE
    # Default: per-OS user scheme, e.g. 'posix_user' or 'nt_user'.
    USER_SITE = get_path('purelib', '%s_user' % os.name)
    return USER_SITE
def print_includes():
    """Print space-separated ``-I`` flags for the relevant include dirs.

    Collects the generic and platform-specific Python include directories
    plus the directory reported by ``get_include()``, deduplicated while
    preserving order.
    """
    candidates = (
        sysconfig.get_path('include'),
        sysconfig.get_path('platinclude'),
        get_include(),
    )
    unique_dirs = []
    for candidate in candidates:
        if candidate not in unique_dirs:
            unique_dirs.append(candidate)
    print(' '.join('-I' + d for d in unique_dirs))
def test_user_similar(self):
    # Issue 8759 : make sure the posix scheme for the users
    # is similar to the global posix_prefix one
    base = get_config_var('base')
    user = get_config_var('userbase')
    for name in ('stdlib', 'platstdlib', 'purelib', 'platlib'):
        global_path = get_path(name, 'posix_prefix')
        user_path = get_path(name, 'posix_user')
        # The user path must equal the global one with the install base
        # swapped for the user base.
        self.assertEqual(user_path, global_path.replace(base, user))
def test_user_similar(self):
    # Issue 8759 : make sure the posix scheme for the users
    # is similar to the global posix_prefix one
    base = get_config_var("base")
    user = get_config_var("userbase")
    for name in ("stdlib", "platstdlib", "purelib", "platlib"):
        global_path = get_path(name, "posix_prefix")
        user_path = get_path(name, "posix_user")
        # BUG FIX: assertEquals is a deprecated alias that was removed in
        # Python 3.12; use assertEqual.
        self.assertEqual(user_path, global_path.replace(base, user))
def get_sys_path(location, name):
    # Returns the sysconfig path for a distribution, or None
    # Scan every known scheme for a lib dir matching `location`, then
    # return that scheme's path for `name` (falls through to an implicit
    # None when no scheme matches).
    for scheme in sysconfig.get_scheme_names():
        for path_type in ["platlib", "purelib"]:
            path = sysconfig.get_path(path_type, scheme)
            try:
                if samefile(path, location):
                    return sysconfig.get_path(name, scheme)
            except EnvironmentError:
                # Path may not exist on this system; try the next one.
                pass
def getusersitepackages():
    """Return the user-specific site-packages directory, caching it in
    the module-global ``USER_SITE`` (also initializes USER_BASE via
    getuserbase())."""
    global USER_SITE
    user_base = getuserbase()
    if USER_SITE is not None:
        # Already computed on a previous call.
        return USER_SITE
    from sysconfig import get_path
    if sys.platform == 'darwin':
        from sysconfig import get_config_var
        # Framework builds of Python on macOS use a dedicated user scheme.
        if get_config_var('PYTHONFRAMEWORK'):
            USER_SITE = get_path('purelib', 'osx_framework_user')
            return USER_SITE
    # Default: per-OS user scheme, e.g. 'posix_user' or 'nt_user'.
    USER_SITE = get_path('purelib', '%s_user' % os.name)
    return USER_SITE
def print_includes():
    """Print space-separated ``-I`` flags for the relevant include dirs.

    Considers the generic and platform-specific Python include paths and
    both results of ``get_include()``; duplicates are dropped while the
    original order is kept.
    """
    seen = []
    for path in (sysconfig.get_path('include'),
                 sysconfig.get_path('platinclude'),
                 get_include(),
                 get_include(True)):
        if path not in seen:
            seen.append(path)
    print(' '.join('-I' + p for p in seen))
def get_library_location(package):
    # get abs path of a package in the library, rather than locally
    library_package_paths = glob(os.path.join(get_path('platlib'), '*'))
    # Temporarily prepend the library paths so the loader resolves the
    # installed copy instead of a local checkout.
    sys.path = library_package_paths + sys.path
    package_path = os.path.dirname(get_loader(package).get_filename())
    # Restore sys.path by dropping exactly the entries we prepended.
    sys.path = sys.path[len(library_package_paths):]
    return package_path
def find_queries_path(): possible_paths = [] # Try all possible schemes where python expects data to stay. for scheme in sysconfig.get_scheme_names(): default_path = sysconfig.get_path(name='data', scheme=scheme) possible_paths.append(os.path.join(default_path, 'tract_querier', 'queries')) # Try to manage Virtual Environments on some OSes, # where data is not put the 'local' subdirectory, # but at the root of the virtual environment. if default_path.endswith('local'): possible_paths.append(os.path.join(default_path.rsplit('local', 1)[0], 'tract_querier', 'queries')) # Case where the Tract_querier is cloned from git and simply # added to the python path, without installation. possible_paths.append(os.path.abspath(os.path.join( os.path.dirname(__file__), 'data'))) paths_found = [path for path in possible_paths if os.path.exists(path)] if not paths_found: raise Exception('Default path for queries not found') return paths_found[0]
def setup(version=None, packages=None, after_install=None, scripts=None, install_requires=None, **kwargs):
    """Wrapper around ``distutils.core.setup`` with project defaults.

    Auto-fills ``packages``, ``version``, ``install_requires`` and
    ``scripts`` when not supplied, runs the installation, strips the
    ``.py``/``.pyw`` suffix from installed scripts on POSIX, and finally
    invokes the optional ``after_install(dist)`` hook.
    """
    for k, v in DEFAULT.items():
        kwargs.setdefault(k, v)
    if not packages:
        # Auto-discover packages.
        packages = setuptools.find_packages(exclude=("testing", "scripts"))
    if not version:
        # Auto-detect the version.
        version = str(Ver.read_file())
    if not install_requires:
        # Read dependencies from the requires file.
        install_requires = _get_requires()
    if not scripts:
        scripts = [str(path) for path in Path(".").glob("scripts/*")]
    # Run the installation.
    dist = distutils.core.setup(
        scripts=scripts,
        packages=packages,
        install_requires=install_requires,
        version=version,
        **kwargs
    )
    # Post-process installed scripts: drop the extension on POSIX so they
    # can be invoked without '.py'.
    if "install" in dist.have_run and os.name == "posix" and scripts:
        from sysconfig import get_path
        prefix = Path(get_path("scripts"))
        for script in scripts:
            script_name = prefix / (Path(script).name)
            # BUG FIX: Path objects have no 'lsuffix' attribute (this
            # raised AttributeError); the extension is '.suffix'.
            if script_name.suffix in [".py", ".pyw"] and script_name.exists():
                script_name.replace(script_name.with_suffix(""))
    if "install" in dist.have_run and after_install:
        after_install(dist)
def get_build_cflags(): """Synthesize a CFLAGS env var from the current python env for building of C modules.""" return '{} {} -I{}'.format( sysconfig.get_config_var('BASECFLAGS'), sysconfig.get_config_var('OPT'), sysconfig.get_path('include') )
def get_install_data_dir(inst):
    """
    :param inst: installation option
    :type inst: dict
    :return: the prefix where to install data
    :rtype: string
    """
    # Precedence for the prefix: active virtualenv > --user > --root.
    if 'VIRTUAL_ENV' in os.environ:
        inst['prefix'] = ('environment', os.environ['VIRTUAL_ENV'])
    elif 'user' in inst:
        import site
        inst['prefix'] = ('command line', site.USER_BASE)
    elif 'root' in inst:
        inst['prefix'] = ('command line',
                          os.path.join(inst['root'][1],
                                       sysconfig.get_path('data').strip(os.path.sep)))
    if 'install_data' in inst:
        # Explicit --install-data always wins.
        install_dir = inst['install_data'][1]
    elif 'prefix' in inst:
        install_dir = os.path.join(inst['prefix'][1], 'share')
    else:
        try:
            from pip.locations import distutils_scheme
        except ImportError:
            # From pip >= 10, distutils_scheme moved into the _internal
            # package. Relying on pip's scheme (rather than
            # sysconfig.get_paths()['data']) is deliberate: Debian/Ubuntu
            # patch pip to install under /usr/local while sysconfig still
            # reports /usr.
            from pip._internal.locations import distutils_scheme
        install_dir = os.path.join(distutils_scheme('')['data'], 'share')
    return install_dir
def test_user_similar(self):
    # Issue #8759: make sure the posix scheme for the users
    # is similar to the global posix_prefix one
    base = get_config_var('base')
    user = get_config_var('userbase')
    # the global scheme mirrors the distinction between prefix and
    # exec-prefix but not the user scheme, so we have to adapt the paths
    # before comparing (issue #9100)
    adapt = sys.prefix != sys.exec_prefix
    for name in ('stdlib', 'platstdlib', 'purelib', 'platlib'):
        global_path = get_path(name, 'posix_prefix')
        if adapt:
            global_path = global_path.replace(sys.exec_prefix, sys.prefix)
            base = base.replace(sys.exec_prefix, sys.prefix)
        user_path = get_path(name, 'posix_user')
        # Replace only the first occurrence of the base prefix.
        self.assertEqual(user_path, global_path.replace(base, user, 1))
def sysconfig2(): # import sysconfig module - Provide access to Python’s configuration information import sysconfig # returns an installation path corresponding to the path name print("Path Name : ", sysconfig.get_path("stdlib")) print() # returns a string that identifies the current platform. print("Current Platform : ", sysconfig.get_platform()) print() # returns the MAJOR.MINOR Python version number as a string print("Python Version Number : ", sysconfig.get_python_version()) print() # returns a tuple containing all path names print("Path Names : ", sysconfig.get_path_names()) print() # returns a tuple containing all schemes print("Scheme Names : ", sysconfig.get_scheme_names()) print() # returns the value of a single variable name. print("Variable name LIBDIR : ", sysconfig.get_config_var('LIBDIR')) # returns the value of a single variable name. print("Variable name LIBDEST : ", sysconfig.get_config_var('LIBDEST'))
def create_build_env(dirname='virtualenv'):
    """Create (if needed) and activate a virtualenv in `dirname`, then
    install the documentation toolchain (Sphinx and Breathe) into it."""
    # Create virtualenv.
    if not os.path.exists(dirname):
        check_call(['virtualenv', dirname])
    import sysconfig
    scripts_dir = os.path.basename(sysconfig.get_path('scripts'))
    # Activate the virtualenv in this process via its activate_this.py.
    activate_this_file = os.path.join(dirname, scripts_dir, 'activate_this.py')
    with open(activate_this_file) as f:
        exec(f.read(), dict(__file__=activate_this_file))
    # Import get_distribution after activating virtualenv to get info about
    # the correct packages.
    from pkg_resources import get_distribution, DistributionNotFound
    # Upgrade pip because installation of sphinx with pip 1.1 available on Travis
    # is broken (see #207) and it doesn't support the show command.
    pip_version = get_distribution('pip').version
    if LooseVersion(pip_version) < LooseVersion('1.5.4'):
        print("Updating pip")
        check_call(['pip', 'install', '--upgrade', 'pip'])
    # Upgrade distribute because installation of sphinx with distribute 0.6.24
    # available on Travis is broken (see #207).
    try:
        distribute_version = get_distribution('distribute').version
        if LooseVersion(distribute_version) <= LooseVersion('0.6.24'):
            print("Updating distribute")
            check_call(['pip', 'install', '--upgrade', 'distribute'])
    except DistributionNotFound:
        # distribute not installed at all -- nothing to upgrade.
        pass
    # Install Sphinx and Breathe.
    pip_install('sphinx-doc/sphinx', '12b83372ac9316e8cbe86e7fed889296a4cc29ee',
                min_version='1.4.1.dev20160531')
    pip_install('michaeljones/breathe', '6b1c5bb7a1866f15fc328b8716258354b10c1daa',
                min_version='4.2.0')
def find_commands(include_others=True):
    """Return a sorted tuple of conda subcommand names discovered as
    'conda-<name>' executables in the relevant script directories."""
    if include_others:
        from ..utils import sys_prefix_unfollowed
        # Look in both the unfollowed (symlink-preserving) prefix and
        # sys.prefix when they differ.
        prefixes = [sys_prefix_unfollowed()]
        if sys.prefix != prefixes[0]:
            prefixes.append(sys.prefix)
        dir_paths = [join(p, basename(sysconfig.get_path('scripts')))
                     for p in prefixes]
        # Is this still needed?
        if on_win:
            dir_paths.append('C:\\cygwin\\bin')
    else:
        dir_paths = []
    # Windows subcommands may be .exe or .bat; elsewhere any extension-less
    # 'conda-<name>' file counts.
    if on_win:
        pat = re.compile(r'conda-([\w\-]+)\.(exe|bat)$')
    else:
        pat = re.compile(r'conda-([\w\-]+)$')
    res = set()
    for dir_path in dir_paths:
        if not isdir(dir_path):
            continue
        for fn in os.listdir(dir_path):
            if not isfile(join(dir_path, fn)):
                continue
            m = pat.match(fn)
            if m:
                res.add(m.group(1))
    return tuple(sorted(res))
def find_executable(executable, include_others=True):
    """Search the conda script dirs and PATH for `executable`; return its
    full path, or None when not found."""
    # backwards compatibility
    global dir_paths
    if include_others:
        from ..utils import sys_prefix_unfollowed
        prefixes = [sys_prefix_unfollowed()]
        if sys.prefix != prefixes[0]:
            prefixes.append(sys.prefix)
        dir_paths = [join(p, basename(sysconfig.get_path('scripts')))
                     for p in prefixes]
        # Is this still needed?
        if on_win:
            dir_paths.append('C:\\cygwin\\bin')
    else:
        dir_paths = []
    # Always also search the system PATH.
    dir_paths.extend(os.environ[str('PATH')].split(os.pathsep))
    for dir_path in dir_paths:
        if on_win:
            # On Windows try the usual executable extensions, then bare name.
            for ext in ('.exe', '.bat', ''):
                path = join(dir_path, executable + ext)
                if isfile(path):
                    return path
        else:
            path = join(dir_path, executable)
            if isfile(expanduser(path)):
                return expanduser(path)
    return None
def run(self):
    """Run the default bdist_egg build, then print where exploitable.py
    was installed and how to source it from .gdbinit."""
    # run default setup procedure
    bdist_egg.run(self)
    import sys
    # Check whether setup.py is run from inside a virtualenv and get the
    # appropriate install location for exploitable.py.
    # NOTE(review): sys.real_prefix is only set by the legacy virtualenv
    # tool, not by stdlib venv -- confirm this is the intended check.
    if hasattr(sys, 'real_prefix'):
        # Inside virtualenv:
        # Use Python standard library location.
        from distutils.sysconfig import get_python_lib
        install_base_path = get_python_lib()
    else:
        # Not inside virtualenv, operating on a real Python environment:
        # Use location for Python site-specific, platform-specific files.
        from sysconfig import get_path
        install_base_path = get_path('platlib')
    path_to_exploitable = os.path.join(install_base_path,
                                       os.path.basename(self.egg_output),
                                       'exploitable', 'exploitable.py')
    # The \x1b[...m sequences are ANSI color codes (green/white banner).
    print('\x1b[0;32m**********************************************')
    print(' Install complete! Source exploitable.py from')
    print(' your .gdbinit to make it available in GDB:')
    print('')
    print(' \x1b[1;37mecho \"source %s\" >> ~/.gdbinit\x1b[0;32m' % path_to_exploitable)
    print('**********************************************\x1b[0m')
def test_get_path(self):
    # site._get_path must agree with the sysconfig purelib path for the
    # platform's user scheme.
    if sys.platform == 'darwin' and sys._framework:
        scheme = 'osx_framework_user'
    else:
        scheme = os.name + '_user'
    self.assertEqual(site._get_path(site._getuserbase()),
                     sysconfig.get_path('purelib', scheme))
def create_build_env():
    """Create and activate a fresh 'virtualenv' directory, then install
    the documentation toolchain (Sphinx and Breathe) into it."""
    # Create virtualenv.
    virtualenv_dir = 'virtualenv'
    check_call(['virtualenv', virtualenv_dir])
    import sysconfig
    scripts_dir = os.path.basename(sysconfig.get_path('scripts'))
    # Activate the virtualenv in this process via its activate_this.py.
    activate_this_file = os.path.join(virtualenv_dir, scripts_dir,
                                      'activate_this.py')
    with open(activate_this_file) as f:
        exec(f.read(), dict(__file__=activate_this_file))
    # Upgrade pip because installation of sphinx with pip 1.1 available on Travis
    # is broken (see #207) and it doesn't support the show command.
    from pkg_resources import get_distribution, DistributionNotFound
    pip_version = get_distribution('pip').version
    if LooseVersion(pip_version) < LooseVersion('1.5.4'):
        print("Updating pip")
        check_call(['pip', 'install', '--upgrade', 'pip'])
    # Upgrade distribute because installation of sphinx with distribute 0.6.24
    # available on Travis is broken (see #207).
    try:
        distribute_version = get_distribution('distribute').version
        if LooseVersion(distribute_version) <= LooseVersion('0.6.24'):
            print("Updating distribute")
            check_call(['pip', 'install', '--upgrade', 'distribute'])
    except DistributionNotFound:
        # distribute not installed at all -- nothing to upgrade.
        pass
    # Install Sphinx and Breathe.
    pip_install('fmtlib/sphinx', '12dde8afdb0a7bb5576e2656692c3478c69d8cc3',
                check_version='1.4a0.dev-20151013')
    pip_install('michaeljones/breathe', '1c9d7f80378a92cffa755084823a78bb38ee4acc')
def _path_to_executable(name, directories=None, max_depth=2):
    """Find the full path of executable ``name``.

    Search order: the supplied ``directories``, then $PATH, then the
    Python scripts directory, and finally (Windows only) a scan of the
    Program Files trees up to ``max_depth`` levels deep.  On Windows a
    '.exe' suffix is also tried for every candidate.

    Raises RuntimeError when the executable cannot be located.
    """
    using_windows = platform.system() == 'Windows'

    def _resolve(full_path):
        # Return a usable executable path (adding '.exe' on Windows) or None.
        if is_valid_executable(full_path):
            return full_path
        if using_windows and is_valid_executable(full_path + '.exe'):
            return full_path + '.exe'
        return None

    # try specified directory
    if directories is not None:
        # Accept a single directory or any list-like of directories.
        if not isinstance(directories, list):
            directories = [directories]
        for d in directories:
            for directory in valid_directories(d):
                found = _resolve(os.path.join(directory, name))
                if found:
                    return found

    # try PATH
    try:
        # BUG FIX: a missing environment variable raises KeyError, not
        # NameError, so the os.defpath fallback was unreachable before.
        PATH = os.environ['PATH']
    except KeyError:
        try:
            PATH = os.defpath
        except AttributeError:
            raise Usage("Could not determine PATH")
    for p in PATH.split(os.pathsep):
        found = _resolve(os.path.join(p, name))
        if found:
            return found

    # try python scripts
    try:
        script_path = sysconfig.get_path('scripts')
        found = _resolve(os.path.join(script_path, name))
        if found:
            return found
    except (NameError, AttributeError):
        pass

    # check if we're on Windows, and try a little harder
    if using_windows:
        # BUG FIX: itertools.ifilter is Python 2 only; use builtin filter.
        all_exes = filter(
            lambda f: f.endswith('exe'),
            itertools.chain(
                leaves(os.environ['PROGRAMFILES'], max_depth=max_depth),
                leaves(os.environ['PROGRAMFILES(X86)'], max_depth=max_depth)))
        namex = name + '.exe'
        for exe in all_exes:
            exename = os.path.split(exe)[1]
            if (exename == name or exename == namex) and \
                    is_valid_executable(exe):
                return exe  # success

    # give up
    # BUG FIX: StandardError does not exist in Python 3.
    raise RuntimeError('could not find executable {!r}'.format(name))
def _get_sip_inc_dir(self, build_cmd):
    """Get include directory for sip."""
    # First non-empty source wins: build-command option, environment
    # variable, compiled-in default, then the interpreter's include dir.
    for value in (build_cmd.sip_include_dir,
                  os.environ.get('SIP_INCLUDE_DIR'),
                  DEF_SIP_INC_DIR):
        if value:
            return value
    return sysconfig.get_path('include')
def installfrom(address, user=None):
    """Fetch and install a package from `address`; returns the result of
    install_local, or 2 after printing the error on bad input."""
    if user is None:
        # Default to a per-user install only when the user site is enabled
        # and the global site-packages dir is not writable.
        user = site.ENABLE_USER_SITE and not os.access(sysconfig.get_path('purelib'), os.W_OK)
    try:
        return install_local(fetch(*parse_address(address)), user=user)
    except BadInput as e:
        print(e, file=sys.stderr)
        return 2
def is_installed(file_name):
    """Return True when *file_name* lies under one of this interpreter's
    installation directories (the 'platlib' or 'scripts' path)."""
    from sysconfig import get_path
    install_dirs = [get_path(path_name) for path_name in ('platlib', 'scripts')]
    if os.name == 'nt':
        # Windows paths are case-insensitive; normalise before comparing.
        file_name = file_name.lower()
        install_dirs = [d.lower() for d in install_dirs]
    return any(file_name.startswith(d) for d in install_dirs)
def test_user_site(self):
    # test install with --user
    # preparing the environment for the test
    self.old_user_base = get_config_var('userbase')
    self.old_user_site = get_path('purelib', '%s_user' % os.name)
    self.tmpdir = self.mkdtemp()
    self.user_base = os.path.join(self.tmpdir, 'B')
    self.user_site = os.path.join(self.tmpdir, 'S')
    # Point the user scheme at the temp dirs for the duration of the test.
    _CONFIG_VARS['userbase'] = self.user_base
    scheme = '%s_user' % os.name
    _SCHEMES.set(scheme, 'purelib', self.user_site)

    def _expanduser(path):
        # Redirect '~' to the test tmpdir so nothing touches the real home.
        if path[0] == '~':
            path = os.path.normpath(self.tmpdir) + path[1:]
        return path
    self.old_expand = os.path.expanduser
    os.path.expanduser = _expanduser

    def cleanup():
        # Restore the patched config vars, scheme and expanduser.
        _CONFIG_VARS['userbase'] = self.old_user_base
        _SCHEMES.set(scheme, 'purelib', self.old_user_site)
        os.path.expanduser = self.old_expand
    self.addCleanup(cleanup)

    schemes = get_scheme_names()
    for key in ('nt_user', 'posix_user', 'os2_home'):
        self.assertIn(key, schemes)

    dist = Distribution({'name': 'xx'})
    cmd = install_dist(dist)

    # making sure the user option is there
    options = [name for name, short, lable in cmd.user_options]
    self.assertIn('user', options)

    # setting a value
    cmd.user = True

    # user base and site shouldn't be created yet
    self.assertFalse(os.path.exists(self.user_base))
    self.assertFalse(os.path.exists(self.user_site))

    # let's run finalize
    cmd.ensure_finalized()

    # now they should
    self.assertTrue(os.path.exists(self.user_base))
    self.assertTrue(os.path.exists(self.user_site))

    self.assertIn('userbase', cmd.config_vars)
    self.assertIn('usersite', cmd.config_vars)
def setup_virtualenv(self, home_dir=os.curdir, bootstrap=None, **opts):
    """
    Set up a virtualenv in the `directory` with options.

    If a `bootstrap` file is provided or the `virtualenv_script` exists,
    it is run as a script with positional `args` inserted into `sys.argv`.
    Otherwise, `virtualenv` is imported and `create_environment()` is
    called with any kwargs.

    Following the run of this command, dependencies can automatically be
    installed with the develop command.

    Returns the path of the python executable inside the new environment.
    """
    if bootstrap:
        virtualenv_globals = dict(__file__=bootstrap)
        # NOTE(review): execfile and dict.iteritems below are Python 2
        # only -- this code path cannot run on Python 3 as written.
        execfile(bootstrap, virtualenv_globals)
        argv = [bootstrap]
        if self.verbose == 0:
            argv.append('--quiet')
        elif self.verbose == 2:
            argv.append('--verbose')
        for option, value in opts.iteritems():
            argv.extend(['--' + option, value])
        argv.append(home_dir)
        self.logger.info(
            'Setting up a isolated Python with bootstrap script: {0}'
            .format(' '.join(argv)))
        # Run the bootstrap script's main() with our argv, restoring the
        # real sys.argv afterwards.
        orig_argv = sys.argv[:]
        try:
            sys.argv[:] = argv
            virtualenv_globals['main']()
        finally:
            sys.argv[:] = orig_argv
    else:
        try:
            import virtualenv
        except ImportError:
            raise errors.DistutilsModuleError(
                'The virtualenv module must be available if no virtualenv '
                'bootstrap script is given: {0}'.format(bootstrap))
        self.logger.info(
            'Setting up a isolated Python with module: '
            '{0}.create_environment({1} {2})'.format(
                virtualenv, repr(home_dir), ' '.join(
                    '{0}={1}'.format(item) for item in opts.items())))
        virtualenv.logger = virtualenv.Logger([(
            virtualenv.Logger.level_for_integer(2 - self.verbose),
            sys.stdout)])
        virtualenv.create_environment(home_dir, **opts)
    # Resolve the interpreter inside the new env via the scripts scheme.
    return os.path.join(
        sysconfig.get_path('scripts', vars=dict(base=home_dir)),
        'python' + sysconfig.get_config_var('EXE'))
def sysconfig_path(self, state, args, kwargs):
    """Return the named sysconfig path as a prefix-relative path."""
    if len(args) != 1:
        raise mesonlib.MesonException('sysconfig_path() requires passing the name of path to get.')
    requested = args[0]
    known_names = sysconfig.get_path_names()
    if requested not in known_names:
        raise mesonlib.MesonException('{} is not a valid path name {}.'.format(requested, known_names))
    # Get a relative path without a prefix, e.g. lib/python3.6/site-packages
    # (empty base vars, then strip the leading separator).
    empty_vars = {'base': '', 'platbase': '', 'installed_base': ''}
    relative = sysconfig.get_path(requested, vars=empty_vars)[1:]
    return ModuleReturnValue(relative, [])
def have_install_permissions():
    """Check if we can create a file in the site-packages folder.

    This works on a Win7 miniconda install, where os.access did not.
    Returns True when a scratch file could be created and removed,
    False otherwise.
    """
    try:
        fn = os.path.join(sysconfig.get_path("purelib"),
                          "test_write_" + str(os.getpid()))
        with open(fn, "w"):
            pass
        os.remove(fn)
        return True
    except OSError:
        # BUG FIX: the original caught only PermissionError, so e.g. a
        # missing site-packages directory (FileNotFoundError) escaped and
        # crashed the caller; any OSError here means "cannot install".
        return False
def __init__(self, environment, kwargs):
    """Locate the python3 dependency: try pkg-config first, then fall back
    to sysconfig on Windows or the system framework on OSX."""
    super().__init__()
    self.name = 'python3'
    self.is_found = False
    # Placeholder until a real version is discovered below.
    self.version = "3.something_maybe"
    try:
        pkgdep = PkgConfigDependency('python3', environment, kwargs)
        if pkgdep.found():
            self.cargs = pkgdep.cargs
            self.libs = pkgdep.libs
            self.version = pkgdep.get_version()
            self.is_found = True
            return
    except Exception:
        # pkg-config unavailable or lookup failed; use fallbacks below.
        pass
    if not self.is_found:
        if mesonlib.is_windows():
            # Use both generic and platform include dirs when they differ.
            inc = sysconfig.get_path('include')
            platinc = sysconfig.get_path('platinclude')
            self.cargs = ['-I' + inc]
            if inc != platinc:
                self.cargs.append('-I' + platinc)
            # Nothing exposes this directly that I could find
            basedir = sysconfig.get_config_var('base')
            vernum = sysconfig.get_config_var('py_version_nodot')
            self.libs = ['-L{}/libs'.format(basedir),
                         '-lpython{}'.format(vernum)]
            self.is_found = True
            self.version = sysconfig.get_config_var('py_version_short')
        elif mesonlib.is_osx():
            # In OSX the Python 3 framework does not have a version
            # number in its name.
            fw = ExtraFrameworkDependency('python', False)
            if fw.found():
                self.cargs = fw.get_compile_args()
                self.libs = fw.get_link_args()
                self.is_found = True
    if self.is_found:
        mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
    else:
        mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))
def get_python_lib():
    """Return the site-packages ('purelib') directory of this interpreter."""
    # BUG FIX: the value was computed but never returned, so the function
    # always yielded None.
    return sysconfig.get_path('purelib')
def get_env(self, prefix, libdir, py_prefix):
    """Build the environment-variable dict for running tools against the
    given install `prefix`/`libdir` (pkg-config, perl, gstreamer, python
    paths, linker flags, etc.).  Most of these variables are modelled on
    jhbuild."""
    # Get paths for environment variables
    includedir = os.path.join(prefix, 'include')
    bindir = os.path.join(prefix, 'bin')
    manpathdir = os.path.join(prefix, 'share', 'man')
    infopathdir = os.path.join(prefix, 'share', 'info')
    pkgconfigbin = os.path.join(self.build_tools_prefix, 'bin', 'pkg-config')
    pkgconfigdatadir = os.path.join(prefix, 'share', 'pkgconfig')
    pkgconfigdir = os.path.join(libdir, 'pkgconfig')
    typelibpath = os.path.join(libdir, 'girepository-1.0')
    xdgdatadir = os.path.join(prefix, 'share')
    xdgconfigdir = os.path.join(prefix, 'etc', 'xdg')
    xcursordir = os.path.join(prefix, 'share', 'icons')
    aclocaldir = os.path.join(prefix, 'share', 'aclocal')
    perlversionpath = os.path.join(libdir, 'perl5', 'site_perl',
                                   self._perl_version())
    if self.target_platform == Platform.WINDOWS:
        # On windows even if perl version is 5.8.8, modules can be
        # installed in 5.8
        perlversionpath = perlversionpath.rsplit('.', 1)[0]
    perl5lib = ':'.join([
        to_unixpath(os.path.join(libdir, 'perl5')),
        to_unixpath(perlversionpath)])
    gstpluginpath = os.path.join(libdir, 'gstreamer-0.10')
    gstpluginpath10 = os.path.join(libdir, 'gstreamer-1.0')
    gstregistry = os.path.join('~', '.gstreamer-0.10',
                               'cerbero-registry-%s' % self.target_arch)
    gstregistry10 = os.path.join('~', '.cache', 'gstreamer-1.0',
                                 'cerbero-registry-%s' % self.target_arch)
    gstregistry = os.path.expanduser(gstregistry)
    gstregistry10 = os.path.expanduser(gstregistry10)

    pypath = sysconfig.get_path('purelib', vars={'base': ''})
    # Must strip \/ to ensure that the path is relative
    pypath = PurePath(pypath.strip('\\/'))
    # Starting with Python 3.7.1 on Windows, each PYTHONPATH must use the
    # native path separator and must end in a path separator.
    pythonpath = [
        str(prefix / pypath) + os.sep,
        str(self.build_tools_prefix / pypath) + os.sep
    ]
    if self.platform == Platform.WINDOWS:
        # On Windows, pypath doesn't include Python version although some
        # packages (pycairo, gi, etc...) install themselves using Python
        # version scheme like on a posix system.
        # Let's add an extra path to PYTHONPATH for these libraries.
        pypath = sysconfig.get_path('purelib', 'posix_prefix', {'base': ''})
        pypath = PurePath(pypath.strip('\\/'))
        pythonpath.append(str(prefix / pypath) + os.sep)
    # Ensure python paths exists because setup.py won't create them
    for path in pythonpath:
        if self.platform == Platform.WINDOWS:
            # pythonpaths start with 'Lib' on Windows, which is extremely
            # undesirable since our libdir is 'lib'. Windows APIs are
            # case-preserving case-insensitive.
            path = path.lower()
        self._create_path(path)
    pythonpath = os.pathsep.join(pythonpath)

    if self.platform == Platform.LINUX:
        xdgdatadir += ":/usr/share:/usr/local/share"

    ldflags = '-L%s ' % libdir
    ldflags += '-Wl,-rpath,%s ' % libdir
    if ldflags not in os.environ.get('LDFLAGS', ''):
        ldflags += os.environ.get('LDFLAGS', '')

    path = os.environ.get('PATH', '')
    path = self._join_path(
        os.path.join(self.build_tools_prefix, 'bin'), path)
    # Add the prefix bindir after the build-tools bindir so that on Windows
    # binaries are run with the same libraries that they are linked with.
    if bindir not in path and self.prefix_is_executable():
        path = self._join_path(bindir, path)
    # NOTE(review): ld_library_path is seeded from `path` here, not from
    # an empty value -- looks suspicious but preserved as-is; confirm.
    ld_library_path = self._join_path(
        os.path.join(self.build_tools_prefix, 'lib'), path)
    if not self.cross_compiling():
        ld_library_path = self._join_path(ld_library_path, libdir)
    if self.extra_lib_path is not None:
        ld_library_path = self._join_path(ld_library_path,
                                          self.extra_lib_path)
    if self.toolchain_prefix is not None:
        ld_library_path = self._join_path(
            ld_library_path, os.path.join(self.toolchain_prefix, 'lib'))
        includedir = self._join_path(
            includedir, os.path.join(self.toolchain_prefix, 'include'))

    # Most of these variables are extracted from jhbuild
    env = {'LD_LIBRARY_PATH': ld_library_path,
           'LDFLAGS': ldflags,
           'C_INCLUDE_PATH': includedir,
           'CPLUS_INCLUDE_PATH': includedir,
           'PATH': path,
           'MANPATH': manpathdir,
           'INFOPATH': infopathdir,
           'PKG_CONFIG': pkgconfigbin,
           'PKG_CONFIG_PATH': '%s' % pkgconfigdatadir,
           'PKG_CONFIG_LIBDIR': '%s' % pkgconfigdir,
           'GI_TYPELIB_PATH': typelibpath,
           'XDG_DATA_DIRS': xdgdatadir,
           'XDG_CONFIG_DIRS': xdgconfigdir,
           'XCURSOR_PATH': xcursordir,
           'ACLOCAL_FLAGS': '-I%s' % aclocaldir,
           'ACLOCAL': "aclocal",
           'PERL5LIB': perl5lib,
           'GST_PLUGIN_PATH': gstpluginpath,
           'GST_PLUGIN_PATH_1_0': gstpluginpath10,
           'GST_REGISTRY': gstregistry,
           'GST_REGISTRY_1_0': gstregistry10,
           'PYTHONPATH': pythonpath,
           'MONO_PATH': os.path.join(libdir, 'mono', '4.5'),
           'MONO_GAC_PREFIX': prefix,
           'GSTREAMER_ROOT': prefix
           }
    return env
# We have to have CFFI >= 1.3.0, and this platform cannot upgrade # it. raise Exception("PyPy >= 2.6.1 is required") __version__ = read_version() from _setuplibev import libev_configure_command from _setuplibev import LIBEV_EMBED from _setuplibev import CORE from _setupares import ARES # Get access to the greenlet header file. # The sysconfig dir is not enough if we're in a virtualenv # See https://github.com/pypa/pip/issues/4610 include_dirs = [sysconfig.get_path("include")] venv_include_dir = os.path.join(sys.prefix, 'include', 'site', 'python' + sysconfig.get_python_version()) venv_include_dir = os.path.abspath(venv_include_dir) if os.path.exists(venv_include_dir): include_dirs.append(venv_include_dir) # If we're installed via buildout, and buildout also installs # greenlet, we have *NO* access to greenlet.h at all. So include # our own copy as a fallback. include_dirs.append('deps') SEMAPHORE = Extension(name="gevent.__semaphore", sources=["src/gevent/_semaphore.py"], depends=['src/gevent/__semaphore.pxd'], include_dirs=include_dirs)
def get_python_include_dir(python_version):
    """Get include directory associated with the current python interpreter.

    Starts from the INCLUDEPY config var; when that is unset or lacks
    Python.h, probes a list of candidate prefixes combined with
    'python<version>' suffixes for a directory containing Python.h.
    """
    # determine python include dir
    python_include_dir = sysconfig.get_config_var('INCLUDEPY')

    # if Python.h not found (or python_include_dir is None), try to find a
    # suitable include dir.
    # BUG FIX: the original used `or`, which (a) claimed success whenever
    # INCLUDEPY was set even if Python.h was absent, and (b) crashed in
    # os.path.join(None, ...) when INCLUDEPY was None.
    found_python_h = (
        python_include_dir is not None and
        os.path.exists(os.path.join(python_include_dir, 'Python.h')))

    if not found_python_h:
        # NOTE(opadron): these possible prefixes must be guarded against
        # AttributeErrors and KeyErrors because they each can throw on
        # different platforms or even different builds on the same platform.
        include_py = sysconfig.get_config_var('INCLUDEPY')
        include_dir = sysconfig.get_config_var('INCLUDEDIR')
        include = None
        plat_include = None
        python_inc = None
        python_inc2 = None

        try:
            include = sysconfig.get_path('include')
        except (AttributeError, KeyError):
            pass
        try:
            plat_include = sysconfig.get_path('platinclude')
        except (AttributeError, KeyError):
            pass
        try:
            # Only exists on distutils.sysconfig; AttributeError expected
            # when `sysconfig` is the stdlib module.
            python_inc = sysconfig.get_python_inc()
        except AttributeError:
            pass

        if include_py is not None:
            include_py = os.path.dirname(include_py)
        if include is not None:
            include = os.path.dirname(include)
        if plat_include is not None:
            plat_include = os.path.dirname(plat_include)
        if python_inc is not None:
            python_inc2 = os.path.join(
                python_inc, ".".join(map(str, sys.version_info[:2])))

        # Candidate prefixes, best-guess first, with Nones filtered out.
        candidate_prefixes = list(
            filter(bool, (
                include_py,
                include_dir,
                include,
                plat_include,
                python_inc,
                python_inc2,
            )))

        candidate_versions = (python_version, )
        if python_version:
            candidate_versions += ('', )

        candidates = (
            os.path.join(prefix, ''.join(('python', ver)))
            for (prefix, ver) in itertools.product(
                candidate_prefixes, candidate_versions))

        for candidate in candidates:
            if os.path.exists(os.path.join(candidate, 'Python.h')):
                # we found an include directory
                python_include_dir = candidate
                break

    # TODO(opadron): what happens if we don't find an include directory?
    # Throw SKBuildError?
    return python_include_dir
# Collect PyQt5/sip build configuration into the `pyqt` dict, plus
# pkg-config flags and default locations for other native libraries.
from PyQt5.QtCore import PYQT_CONFIGURATION
pyqt['sip_flags'] = PYQT_CONFIGURATION['sip_flags']


def get_sip_dir(q):
    # Probe the known distro-specific layouts for the PyQt5 .sip files
    # (identified by a QtWidgets subdirectory).
    for x in ('', 'Py2-PyQt5', 'PyQt5', 'sip/PyQt5'):
        base = os.path.join(q, x)
        if os.path.exists(os.path.join(base, 'QtWidgets')):
            return base
    raise EnvironmentError(
        'Failed to find the location of the PyQt5 .sip files')


pyqt['pyqt_sip_dir'] = get_sip_dir(
    sys.prefix if iswindows else os.path.join(sys.prefix, 'share', 'sip'))
pyqt['sip_inc_dir'] = sysconfig.get_path('include')

# Linker flags via pkg-config (Linux only; empty string elsewhere).
glib_flags = subprocess.check_output([PKGCONFIG, '--libs', 'glib-2.0'
                                      ]).strip() if islinux else ''
fontconfig_flags = subprocess.check_output([PKGCONFIG, '--libs', 'fontconfig'
                                            ]).strip() if islinux else ''
qt_inc = pyqt['inc']
qt_lib = pyqt['lib']
# Defaults for freetype/jpeg/podofo/chmlib; presumably overridden later
# per-platform.
ft_lib_dirs = []
ft_libs = []
ft_inc_dirs = []
jpg_libs = []
jpg_lib_dirs = []
podofo_inc = '/usr/include/podofo'
podofo_lib = '/usr/lib'
chmlib_inc_dirs = chmlib_lib_dirs = []
# Dispatch on the parsed command-line options (python-config style):
# print prefixes, include flags or link flags as requested.
opt_flags = [flag for (flag, val) in opts]

if '--help' in opt_flags:
    exit_with_usage(code=0)

for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        flags = [
            '-I' + sysconfig.get_path('include'),
            '-I' + sysconfig.get_path('platinclude')
        ]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        libs = []
        if '--embed' in opt_flags:
            # Embedding an interpreter requires linking libpython itself.
            libs.append('-lpython' + pyver + sys.abiflags)
        else:
            libpython = getvar('LIBPYTHON')
            if libpython:
                libs.append(libpython)
        libs.extend(getvar('LIBS').split() + getvar('SYSLIBS').split())
def bootstrap() -> Iterable[Tuple[str, Any]]:
    '''Install/verify this plugin's Python dependencies.

    Yields a stream of log information as ``(event, payload)`` tuples:
    ``'log'`` (str), ``'needs_install'`` (list of deps),
    ``'install_done'`` (None) and ``'cannot_update'`` (list of
    ``(dep, installed_version)``).

    Idempotent per process: guarded by the module-global DID_BOOTSTRAP.
    '''
    global DID_BOOTSTRAP
    if DID_BOOTSTRAP:
        return
    DID_BOOTSTRAP = True

    # Add custom folder to search path.
    for path in site.getsitepackages(prefixes=[INSTALL_PREFIX]):
        if not path.startswith(INSTALL_PREFIX):
            # On macOS, some global paths are added as well which we don't want.
            continue
        yield ('log', 'Added {} as module search path'.format(path))
        # Make sure directory exists as it may otherwise be ignored later on
        # when we need it. This is because Python seems to cache whether
        # module search paths do not exist to avoid redundant lookups.
        os.makedirs(path, exist_ok=True)
        site.addsitedir(path)
        # pkg_resources doesn't listen to changes on sys.path.
        pkg_resources.working_set.add_entry(path)

    # pip tries to install packages even if they are installed already in the
    # custom folder. To avoid that, we do the check ourselves.
    # However, if any package is missing, we re-install all packages.
    # See the comment below on why this is necessary.
    installed = []      # [(dep, is_local)] -- deps already satisfied
    needs_install = []  # deps to (re-)install via pip
    cannot_update = []  # [(dep, version)] -- outdated but not ours to touch
    for dep in DEPS:
        try:
            # Will raise DistributionNotFound if not found.
            location = pkg_resources.get_distribution(dep.name).location
            is_local = Path(INSTALL_PREFIX) in Path(location).parents
            if not dep.min:
                installed.append((dep, is_local))
            else:
                # There is a minimum version constraint, check that.
                try:
                    # Will raise VersionConflict on version mismatch.
                    pkg_resources.get_distribution('{}>={}'.format(dep.name, dep.min))
                    installed.append((dep, is_local))
                except pkg_resources.VersionConflict as exc:
                    # Re-install is only possible if the previous version was installed by us.
                    if is_local:
                        needs_install.append(dep)
                    else:
                        # Continue without re-installing this package and hope for the best.
                        # cannot_update is populated which can later be used to notify the user
                        # that a newer version is required and has to be manually updated.
                        cannot_update.append((dep, exc.dist.version))
                        installed.append((dep, False))
        except pkg_resources.DistributionNotFound:
            needs_install.append(dep)

    if needs_install:
        yield ('needs_install', needs_install)
        yield ('log', 'Package directory: ' + INSTALL_PREFIX)

        # Remove everything as we can't upgrade packages when using --prefix
        # which may lead to multiple pkg-0.20.3.dist-info folders for different versions
        # and that would lead to false positives with pkg_resources.get_distribution().
        if os.path.exists(INSTALL_PREFIX):
            # Some randomness for the temp folder name, in case an old one is
            # still lying around for some reason.
            rnd = random.randint(10000, 99999)
            tmp_dir = INSTALL_PREFIX + '_tmp_{}'.format(rnd)
            # On Windows, rename + delete allows to re-create the folder immediately,
            # otherwise it may still be locked and we get "Permission denied" errors.
            os.rename(INSTALL_PREFIX, tmp_dir)
            shutil.rmtree(tmp_dir)
        os.makedirs(INSTALL_PREFIX, exist_ok=True)

        # Determine packages to install.
        # Since we just cleaned all packages installed by us, including those
        # that didn't need a re-install, re-install those as well.
        installed_local = [dep for dep, is_local in installed if is_local]
        req_specs = []
        for dep in needs_install + installed_local:
            if dep.install.startswith('http'):
                req_specs.append(dep.install)
            else:
                req_specs.append('{}=={}'.format(dep.name, dep.install))

        # Locate python in order to invoke pip.
        python = os.path.join(sysconfig.get_path('scripts'), 'python3')

        # Handle the special Python environment bundled with QGIS on Windows.
        try:
            import qgis
        except ImportError:  # FIX: was a bare `except:`, which also swallowed
            qgis = None      # SystemExit/KeyboardInterrupt.
        if os.name == 'nt' and qgis:
            # sys.executable will be one of two things:
            # within QGIS: C:\Program Files\QGIS 3.0\bin\qgis-bin-g7.4.0.exe
            # within python-qgis.bat: C:\PROGRA~1\QGIS 3.0\apps\Python36\python.exe
            exe_path = sys.executable
            exe_dir = os.path.dirname(exe_path)
            if os.path.basename(exe_path) == 'python.exe':
                python_qgis_dir = os.path.join(exe_dir, os.pardir, os.pardir, 'bin')
            else:
                python_qgis_dir = exe_dir
            python = os.path.abspath(os.path.join(python_qgis_dir, 'python-qgis.bat'))
            if not os.path.isfile(python):
                python = os.path.abspath(os.path.join(python_qgis_dir, 'python-qgis-ltr.bat'))

        # Must use a single pip install invocation, otherwise dependencies of newly
        # installed packages get re-installed and we couldn't pin versions.
        # E.g. 'pip install pandas==0.20.3' will install pandas, but doing
        # 'pip install xarray==0.10.0' after that would re-install pandas (latest version)
        # as it's a dependency of xarray.
        # This is all necessary due to limitations of pip's --prefix option.
        args = [python, '-m', 'pip', 'install', '--prefix', INSTALL_PREFIX] + req_specs
        yield ('log', ' '.join(args))
        for line in run_subprocess(args, LOG_PATH):
            yield ('log', line)

        yield ('install_done', None)

    if cannot_update:
        # NOTE(review): this yields the full cannot_update list once per
        # entry, as in the original; consumers appear to rely on the list
        # payload -- TODO confirm.
        for dep, _ in cannot_update:
            yield ('cannot_update', cannot_update)
def build_project(args):
    """
    Build a dev version of the project.

    Parameters
    ----------
    args : argparse.Namespace
        Uses ``debug``, ``gcov``, ``parallel`` and ``show_build_log``.

    Returns
    -------
    site_dir
        site-packages directory where it was installed
    """
    root_ok = [
        os.path.exists(os.path.join(ROOT_DIR, fn)) for fn in PROJECT_ROOT_FILES
    ]
    if not all(root_ok):
        print("To build the project, run runtests.py in "
              "git checkout or unpacked source")
        sys.exit(1)

    dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')

    env = dict(os.environ)
    cmd = [sys.executable, 'setup.py']

    # Always use ccache, if installed
    env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))
    if args.debug or args.gcov:
        # assume everyone uses gcc/gfortran
        env['OPT'] = '-O0 -ggdb'
        env['FOPT'] = '-O0 -ggdb'
        if args.gcov:
            from sysconfig import get_config_vars
            cvars = get_config_vars()
            env['OPT'] = '-O0 -ggdb'
            env['FOPT'] = '-O0 -ggdb'
            env['CC'] = env.get('CC', cvars['CC']) + ' --coverage'
            env['CXX'] = env.get('CXX', cvars['CXX']) + ' --coverage'
            env['F77'] = 'gfortran --coverage '
            env['F90'] = 'gfortran --coverage '
            env['LDSHARED'] = cvars['LDSHARED'] + ' --coverage'
            env['LDFLAGS'] = " ".join(cvars['LDSHARED'].split()[1:]) +\
                ' --coverage'

    cmd += ['build']
    if args.parallel > 1:
        cmd += ['-j', str(args.parallel)]
    # Install; avoid producing eggs so SciPy can be imported from dst_dir.
    # BUGFIX: the record path was previously built by plain concatenation
    # (dst_dir + 'tmp_install_log.txt'), which dropped the path separator
    # and wrote the record file *next to* dst_dir instead of inside it.
    cmd += [
        'install', '--prefix=' + dst_dir,
        '--single-version-externally-managed',
        '--record=' + os.path.join(dst_dir, 'tmp_install_log.txt')
    ]

    from sysconfig import get_path
    py_path = get_path('platlib')
    site_dir = os.path.join(dst_dir, get_path_suffix(py_path, 3))

    # easy_install won't install to a path that Python by default cannot see
    # and isn't on the PYTHONPATH. Plus, it has to exist.
    if not os.path.exists(site_dir):
        os.makedirs(site_dir)
    env['PYTHONPATH'] = os.pathsep.join((site_dir, env.get('PYTHONPATH', '')))

    log_filename = os.path.join(ROOT_DIR, 'build.log')
    start_time = datetime.datetime.now()

    if args.show_build_log:
        ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
    else:
        # (removed a redundant re-assignment of log_filename here)
        print("Building, see build.log...")
        with open(log_filename, 'w') as log:
            p = subprocess.Popen(cmd, env=env, stdout=log, stderr=log,
                                 cwd=ROOT_DIR)
            try:
                # Wait for it to finish, and print something to indicate the
                # process is alive, but only if the log file has grown (to
                # allow continuous integration environments kill a hanging
                # process accurately if it produces no output)
                last_blip = time.time()
                last_log_size = os.stat(log_filename).st_size
                while p.poll() is None:
                    time.sleep(0.5)
                    if time.time() - last_blip > 60:
                        log_size = os.stat(log_filename).st_size
                        if log_size > last_log_size:
                            elapsed = datetime.datetime.now() - start_time
                            print(" ... build in progress ({0} "
                                  "elapsed)".format(elapsed))
                            last_blip = time.time()
                            last_log_size = log_size

                ret = p.wait()
            except:  # noqa: E722
                # terminate the child before propagating (incl. KeyboardInterrupt)
                p.terminate()
                raise

    elapsed = datetime.datetime.now() - start_time

    if ret == 0:
        print("Build OK ({0} elapsed)".format(elapsed))
    else:
        if not args.show_build_log:
            with open(log_filename, 'r') as f:
                print(f.read())
        print("Build failed! ({0} elapsed)".format(elapsed))
        sys.exit(1)
    return site_dir
def test_get_path(self): # XXX make real tests here for scheme in _INSTALL_SCHEMES: for name in _INSTALL_SCHEMES[scheme]: res = get_path(name, scheme)
        # NOTE(review): this fragment begins inside an enclosing function
        # (its `def` is above this chunk); the bare `return` below belongs
        # to it.
        try:
            src_prefix = os.path.join(os.getcwd(), 'src')
        except OSError:
            # In case the current working directory has been renamed or deleted
            sys.exit(
                "The folder you are executing pip from can no longer be found."
            )

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(src_prefix)


# --- module-level path constants ---
# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_path("purelib")  # type: Optional[str]

# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()

try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE

if WINDOWS:
    # Scripts directory for console entry points on Windows.
    bin_py = os.path.join(sys.prefix, 'Scripts')
    def __init__(self):
        """Snapshot everything we need to know about the running interpreter.

        Collects implementation/version/prefix/path/sysconfig information so
        the data can later be serialized and compared across interpreters.
        """
        # Coerce bytes to str so the snapshot is uniformly text.
        def u(v):
            return v.decode("utf-8") if isinstance(v, bytes) else v

        def abs_path(v):
            return None if v is None else os.path.abspath(
                v)  # unroll relative elements from path (e.g. ..)

        # qualifies the python
        self.platform = u(sys.platform)
        self.implementation = u(platform.python_implementation())
        if self.implementation == "PyPy":
            self.pypy_version_info = tuple(u(i) for i in sys.pypy_version_info)

        # this is a tuple in earlier, struct later, unify to our own named tuple
        self.version_info = VersionInfo(*list(u(i) for i in sys.version_info))
        self.architecture = 64 if sys.maxsize > 2**32 else 32

        self.version = u(sys.version)
        self.os = u(os.name)

        # information about the prefix - determines python home
        self.prefix = u(abs_path(getattr(sys, "prefix", None)))  # prefix we think
        self.base_prefix = u(abs_path(getattr(sys, "base_prefix", None)))  # venv
        self.real_prefix = u(abs_path(getattr(sys, "real_prefix", None)))  # old virtualenv

        # information about the exec prefix - dynamic stdlib modules
        self.base_exec_prefix = u(
            abs_path(getattr(sys, "base_exec_prefix", None)))
        self.exec_prefix = u(abs_path(getattr(sys, "exec_prefix", None)))

        self.executable = u(abs_path(
            sys.executable))  # the executable we were invoked via
        self.original_executable = u(abs_path(
            self.executable))  # the executable as known by the interpreter
        self.system_executable = self._fast_get_system_executable(
        )  # the executable we are based of (if available)

        # Probe for venv support without importing it permanently.
        try:
            __import__("venv")
            has = True
        except ImportError:
            has = False
        self.has_venv = has
        self.path = [u(i) for i in sys.path]
        self.file_system_encoding = u(sys.getfilesystemencoding())
        self.stdout_encoding = u(getattr(sys.stdout, "encoding", None))

        # Raw (unexpanded) sysconfig path templates, keyed by path name.
        self.sysconfig_paths = {
            u(i): u(sysconfig.get_path(i, expand=False))
            for i in sysconfig.get_path_names()
        }
        # Collect every {var} placeholder referenced by those templates so we
        # can record just the config vars needed to expand them.
        config_var_keys = set()
        for element in self.sysconfig_paths.values():
            for k in _CONF_VAR_RE.findall(element):
                config_var_keys.add(u(k[1:-1]))
        config_var_keys.add("PYTHONFRAMEWORK")

        self.sysconfig_vars = {
            u(i): u(sysconfig.get_config_var(i) or "")
            for i in config_var_keys
        }
        if self.implementation == "PyPy" and sys.version_info.major == 2:
            self.sysconfig_vars[u"implementation_lower"] = u"python"

        self.distutils_install = {
            u(k): u(v)
            for k, v in self._distutils_install().items()
        }
        # Resolve the *system* stdlib by substituting our prefix with the
        # system prefix in the recorded config vars.
        self.system_stdlib = self.sysconfig_path(
            "stdlib",
            {
                k: (self.system_prefix if v.startswith(self.prefix) else v)
                for k, v in self.sysconfig_vars.items()
            },
        )
        self.max_size = getattr(sys, "maxsize", getattr(sys, "maxint", None))
        self._creators = None
def resolved_active_python_script(self, script): return resolve_path(sysconfig.get_path('scripts'), script)
    def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
        """Build the set of paths to remove when uninstalling *dist*.

        Returns an empty set (no-op) when the distribution is not installed,
        lives outside the current environment, or is part of the stdlib.
        The elif chain below dispatches on the installation format; its
        ordering is load-bearing (see comment inline).
        """
        dist_location = dist.location
        info_location = dist.info_location
        if dist_location is None:
            logger.info(
                "Not uninstalling %s since it is not installed",
                dist.canonical_name,
            )
            return cls(dist)

        normalized_dist_location = normalize_path(dist_location)
        if not dist.local:
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.canonical_name,
                normalized_dist_location,
                sys.prefix,
            )
            return cls(dist)

        # Never touch the standard library (filter out None paths first).
        if normalized_dist_location in {
            p
            for p in {sysconfig.get_path("stdlib"),
                      sysconfig.get_path("platstdlib")}
            if p
        }:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.canonical_name,
                normalized_dist_location,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path_from_location(dist.raw_name)

        # Distribution is installed with metadata in a "flat" .egg-info
        # directory. This means it is not a modern .dist-info installation, an
        # egg, or legacy editable.
        setuptools_flat_installation = (
            dist.installed_with_setuptools_egg_info
            and info_location is not None
            and os.path.exists(info_location)
            # If dist is editable and the location points to a ``.egg-info``,
            # we are in fact in the legacy editable case.
            and not info_location.endswith(f"{dist.setuptools_filename}.egg-info"))

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if setuptools_flat_installation:
            if info_location is not None:
                paths_to_remove.add(info_location)
            installed_files = dist.iter_declared_entries()
            if installed_files is not None:
                for installed_file in installed_files:
                    paths_to_remove.add(
                        os.path.join(dist_location, installed_file))
        # FIXME: need a test for this elif block
        # occurs with --single-version-externally-managed/--record outside
        # of pip
        elif dist.is_file("top_level.txt"):
            try:
                namespace_packages = dist.read_text(
                    "namespace_packages.txt")
            except FileNotFoundError:
                namespaces = []
            else:
                namespaces = namespace_packages.splitlines(keepends=False)
            for top_level_pkg in [
                    p for p in dist.read_text("top_level.txt").splitlines()
                    if p and p not in namespaces
            ]:
                path = os.path.join(dist_location, top_level_pkg)
                paths_to_remove.add(path)
                # Also remove the compiled artifacts next to the source.
                paths_to_remove.add(f"{path}.py")
                paths_to_remove.add(f"{path}.pyc")
                paths_to_remove.add(f"{path}.pyo")
        elif dist.installed_by_distutils:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.raw_name,
                ))
        elif dist.installed_as_egg:
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist_location)
            easy_install_egg = os.path.split(dist_location)[1]
            easy_install_pth = os.path.join(
                os.path.dirname(dist_location),
                "easy-install.pth",
            )
            paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
        elif dist.installed_with_dist_info:
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)
        elif develop_egg_link:
            # PEP 660 modern editable is handled in the ``.dist-info`` case
            # above, so this only covers the setuptools-style editable.
            with open(develop_egg_link) as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert link_pointer == dist_location, (
                f"Egg-link {link_pointer} does not match installed location of "
                f"{dist.raw_name} (at {dist_location})")
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            "easy-install.pth")
            paths_to_remove.add_pth(easy_install_pth, dist_location)
        else:
            logger.debug(
                "Not sure how to uninstall: %s - Check: %s",
                dist,
                dist_location,
            )

        if dist.in_usersite:
            bin_dir = get_bin_user()
        else:
            bin_dir = get_bin_prefix()

        # find distutils scripts= scripts
        try:
            for script in dist.iterdir("scripts"):
                paths_to_remove.add(os.path.join(bin_dir, script.name))
                if WINDOWS:
                    paths_to_remove.add(
                        os.path.join(bin_dir, f"{script.name}.bat"))
        except (FileNotFoundError, NotADirectoryError):
            pass

        # find console_scripts and gui_scripts
        def iter_scripts_to_remove(
            dist: BaseDistribution,
            bin_dir: str,
        ) -> Iterator[str]:
            for entry_point in dist.iter_entry_points():
                if entry_point.group == "console_scripts":
                    yield from _script_names(bin_dir, entry_point.name, False)
                elif entry_point.group == "gui_scripts":
                    yield from _script_names(bin_dir, entry_point.name, True)

        for s in iter_scripts_to_remove(dist, bin_dir):
            paths_to_remove.add(s)

        return paths_to_remove
    def get_env(self, prefix, libdir, py_prefix):
        """Build the environment dict used to run build commands.

        Parameters
        ----------
        prefix : str
            Installation prefix of the target environment.
        libdir : str
            Library directory under that prefix.
        py_prefix : str
            Python prefix (unused in the visible body -- TODO confirm).

        Returns
        -------
        dict
            Environment variables (jhbuild-style) for the build shell.
        """
        # Get paths for environment variables
        includedir = os.path.join(prefix, 'include')
        bindir = os.path.join(prefix, 'bin')
        manpathdir = os.path.join(prefix, 'share', 'man')
        infopathdir = os.path.join(prefix, 'share', 'info')
        pkgconfigbin = os.path.join(self.build_tools_prefix, 'bin', 'pkg-config')
        pkgconfigdatadir = os.path.join(prefix, 'share', 'pkgconfig')
        pkgconfigdir = os.path.join(libdir, 'pkgconfig')
        typelibpath = os.path.join(libdir, 'girepository-1.0')
        xdgdatadir = os.path.join(prefix, 'share')
        xdgconfigdir = os.path.join(prefix, 'etc', 'xdg')
        xcursordir = os.path.join(prefix, 'share', 'icons')
        aclocaldir = os.path.join(prefix, 'share', 'aclocal')
        perlversionpath = os.path.join(libdir, 'perl5', 'site_perl',
                                       self._perl_version())
        if self.target_platform == Platform.WINDOWS:
            # On windows even if perl version is 5.8.8, modules can be
            # installed in 5.8
            perlversionpath = perlversionpath.rsplit('.', 1)[0]

        perl5lib = ':'.join([
            to_unixpath(os.path.join(libdir, 'perl5')),
            to_unixpath(perlversionpath)
        ])
        gstpluginpath = os.path.join(libdir, 'gstreamer-0.10')
        gstpluginpath10 = os.path.join(libdir, 'gstreamer-1.0')
        gstregistry = os.path.join('~', '.gstreamer-0.10',
                                   'cerbero-registry-%s' % self.target_arch)
        gstregistry10 = os.path.join('~', '.cache', 'gstreamer-1.0',
                                     'cerbero-registry-%s' % self.target_arch)
        gstregistry = os.path.expanduser(gstregistry)
        gstregistry10 = os.path.expanduser(gstregistry10)

        # Relative site-packages layout ('lib/pythonX.Y/site-packages' style),
        # obtained by expanding the scheme against an empty base.
        pypath = sysconfig.get_path('purelib', vars={'base': ''})
        # Ensure that / is the path separator and not \, then strip /
        pypath = PurePath(pypath).as_posix().strip('/')
        # Ensure python paths exists because setup.py won't create them
        pythonpath = [
            os.path.join(prefix, pypath),
            os.path.join(self.build_tools_prefix, pypath)
        ]
        for path in pythonpath:
            self._create_path(path)
        pythonpath = os.pathsep.join(pythonpath)

        if self.platform == Platform.LINUX:
            xdgdatadir += ":/usr/share:/usr/local/share"

        # Prepend our -L flag only if not already present in LDFLAGS.
        ldflags = '-L%s ' % libdir
        if ldflags not in os.environ.get('LDFLAGS', ''):
            ldflags += os.environ.get('LDFLAGS', '')

        path = os.environ.get('PATH', '')
        if bindir not in path and self.prefix_is_executable():
            path = self._join_path(bindir, path)
        path = self._join_path(
            os.path.join(self.build_tools_prefix, 'bin'), path)

        if not self.cross_compiling():
            ld_library_path = libdir
        else:
            ld_library_path = ""
        if self.extra_lib_path is not None:
            ld_library_path = self._join_path(ld_library_path,
                                              self.extra_lib_path)
        if self.toolchain_prefix is not None:
            ld_library_path = self._join_path(
                ld_library_path, os.path.join(self.toolchain_prefix, 'lib'))
            includedir = self._join_path(
                includedir, os.path.join(self.toolchain_prefix, 'include'))

        # Most of these variables are extracted from jhbuild
        env = {
            'LD_LIBRARY_PATH': ld_library_path,
            'LDFLAGS': ldflags,
            'C_INCLUDE_PATH': includedir,
            'CPLUS_INCLUDE_PATH': includedir,
            'PATH': path,
            'MANPATH': manpathdir,
            'INFOPATH': infopathdir,
            'PKG_CONFIG': pkgconfigbin,
            'PKG_CONFIG_PATH': '%s' % pkgconfigdatadir,
            'PKG_CONFIG_LIBDIR': '%s' % pkgconfigdir,
            'GI_TYPELIB_PATH': typelibpath,
            'XDG_DATA_DIRS': xdgdatadir,
            'XDG_CONFIG_DIRS': xdgconfigdir,
            'XCURSOR_PATH': xcursordir,
            'ACLOCAL_FLAGS': '-I%s' % aclocaldir,
            'ACLOCAL': "aclocal",
            'PERL5LIB': perl5lib,
            'GST_PLUGIN_PATH': gstpluginpath,
            'GST_PLUGIN_PATH_1_0': gstpluginpath10,
            'GST_REGISTRY': gstregistry,
            'GST_REGISTRY_1_0': gstregistry10,
            'PYTHONPATH': pythonpath,
            'MONO_PATH': os.path.join(libdir, 'mono', '4.5'),
            'MONO_GAC_PREFIX': prefix,
            'GSTREAMER_ROOT': prefix
        }

        return env
def c_include(header):
    """Load (and JIT) header file <header> into Cling."""
    # The #include must be on its own line for the C preprocessor.
    gbl.gInterpreter.Declare("""extern "C" {
#include "%s"
}""" % header)


def add_include_path(path):
    """Add a path to the include paths available to Cling."""
    if not os.path.isdir(path):
        raise OSError("no such directory: %s" % path)
    gbl.gInterpreter.AddIncludePath(path)


# add access to Python C-API headers
add_include_path(sysconfig.get_path('include'))


def add_autoload_map(fname):
    """Add the entries from a autoload (.rootmap) file to Cling."""
    if not os.path.isfile(fname):
        raise OSError("no such file: %s" % fname)
    gbl.gInterpreter.LoadLibraryMap(fname)


def _get_name(tt):
    # Normalize a type or string to its C++ name.
    # NOTE(review): this def continues past the end of this chunk.
    if type(tt) == str:
        return tt
    try:
        ttname = tt.__cppname__
    except AttributeError:
# FIXME: keep src in cwd for now (it is not a temporary folder) try: src_prefix = os.path.join(os.getcwd(), 'src') except OSError: # In case the current working directory has been renamed or deleted sys.exit( "The folder you are executing pip from can no longer be found.") # under macOS + virtualenv sys.prefix is not properly resolved # it is something like /path/to/python/bin/.. # Note: using realpath due to tmp dirs on OSX being symlinks src_prefix = os.path.abspath(src_prefix) # FIXME doesn't account for venv linked to global site-packages site_packages = sysconfig.get_path("purelib") # This is because of a bug in PyPy's sysconfig module, see # https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths # for more information. if platform.python_implementation().lower() == "pypy": site_packages = distutils_sysconfig.get_python_lib() try: # Use getusersitepackages if this is present, as it ensures that the # value is initialised properly. user_site = site.getusersitepackages() except AttributeError: user_site = site.USER_SITE user_dir = expanduser('~') if WINDOWS: bin_py = os.path.join(sys.prefix, 'Scripts') bin_user = os.path.join(user_site, 'Scripts')
    # NOTE(review): this first line is the tail of a function whose `def`
    # is above this chunk.
    gbl.gInterpreter.Declare('#include "%s"' % header)


def c_include(header):
    """Load (and JIT) header file <header> into Cling."""
    # The #include must be on its own line for the C preprocessor.
    gbl.gInterpreter.Declare("""extern "C" {
#include "%s"
}""" % header)


def add_include_path(path):
    """Add a path to the include paths available to Cling."""
    if not os.path.isdir(path):
        raise OSError("no such directory: %s" % path)
    gbl.gInterpreter.AddIncludePath(path)


# add access to Python C-API headers; ask for the platform-appropriate scheme
apipath = sysconfig.get_path(
    'include', 'posix_prefix' if os.name == 'posix' else os.name)
if os.path.exists(apipath):
    add_include_path(apipath)
elif ispypy:
    # possibly structured without 'pythonx.y' in path
    apipath = os.path.dirname(apipath)
    if os.path.exists(apipath) and os.path.exists(
            os.path.join(apipath, 'Python.h')):
        add_include_path(apipath)

# add access to extra headers for dispatcher (CPyCppyy only (?))
if not ispypy:
    if 'CPPYY_API_PATH' in os.environ:
        apipath_extra = os.environ['CPPYY_API_PATH']
    else:
        apipath_extra = os.path.join(
            os.path.dirname(apipath), 'site', 'python' + sys.version[:3])

    # NOTE(review): the body of this `if` continues past this chunk.
    if not os.path.exists(os.path.join(apipath_extra, 'CPyCppyy')):
def get_build_cflags():
    """Synthesize a CFLAGS env var from the current python env for building
    of C modules.

    Returns
    -------
    str
        ``"<BASECFLAGS> <OPT> -I<include_path>"``.
    """
    # get_config_var() may return None (e.g. BASECFLAGS/OPT are unset on
    # Windows builds); substitute '' so we don't emit the literal "None".
    return '{} {} -I{}'.format(
        sysconfig.get_config_var('BASECFLAGS') or '',
        sysconfig.get_config_var('OPT') or '',
        sysconfig.get_path('include'))
import importlib.util
import os
import sys
from site import check_enableusersite

if sys.version_info[0] < 3:
    print("Python 3 required")
    sys.exit(1)

# Handle both normal environments and virtualenvs
try:
    from site import getusersitepackages, getsitepackages
except ImportError:
    from sysconfig import get_path
    # BUGFIX: mirror the shapes of the real site API -- getsitepackages()
    # returns a *list* of paths, getusersitepackages() a single str. The
    # previous fallback returned a bare str for both, so callers iterating
    # getsitepackages() would have iterated characters.
    getsitepackages = lambda: [get_path('purelib')]
    getusersitepackages = lambda: get_path('purelib')

# Hacky command line parsing to accept a silent-install -s flag like linux-setup.sh:
INTERACTIVE = True
if '-s' in sys.argv[1:]:
    INTERACTIVE = False
INSTALL_VENV = False
if '-v' in sys.argv[1:]:
    if os.environ.get('VIRTUAL_ENV'):
        INSTALL_VENV = True
    else:
        print(
            "Error: venv installation requested without an active python3 venv."
        )
    def from_dist(cls, dist):
        """Build the set of paths to remove when uninstalling *dist*.

        Returns an empty set (no-op) for distributions outside the current
        environment or inside the stdlib. The elif chain dispatches on the
        installation format; its ordering is load-bearing (see inline note).
        """
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        # Never touch the standard library (filter out None paths first).
        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
        # FIXME: need a test for this elif block
        # occurs with --single-version-externally-managed/--record outside
        # of pip
        elif dist.has_metadata('top_level.txt'):
            if dist.has_metadata('namespace_packages.txt'):
                namespaces = dist.get_metadata('namespace_packages.txt')
            else:
                namespaces = []
            for top_level_pkg in [
                    p for p
                    in dist.get_metadata('top_level.txt').splitlines()
                    if p and p not in namespaces]:
                path = os.path.join(dist.location, top_level_pkg)
                paths_to_remove.add(path)
                # Also remove compiled artifacts next to the source.
                paths_to_remove.add(path + '.py')
                paths_to_remove.add(path + '.pyc')
                paths_to_remove.add(path + '.pyo')
        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )
        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)
        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)
        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
    def generate(
        self,
        version: Optional[str],
        cmake_python_library: Optional[str],
        build_python: bool,
        build_test: bool,
        my_env: Dict[str, str],
        rerun: bool,
    ) -> None:
        "Runs cmake to generate native build files."

        # A forced rerun drops the cache so CMake re-configures from scratch.
        if rerun and os.path.isfile(self._cmake_cache_file):
            os.remove(self._cmake_cache_file)

        ninja_build_file = os.path.join(self.build_dir, "build.ninja")
        if os.path.exists(self._cmake_cache_file) and not (
                USE_NINJA and not os.path.exists(ninja_build_file)):
            # Everything's in place. Do not rerun.
            return

        args = []
        if USE_NINJA:
            # Avoid conflicts in '-G' and the `CMAKE_GENERATOR`
            os.environ["CMAKE_GENERATOR"] = "Ninja"
            args.append("-GNinja")
        elif IS_WINDOWS:
            generator = os.getenv("CMAKE_GENERATOR", "Visual Studio 15 2017")
            supported = ["Visual Studio 15 2017", "Visual Studio 16 2019"]
            if generator not in supported:
                print("Unsupported `CMAKE_GENERATOR`: " + generator)
                print("Please set it to one of the following values: ")
                print("\n".join(supported))
                sys.exit(1)
            args.append("-G" + generator)
            toolset_dict = {}
            toolset_version = os.getenv("CMAKE_GENERATOR_TOOLSET_VERSION")
            if toolset_version is not None:
                toolset_dict["version"] = toolset_version
                curr_toolset = os.getenv("VCToolsVersion")
                if curr_toolset is None:
                    print(
                        "When you specify `CMAKE_GENERATOR_TOOLSET_VERSION`, you must also "
                        "activate the vs environment of this version. Please read the notes "
                        "in the build steps carefully.")
                    sys.exit(1)
            if IS_64BIT:
                if platform.machine() == "ARM64":
                    args.append("-A ARM64")
                else:
                    args.append("-Ax64")
                    toolset_dict["host"] = "x64"
            if toolset_dict:
                toolset_expr = ",".join(
                    ["{}={}".format(k, v) for k, v in toolset_dict.items()])
                args.append("-T" + toolset_expr)

        # Repository root (three levels above this file).
        base_dir = os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        install_dir = os.path.join(base_dir, "torch")

        _mkdir_p(install_dir)
        _mkdir_p(self.build_dir)

        # Store build options that are directly stored in environment variables
        build_options: Dict[str, CMakeValue] = {}

        # Build options that do not start with "BUILD_", "USE_", or "CMAKE_" and are directly controlled by env vars.
        # This is a dict that maps environment variables to the corresponding variable name in CMake.
        additional_options = {
            # Key: environment variable name. Value: Corresponding variable name to be passed to CMake. If you are
            # adding a new build option to this block: Consider making these two names identical and adding this option
            # in the block below.
            "_GLIBCXX_USE_CXX11_ABI": "GLIBCXX_USE_CXX11_ABI",
            "CUDNN_LIB_DIR": "CUDNN_LIBRARY",
            "USE_CUDA_STATIC_LINK": "CAFFE2_STATIC_LINK_CUDA",
        }
        additional_options.update({
            # Build options that have the same environment variable name and CMake variable name and that do not start
            # with "BUILD_", "USE_", or "CMAKE_". If you are adding a new build option, also make sure you add it to
            # CMakeLists.txt.
            var: var
            for var in (
                "BLAS",
                "WITH_BLAS",
                "BUILDING_WITH_TORCH_LIBS",
                "CUDA_HOST_COMILER",
                "CUDA_NVCC_EXECUTABLE",
                "CUDA_SEPARABLE_COMPILATION",
                "CUDNN_LIBRARY",
                "CUDNN_INCLUDE_DIR",
                "CUDNN_ROOT",
                "EXPERIMENTAL_SINGLE_THREAD_POOL",
                "INSTALL_TEST",
                "JAVA_HOME",
                "INTEL_MKL_DIR",
                "INTEL_OMP_DIR",
                "MKL_THREADING",
                "MKLDNN_CPU_RUNTIME",
                "MSVC_Z7_OVERRIDE",
                "CAFFE2_USE_MSVC_STATIC_RUNTIME",
                "Numa_INCLUDE_DIR",
                "Numa_LIBRARIES",
                "ONNX_ML",
                "ONNX_NAMESPACE",
                "ATEN_THREADING",
                "WERROR",
                "OPENSSL_ROOT_DIR",
                "STATIC_DISPATCH_BACKEND",
                "SELECTED_OP_LIST",
            )
        })

        # Aliases which are lower priority than their canonical option
        low_priority_aliases = {
            "CUDA_HOST_COMPILER": "CMAKE_CUDA_HOST_COMPILER",
            "CUDAHOSTCXX": "CUDA_HOST_COMPILER",
            "CMAKE_CUDA_HOST_COMPILER": "CUDA_HOST_COMPILER",
            "CMAKE_CUDA_COMPILER": "CUDA_NVCC_EXECUTABLE",
            "CUDACXX": "CUDA_NVCC_EXECUTABLE",
        }
        for var, val in my_env.items():
            # We currently pass over all environment variables that start with "BUILD_", "USE_", and "CMAKE_". This is
            # because we currently have no reliable way to get the list of all build options we have specified in
            # CMakeLists.txt. (`cmake -L` won't print dependent options when the dependency condition is not met.) We
            # will possibly change this in the future by parsing CMakeLists.txt ourselves (then additional_options would
            # also not be needed to be specified here).
            true_var = additional_options.get(var)
            if true_var is not None:
                build_options[true_var] = val
            elif var.startswith(("BUILD_", "USE_", "CMAKE_")) or var.endswith(
                    ("EXITCODE", "EXITCODE__TRYRUN_OUTPUT")):
                build_options[var] = val

            if var in low_priority_aliases:
                key = low_priority_aliases[var]
                if key not in build_options:
                    build_options[key] = val

        # The default value cannot be easily obtained in CMakeLists.txt. We set it here.
        py_lib_path = sysconfig.get_path("purelib")
        cmake_prefix_path = build_options.get("CMAKE_PREFIX_PATH", None)
        if cmake_prefix_path:
            build_options["CMAKE_PREFIX_PATH"] = (
                py_lib_path + ";" + cast(str, cmake_prefix_path))
        else:
            build_options["CMAKE_PREFIX_PATH"] = py_lib_path

        # Some options must be post-processed. Ideally, this list will be shrunk to only one or two options in the
        # future, as CMake can detect many of these libraries pretty comfortably. We have them here for now before CMake
        # integration is completed. They appear here not in the CMake.defines call below because they start with either
        # "BUILD_" or "USE_" and must be overwritten here.
        build_options.update({
            # Note: Do not add new build options to this dict if it is directly read from environment variable -- you
            # only need to add one in `CMakeLists.txt`. All build options that start with "BUILD_", "USE_", or "CMAKE_"
            # are automatically passed to CMake; For other options you can add to additional_options above.
            "BUILD_PYTHON": build_python,
            "BUILD_TEST": build_test,
            # Most library detection should go to CMake script, except this one, which Python can do a much better job
            # due to NumPy's inherent Pythonic nature.
            "USE_NUMPY": USE_NUMPY,
        })

        # Options starting with CMAKE_
        cmake__options = {
            "CMAKE_INSTALL_PREFIX": install_dir,
        }

        # We set some CMAKE_* options in our Python build code instead of relying on the user's direct settings. Emit an
        # error if the user also attempts to set these CMAKE options directly.
        specified_cmake__options = set(build_options).intersection(
            cmake__options)

        if len(specified_cmake__options) > 0:
            print(
                ", ".join(specified_cmake__options) +
                " should not be specified in the environment variable. They are directly set by PyTorch build script."
            )
            sys.exit(1)
        build_options.update(cmake__options)

        CMake.defines(
            args,
            PYTHON_EXECUTABLE=sys.executable,
            PYTHON_LIBRARY=cmake_python_library,
            PYTHON_INCLUDE_DIR=sysconfig.get_path("include"),
            TORCH_BUILD_VERSION=version,
            NUMPY_INCLUDE_DIR=NUMPY_INCLUDE_DIR,
            **build_options,
        )

        # Prefer the ccache compiler wrappers on macOS when available and the
        # user has not chosen compilers explicitly.
        expected_wrapper = "/usr/local/opt/ccache/libexec"
        if IS_DARWIN and os.path.exists(expected_wrapper):
            if "CMAKE_C_COMPILER" not in build_options and "CC" not in os.environ:
                CMake.defines(
                    args, CMAKE_C_COMPILER="{}/gcc".format(expected_wrapper))
            if "CMAKE_CXX_COMPILER" not in build_options and "CXX" not in os.environ:
                CMake.defines(
                    args, CMAKE_CXX_COMPILER="{}/g++".format(expected_wrapper))

        for env_var_name in my_env:
            if env_var_name.startswith("gh"):
                # github env vars use utf-8, on windows, non-ascii code may
                # cause problem, so encode first
                try:
                    my_env[env_var_name] = str(
                        my_env[env_var_name].encode("utf-8"))
                except UnicodeDecodeError as e:
                    shex = ":".join("{:02x}".format(ord(c))
                                    for c in my_env[env_var_name])
                    print(
                        "Invalid ENV[{}] = {}".format(env_var_name, shex),
                        file=sys.stderr,
                    )
                    print(e, file=sys.stderr)

        # According to the CMake manual, we should pass the arguments first,
        # and put the directory as the last element. Otherwise, these flags
        # may not be passed correctly.
        # Reference:
        # 1. https://cmake.org/cmake/help/latest/manual/cmake.1.html#synopsis
        # 2. https://stackoverflow.com/a/27169347
        args.append(base_dir)
        self.run(args, env=my_env)
def main():
    """Command-line entry point for the Brython package tool.

    Parses CLI flags and performs the requested action(s): installing
    Brython into the current directory, adding CPython packages, building
    distributions / loadable packages / virtual file systems, resetting or
    regenerating brython_modules.js, updating scripts, or starting the
    development HTTP server.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('--add_package',
                        help="Add a CPython package in Lib/site-packages")
    parser.add_argument('--install',
                        help='Install Brython in an empty directory',
                        action="store_true")
    parser.add_argument('--make_dist',
                        help='Make a Python distribution',
                        action="store_true")
    parser.add_argument('--make_package',
                        help='Make a loadable Python package')
    parser.add_argument('--make_file_system',
                        help='Make a virtual file system')
    parser.add_argument(
        '--modules',
        help=
        'Create brython_modules.js with all the modules used by the application',
        action="store_true")
    parser.add_argument('--reset',
                        help='Reset brython_modules.js to stdlib',
                        action="store_true")
    parser.add_argument('--server',
                        help='Start development server',
                        nargs="?",
                        default="absent")
    parser.add_argument('--update',
                        help='Update Brython scripts',
                        action="store_true")
    args = parser.parse_args()

    if args.add_package:
        print('add package {}...'.format(args.add_package))
        package = __import__(args.add_package)
        package_file = package.__file__
        package_dir = os.path.dirname(package_file)
        lib_dir = os.path.join(os.getcwd(), 'Lib')
        if not os.path.exists(lib_dir):
            os.mkdir(lib_dir)
        dest_dir = os.path.join(lib_dir, 'site-packages')
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)
        if os.path.splitext(package_dir)[1] == '.egg':
            # Package lives inside a zipped egg: extract everything except
            # metadata and bytecode caches.
            import zipfile
            zf = zipfile.ZipFile(package_dir)
            for info in zf.infolist():
                if info.filename.startswith(('__pycache__', 'EGG-INFO')):
                    continue
                zf.extract(info, dest_dir)
                print('extract', info.filename)
            zf.close()
            print('done')
        elif not package_dir.split(os.sep)[-1] == "site-packages":
            # Package is a real directory: copy the whole tree.
            print('copy folder', package_dir)
            dest_dir = os.path.join(dest_dir, args.add_package)
            if os.path.exists(dest_dir):
                shutil.rmtree(dest_dir)
            shutil.copytree(package_dir, dest_dir)
        else:
            # Single-module package sitting directly in site-packages.
            print('copy single file', package_file)
            shutil.copyfile(
                package_file,
                os.path.join(dest_dir, os.path.basename(package_file)))

    if args.install:
        print('Installing Brython {}'.format(implementation))
        data_path = os.path.join(os.path.dirname(__file__), 'data')
        current_path_files = os.listdir(os.getcwd())
        # Refuse to clobber an existing installation without confirmation.
        if current_path_files and 'brython.js' in current_path_files:
            override = input('brython.js is already present in this directory.'
                             ' Override ? (Y/N)')
            if override.lower() != 'y':
                import sys
                print('exiting')
                sys.exit()
        for path in os.listdir(data_path):
            try:
                shutil.copyfile(os.path.join(data_path, path), path)
            except shutil.SameFileError:
                print(f'{path} has not been moved. Are the same file.')
        print('done')

    if args.update:
        print('Update Brython scripts to version {}'.format(implementation))
        data_path = os.path.join(os.path.dirname(__file__), 'data')
        for path in os.listdir(data_path):
            shutil.copyfile(os.path.join(data_path, path), path)

    if args.reset:
        print('Reset brython_modules.js to standard distribution')
        shutil.copyfile(os.path.join(os.getcwd(), 'brython_stdlib.js'),
                        os.path.join(os.getcwd(), 'brython_modules.js'))

    if args.modules:
        print('Create brython_modules.js with all the modules used by the '
              'application')
        from . import list_modules
        finder = list_modules.ModulesFinder()
        finder.inspect()
        finder.make_brython_modules()

    if args.make_dist:
        print('Make a Python distribution for the application')
        from . import list_modules
        finder = list_modules.ModulesFinder()
        finder.inspect()
        finder.make_brython_modules()
        finder.make_setup()
        print('done')

    if args.make_file_system:
        print('Create a Javascript file for all the files in the directory')
        # Argument has the form NAME or NAME#PREFIX.
        args_fs = args.make_file_system.split("#")
        if len(args_fs) > 2:
            # Fixed: the message previously referred to a non-existent
            # "--make_file_systems" flag.
            raise ValueError("--make_file_system expects at most 2 "
                             "arguments, got " + str(len(args_fs)))
        vfs_name = args_fs[0]
        prefix = args_fs[1] if len(args_fs) > 1 else None
        from .make_file_system import make
        make(vfs_name, prefix)
        print('done')

    if args.make_package:
        package_name = args.make_package
        from . import make_package
        make_package.make(package_name, os.getcwd())
        print("done")

    if args.server != "absent":
        # start development server
        import http.server
        import sysconfig

        cpython_site_packages = sysconfig.get_path("purelib")

        class Handler(http.server.CGIHTTPRequestHandler):

            def guess_type(self, path):
                ctype = super().guess_type(path)
                # in case the mimetype associated with .js in the Windows
                # registry is not correctly set
                if os.path.splitext(path)[1] == ".js":
                    ctype = "application/javascript"
                return ctype

            def translate_path(self, path):
                """Map /cpython_site_packages to local CPython site-packages
                directory."""
                elts = path.split('/')
                if len(elts) > 1 and elts[0] == '':
                    if elts[1] == 'cpython_site_packages':
                        # Strip a query string from the final component.
                        elts[-1] = elts[-1].split("?")[0]
                        return os.path.join(cpython_site_packages, *elts[2:])
                return super().translate_path(path)

        # port to be used when the server runs locally
        port = 8000 if args.server is None else int(args.server)
        print("Brython development server. "
              "Not meant to be used in production.")
        if args.server is None:
            print("For a different port provide command-line option "
                  '"--server PORT".')
        print("Press CTRL+C to Quit.\n")
        http.server.test(HandlerClass=Handler, port=port)
        # (tail of a method whose definition starts above this excerpt)
        return None

    def type_rewriter(self) -> TypeRewriter:
        """Return the type rewriter for use when generating stubs."""
        # NoOpRewriter leaves traced types untouched.
        return NoOpRewriter()

    def query_limit(self) -> int:
        """Maximum number of traces to query from the call trace store."""
        return 2000

    def include_unparsable_defaults(self) -> bool:
        """Output stubs for functions with defaults with invalid reprs?"""
        return False


# Install locations whose code should be excluded from tracing: this
# interpreter's stdlib plus its pure-Python and platform-specific
# site-packages directories.
lib_paths = {sysconfig.get_path(n) for n in ['stdlib', 'purelib', 'platlib']}
# if in a virtualenv, also exclude the real stdlib location
venv_real_prefix = getattr(sys, 'real_prefix', None)
if venv_real_prefix:
    lib_paths.add(
        sysconfig.get_path('stdlib', vars={'installed_base': venv_real_prefix})
    )
# exclude code objects from frozen importlib, which have a bogus co_filename
lib_paths.add('<frozen importlib.')
# get_path() may return None for some keys on some platforms; drop those so
# the str.startswith() call below receives a clean tuple of string prefixes.
LIB_PATHS = tuple(p for p in lib_paths if p is not None)


def default_code_filter(code: CodeType) -> bool:
    """A CodeFilter to exclude stdlib and site-packages."""
    return bool(code.co_filename and not code.co_filename.startswith(LIB_PATHS))
if not os.path.isdir(CUDA_HOME): raise OSError(f"Invalid CUDA_HOME: " "directory does not exist: {CUDA_HOME}") cuda_include_dir = os.path.join(CUDA_HOME, "include") extensions = [ Extension( "*", sources=cython_files, include_dirs=[ "../include/rmm", "../include", "../build/include", os.path.dirname(sysconfig.get_path("include")), cuda_include_dir, ], library_dirs=[get_python_lib(), os.path.join(os.sys.prefix, "lib")], libraries=["rmm"], language="c++", extra_compile_args=["-std=c++14"], ) ] setup( name="rmm", version="0.12.0", description="rmm - RAPIDS Memory Manager", url="https://github.com/rapidsai/rmm",
def main(argv):
    """Command-line entry point: build the project and run its test suite.

    Also dispatches the auxiliary developer workflows selected by flags:
    linting (--pep8), mypy, C/Python coverage, ASV benchmarks, refguide
    checks, documentation builds and interactive Python/IPython/Unix shells.
    Exits via sys.exit with the appropriate status in every branch.
    """
    parser = ArgumentParser(usage=__doc__.lstrip())
    parser.add_argument("--verbose", "-v", action="count", default=1,
                        help="more verbosity")
    parser.add_argument(
        "--no-build", "-n", action="store_true", default=False,
        help="do not build the project (use system installed version)")
    parser.add_argument("--build-only", "-b", action="store_true",
                        default=False,
                        help="just build, do not run any tests")
    parser.add_argument("--doctests", action="store_true", default=False,
                        help="Run doctests in module")
    parser.add_argument("--refguide-check", action="store_true", default=False,
                        help="Run refguide check (do not run regular tests.)")
    parser.add_argument("--coverage", action="store_true", default=False,
                        help=("report coverage of project code. HTML output"
                              " goes under build/coverage"))
    parser.add_argument("--gcov", action="store_true", default=False,
                        help=("enable C code coverage via gcov (requires GCC)."
                              " gcov output goes to build/**/*.gc*"))
    parser.add_argument("--lcov-html", action="store_true", default=False,
                        help=("produce HTML for C code coverage information "
                              "from a previous run with --gcov. "
                              "HTML output goes to build/lcov/"))
    parser.add_argument("--mode", "-m", default="fast",
                        help="'fast', 'full', or something that could be "
                             "passed to nosetests -A [default: fast]")
    parser.add_argument("--submodule", "-s", default=None,
                        help="Submodule whose tests to run (cluster,"
                             " constants, ...)")
    parser.add_argument("--pythonpath", "-p", default=None,
                        help="Paths to prepend to PYTHONPATH")
    parser.add_argument("--tests", "-t", action='append',
                        help="Specify tests to run")
    parser.add_argument("--python", action="store_true",
                        help="Start a Python shell with PYTHONPATH set")
    parser.add_argument("--ipython", "-i", action="store_true",
                        help="Start IPython shell with PYTHONPATH set")
    parser.add_argument("--shell", action="store_true",
                        help="Start Unix shell with PYTHONPATH set")
    parser.add_argument("--debug", "-g", action="store_true",
                        help="Debug build")
    parser.add_argument("--parallel", "-j", type=int, default=1,
                        help="Number of parallel jobs for build and testing")
    parser.add_argument("--show-build-log", action="store_true",
                        help="Show build output rather than using a log file")
    parser.add_argument("--bench", action="store_true",
                        help="Run benchmark suite instead of test suite")
    parser.add_argument("--bench-compare", action="append", metavar="BEFORE",
                        help=("Compare benchmark results of current HEAD to"
                              " BEFORE. Use an additional "
                              "--bench-compare=COMMIT to override HEAD with"
                              " COMMIT. Note that you need to commit your "
                              "changes first!"))
    parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER,
                        help="Arguments to pass to Nose, Python or shell")
    parser.add_argument("--pep8", action="store_true", default=False,
                        help="Perform pep8 check with flake8.")
    parser.add_argument("--mypy", action="store_true", default=False,
                        help="Run mypy on the codebase")
    parser.add_argument("--doc", action="append", nargs="?",
                        const="html-scipyorg", help="Build documentation")
    args = parser.parse_args(argv)

    if args.pep8:
        # Lint the source using the configuration in tox.ini.
        os.system("flake8 scipy benchmarks/benchmarks")
        # Lint just the diff since branching off of main using a
        # stricter configuration.
        lint_diff = os.path.join(ROOT_DIR, 'tools', 'lint_diff.py')
        os.system(lint_diff)
        sys.exit(0)

    if args.mypy:
        sys.exit(run_mypy(args))

    if args.bench_compare:
        args.bench = True
        args.no_build = True  # ASV does the building

    if args.lcov_html:
        # generate C code coverage output
        lcov_generate()
        sys.exit(0)

    if args.pythonpath:
        for p in reversed(args.pythonpath.split(os.pathsep)):
            sys.path.insert(0, p)

    if args.gcov:
        gcov_reset_counters()

    if args.debug and args.bench:
        print("*** Benchmarks should not be run against debug version; "
              "remove -g flag ***")

    if not args.no_build:
        # Build in-tree and make the fresh install importable before testing.
        site_dir = build_project(args)
        sys.path.insert(0, site_dir)
        os.environ['PYTHONPATH'] = \
            os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))

    extra_argv = args.args[:]
    if extra_argv and extra_argv[0] == '--':
        extra_argv = extra_argv[1:]

    if args.python:
        if extra_argv:
            # Don't use subprocess, since we don't want to include the
            # current path in PYTHONPATH.
            sys.argv = extra_argv
            with open(extra_argv[0], 'r') as f:
                script = f.read()
            sys.modules['__main__'] = new_module('__main__')
            ns = dict(__name__='__main__', __file__=extra_argv[0])
            exec(script, ns)
            sys.exit(0)
        else:
            import code
            code.interact()
            sys.exit(0)

    if args.ipython:
        import IPython
        IPython.embed(user_ns={})
        sys.exit(0)

    if args.shell:
        shell = os.environ.get('SHELL', 'sh')
        print("Spawning a Unix shell...")
        os.execv(shell, [shell] + extra_argv)
        sys.exit(1)

    if args.doc:
        cmd = ["make", "-Cdoc", 'PYTHON="{}"'.format(sys.executable)]
        cmd += args.doc
        if args.parallel:
            cmd.append('SPHINXOPTS="-j{}"'.format(args.parallel))
        subprocess.run(cmd, check=True)
        sys.exit(0)

    if args.coverage:
        dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage')
        fn = os.path.join(dst_dir, 'coverage_html.js')
        if os.path.isdir(dst_dir) and os.path.isfile(fn):
            shutil.rmtree(dst_dir)
        extra_argv += ['--cov-report=html:' + dst_dir]

    if args.refguide_check:
        cmd = [
            os.path.join(ROOT_DIR, 'tools', 'refguide_check.py'), '--doctests'
        ]
        if args.submodule:
            cmd += [args.submodule]
        os.execv(sys.executable, [sys.executable] + cmd)
        sys.exit(0)

    if args.bench:
        # Run ASV
        items = extra_argv
        if args.tests:
            items += args.tests
        if args.submodule:
            items += [args.submodule]
        bench_args = []
        for a in items:
            bench_args.extend(['--bench', a])
        if not args.bench_compare:
            import scipy
            print("Running benchmarks for Scipy version %s at %s" %
                  (scipy.__version__, scipy.__file__))
            cmd = [
                'asv', 'run', '--dry-run', '--show-stderr', '--python=same'
            ] + bench_args
            retval = run_asv(cmd)
            sys.exit(retval)
        else:
            if len(args.bench_compare) == 1:
                commit_a = args.bench_compare[0]
                commit_b = 'HEAD'
            elif len(args.bench_compare) == 2:
                commit_a, commit_b = args.bench_compare
            else:
                # Fixed: this previously called `p.error(...)`, but `p` is
                # not defined here -- the ArgumentParser is named `parser`.
                parser.error("Too many commits to compare benchmarks for")

            # Check for uncommitted files
            if commit_b == 'HEAD':
                r1 = subprocess.call(
                    ['git', 'diff-index', '--quiet', '--cached', 'HEAD'])
                r2 = subprocess.call(['git', 'diff-files', '--quiet'])
                if r1 != 0 or r2 != 0:
                    print("*" * 80)
                    print("WARNING: you have uncommitted changes --- "
                          "these will NOT be benchmarked!")
                    print("*" * 80)

            # Fix commit ids (HEAD is local to current repo)
            p = subprocess.Popen(['git', 'rev-parse', commit_b],
                                 stdout=subprocess.PIPE)
            out, err = p.communicate()
            commit_b = out.strip()

            p = subprocess.Popen(['git', 'rev-parse', commit_a],
                                 stdout=subprocess.PIPE)
            out, err = p.communicate()
            commit_a = out.strip()

            cmd = [
                'asv', 'continuous', '--show-stderr', '--factor', '1.05',
                commit_a, commit_b
            ] + bench_args
            run_asv(cmd)
            sys.exit(1)

    if args.build_only:
        sys.exit(0)
    else:
        try:
            test, version, mod_path = import_module()
        except ImportError:
            # this may fail when running with --no-build, so try to detect
            # an installed scipy in a subdir inside a repo
            dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')
            from sysconfig import get_path
            py_path = get_path('platlib')
            site_dir = os.path.join(dst_dir, get_path_suffix(py_path, 3))
            print("Trying to import scipy from development installed path at:",
                  site_dir)
            sys.path.insert(0, site_dir)
            os.environ['PYTHONPATH'] = \
                os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))
            test, version, mod_path = import_module()

        if args.submodule:
            tests = [PROJECT_MODULE + "." + args.submodule]
        elif args.tests:
            tests = args.tests
        else:
            tests = None

    # Run the tests
    if not args.no_build:
        test_dir = site_dir
    else:
        test_dir = os.path.join(ROOT_DIR, 'build', 'test')
        if not os.path.isdir(test_dir):
            os.makedirs(test_dir)

    shutil.copyfile(os.path.join(ROOT_DIR, '.coveragerc'),
                    os.path.join(test_dir, '.coveragerc'))

    # Run from inside the build dir so coverage/relative paths resolve.
    cwd = os.getcwd()
    try:
        os.chdir(test_dir)
        print("Running tests for {} version:{}, installed at:{}".format(
            PROJECT_MODULE, version, mod_path))
        result = test(args.mode,
                      verbose=args.verbose,
                      extra_argv=extra_argv,
                      doctests=args.doctests,
                      coverage=args.coverage,
                      tests=tests,
                      parallel=args.parallel)
    finally:
        os.chdir(cwd)

    # `test` may return either a bool or a unittest-style result object.
    if isinstance(result, bool):
        sys.exit(0 if result else 1)
    elif result.wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)
def _get_purelib(): return get_path("purelib")
def _get_grumpy_stdlib():
    """Return the grumpy-runtime stdlib source path, or None if the
    grumpy-runtime distribution is not installed."""
    try:
        runtime_gopath = resource_filename(
            Requirement.parse('grumpy-runtime'),
            'grumpy_runtime/data/gopath',
        )
    except DistributionNotFound:
        return None
    return os.path.join(os.path.sep, runtime_gopath, 'src/__python__')


_GRUMPY_STDLIB_PATH = _get_grumpy_stdlib()
# Candidate locations of the CPython standard library.
# Fixed: the two get_path() results were joined with `+` (string
# concatenation), producing one bogus merged path instead of two entries.
_CPYTHON_STDLIB_PATHS = (
    [sysconfig.get_path('platstdlib'), sysconfig.get_path('stdlib')] +
    sysconfig.get_config_vars('LIBDEST', 'DESTLIB', 'BINLIBDEST'))
_NATIVE_MODULE_PREFIX = '__go__/'


class Import(object):
    """Represents a single module import and all its associated bindings.

    Each import pertains to a single module that is imported. Thus one import
    statement may produce multiple Import objects. E.g. "import foo, bar" makes
    an Import object for module foo and another one for module bar.
    """

    Binding = collections.namedtuple('Binding',
                                     ('bind_type', 'alias', 'value'))
#!/usr/bin/env python3
"""Meson post-install hook: refresh the icon cache and byte-compile the
installed Python modules (skipped when a package manager stages the
install via DESTDIR)."""

import sysconfig
from compileall import compile_dir
from os import environ, path
from subprocess import call

prefix = environ.get('MESON_INSTALL_PREFIX', '/usr/local')
datadir = path.join(prefix, 'share')
destdir = environ.get('DESTDIR', '')

# Package managers set this so we don't need to run
if not destdir:
    print('Updating icon cache...')
    icon_dir = path.join(datadir, 'icons', 'hicolor')
    call(['gtk-update-icon-cache', '-qtf', icon_dir])

    print('Compiling python bytecode...')
    moduledir = sysconfig.get_path('purelib', vars={'base': str(prefix)})
    compile_dir(destdir + path.join(moduledir, 'gtweak'), optimize=2)
'src/RowSet.cpp', 'src/Store.cpp', 'src/StoreFactory.cpp', 'src/TimeSeriesProperties.cpp', 'src/TimestampUtils.cpp', 'src/griddb.i', 'src/Util.cpp' ] DEPENDENTS = [ 'src/AggregationResult.h', 'src/ContainerInfo.h', 'src/Container.h', 'src/ExpirationInfo.h', 'src/Field.h' 'src/GSException.h', 'src/PartitionController.h', 'src/Query.h', 'src/QueryAnalysisEntry.h', 'src/RowKeyPredicate.h', 'src/RowList.h', 'src/RowSet.h', 'src/Store.h', 'src/StoreFactory.h', 'src/TimeSeriesProperties.h', 'src/TimestampUtils.h', 'src/gstype_python.i', 'src/gstype.i', 'src/Util.h', 'include/gridstore.h' ] site_packages_path = sysconfig.get_path('purelib') INCLUDES = ['include', 'src', site_packages_path + '/numpy/core/include/'] COMPILE_ARGS = ['-std=c++0x'] # For MacOS if platform.system() == 'Darwin': LIBRARIES = ['gridstore'] else: LIBRARIES = ['rt', 'gridstore'] SWIG_OPTS = ['-DSWIGWORDSIZE64', '-c++', '-outdir', '.', '-Isrc'] class CustomBuild(build): sub_commands = [('build_ext', build.has_ext_modules),
def gp(x: str) -> Optional[str]:
    """Look up the sysconfig installation path named *x* (e.g. 'purelib').

    Returns None when the path name is defined but unset for this platform.
    """
    resolved = sysconfig.get_path(x)
    return resolved