def build_so(module_name, sources, setup_args=None):
    """Compile *sources* into a C++11 extension module and copy the built
    shared object next to this file.

    :param module_name: name of the extension module to build
    :param sources: list of source file paths handed to distutils
    :param setup_args: optional setup arguments, normalized by
        ``generate_setup_args()``
    :return: absolute path of the copied shared object
    """
    from distutils.dist import Distribution
    from distutils.errors import DistutilsArgError
    from distutils.extension import Extension
    from shutil import copy2

    dist = Distribution(generate_setup_args(setup_args))
    extension = Extension(
        name=module_name,
        sources=sources,
        include_dirs=[find_handsome_include_dir()],
        extra_compile_args=['-std=c++11'],
    )
    if dist.ext_modules is None:
        dist.ext_modules = [extension]
    else:
        dist.ext_modules.append(extension)

    # Build inside a 'build' directory that lives next to this file.
    target_dir, _ = os.path.split(os.path.abspath(__file__))
    build_cmd = dist.get_command_obj('build')
    build_cmd.build_base = os.path.join(target_dir, 'build')

    dist.parse_config_files(dist.find_config_files())
    try:
        parsed_ok = dist.parse_command_line()
    except DistutilsArgError:
        raise
    if not parsed_ok:
        raise RuntimeError('Build cannot continue')

    ext_cmd = dist.get_command_obj("build_ext")
    dist.run_commands()

    # Copy the freshly built .so next to this file, replacing any old copy.
    built_path = os.path.abspath(ext_cmd.get_outputs()[0])
    _, so_name = os.path.split(built_path)
    target_path = os.path.join(target_dir, so_name)
    if os.path.isfile(target_path):
        os.unlink(target_path)
    copy2(built_path, target_path)
    return target_path
def build_cython_extension(py_or_pyx_file_path, cython_force_rebuild=True):
    """ Build a cython extension from a `.py` or `.pyx` file

    - build will be done in a sub-folder named `_pyxbld` in the py_or_pyx_file_path

    :param py_or_pyx_file_path: (str) path to a `.py` or `.pyx` file
    :param cython_force_rebuild: (bool) If True the cython extension is rebuild
        even if it was already build
    :return: (tuple) cython_extension_module_path, cython_module_c_file_path,
        cython_build_dir_path
    """
    module_dir = path_dirname(py_or_pyx_file_path)
    module__cython_name = path_splitext(path_basename(py_or_pyx_file_path))[0]
    cython_module_c_file_path = path_join(module_dir, module__cython_name + '.c')
    cython_build_dir_path = path_join(module_dir, '_pyxbld')

    args = ['--quiet', 'build_ext', '--build-lib', module_dir]
    if cython_force_rebuild:
        args.append('--force')
    dist = Distribution({'script_name': None, 'script_args': args})
    dist.ext_modules = [Extension(name=module__cython_name,
                                  sources=[py_or_pyx_file_path])]
    dist.cmdclass = {'build_ext': cython_build_ext}
    build = dist.get_command_obj('build')
    build.build_base = cython_build_dir_path

    try:
        dist.parse_command_line()
    except DistutilsArgError as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            ' DistutilsArgError: <{}>'.format(err)
        ])

    try:
        obj_build_ext = dist.get_command_obj('build_ext')
        dist.run_commands()
        cython_extension_module_path = obj_build_ext.get_outputs()[0]
        # BUGFIX: the original compared path_dirname(py_or_pyx_file_path)
        # against module_dir, which is defined as exactly that value, so the
        # check could never fire.  Compare the directory of the *built*
        # module instead, which is what the error message describes.
        cython_module_dir = path_dirname(cython_extension_module_path)
        if cython_module_dir != module_dir:
            raise Err('utils.build_cython_extension', [
                'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
                ' <module_dir> differs from final <cython_module_dir>',
                ' module_dir: <{}>'.format(module_dir),
                ' cython_module_dir: <{}>'.format(cython_module_dir)
            ])
    except Exception as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            ' Exception: <{}>'.format(err)
        ])

    return cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
def can_compile_extensions():
    """Probe whether the current toolchain can compile C extensions.

    Uses distutils' ``config`` command to try compiling a minimal program
    that includes ``Python.h``; returns the result of that probe.
    """
    from distutils.dist import Distribution

    distribution = Distribution({'script_name': None,
                                 'script_args': ["--build-ext"]})

    config_cmd = distribution.get_command_obj('config')
    config_cmd.dump_source = 0
    config_cmd.noisy = 0
    config_cmd.finalize_options()

    # build_ext knows the interpreter's include directories (for Python.h).
    ext_cmd = distribution.get_command_obj('build_ext')
    ext_cmd.finalize_options()

    return config_cmd.try_compile(
        'int main(void) {return 0;}',
        headers=['Python.h'],
        include_dirs=ext_cmd.include_dirs,
        lang='c',
    )
def build_cython_extension(py_or_pyx_file_path, cython_force_rebuild=True):
    """ Build a cython extension from a `.py` or `.pyx` file

    - build will be done in a sub-folder named `_pyxbld` in the py_or_pyx_file_path

    :param py_or_pyx_file_path: (str) path to a `.py` or `.pyx` file
    :param cython_force_rebuild: (bool) If True the cython extension is rebuild
        even if it was already build
    :return: (tuple) cython_extension_module_path, cython_module_c_file_path,
        cython_build_dir_path
    """
    module_dir = path_dirname(py_or_pyx_file_path)
    module__cython_name = path_splitext(path_basename(py_or_pyx_file_path))[0]
    cython_module_c_file_path = path_join(module_dir, module__cython_name + '.c')
    cython_build_dir_path = path_join(module_dir, '_pyxbld')

    args = ['--quiet', 'build_ext', '--build-lib', module_dir]
    if cython_force_rebuild:
        args.append('--force')
    dist = Distribution({'script_name': None, 'script_args': args})
    dist.ext_modules = [Extension(name=module__cython_name,
                                  sources=[py_or_pyx_file_path])]
    dist.cmdclass = {'build_ext': cython_build_ext}
    build = dist.get_command_obj('build')
    build.build_base = cython_build_dir_path

    try:
        dist.parse_command_line()
    except DistutilsArgError as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            ' DistutilsArgError: <{}>'.format(err)
        ])

    try:
        obj_build_ext = dist.get_command_obj('build_ext')
        dist.run_commands()
        cython_extension_module_path = obj_build_ext.get_outputs()[0]
        # BUGFIX: the original compared path_dirname(py_or_pyx_file_path)
        # against module_dir, which is defined as exactly that value, so the
        # check could never fire.  Compare the directory of the *built*
        # module instead, which is what the error message describes.
        cython_module_dir = path_dirname(cython_extension_module_path)
        if cython_module_dir != module_dir:
            raise Err('utils.build_cython_extension', [
                'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
                ' <module_dir> differs from final <cython_module_dir>',
                ' module_dir: <{}>'.format(module_dir),
                ' cython_module_dir: <{}>'.format(cython_module_dir)
            ])
    except Exception as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            ' Exception: <{}>'.format(err)
        ])

    return cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
def pyx_to_dll(filename, ext=None, force_rebuild=0):
    """Compile a PYX file to a DLL and return the name of the generated .so
        or .dll ."""
    assert os.path.exists(filename)
    path, name = os.path.split(filename)
    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension == ".pyx", extension
        ext = Extension(name=modname, sources=[filename])
    if DEBUG:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    dist = Distribution({"script_name": None, "script_args": args})
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    dist.cmdclass = {'build_ext': build_ext}
    # build in a "_pyxbld" sub-folder next to the source file
    build = dist.get_command_obj('build')
    build.build_base = os.path.join(path, "_pyxbld")
    try:
        ok = dist.parse_command_line()
    # BUGFIX: `except DistutilsArgError, msg:` is Python-2-only syntax and is
    # a SyntaxError on Python 3; use the `as` form, valid on 2.6+ and 3.x.
    except DistutilsArgError as msg:
        raise
def get_include(user=False):
    """Return this environment's C header ``include`` directory.

    :param user: when True (outside virtualenv/conda), resolve the headers of
        a ``--user`` style installation.
    """
    from distutils.dist import Distribution
    import os
    import sys

    # Are we running in a virtual environment?
    virtualenv = hasattr(sys, 'real_prefix') or \
        sys.prefix != getattr(sys, "base_prefix", sys.prefix)

    # Are we running in a conda environment?
    conda = os.path.exists(os.path.join(sys.prefix, 'conda-meta'))

    if virtualenv:
        # BUGFIX: sys.version[:3] yields "3.1" for Python 3.10+; build the
        # "X.Y" tag from sys.version_info instead.
        return os.path.join(sys.prefix, 'include', 'site',
                            'python%d.%d' % sys.version_info[:2])
    elif conda:
        if os.name == 'nt':
            return os.path.join(sys.prefix, 'Library', 'include')
        else:
            return os.path.join(sys.prefix, 'include')
    else:
        dist = Distribution({'name': 'pybind11'})
        dist.parse_config_files()
        dist_cobj = dist.get_command_obj('install', create=True)
        # Search for packages in user's home directory?
        if user:
            dist_cobj.user = user
            dist_cobj.prefix = ""
        dist_cobj.finalize_options()
        return os.path.dirname(dist_cobj.install_headers)
def package_install_site(name='', user=False, plat_specific=False):
    """pip-inspired, distutils-based method for fetching the
    default install location (site-packages path).

    Returns virtual environment or system site-packages, unless `user=True`
    in which case returns user-site (typ. under `~/.local/` on linux).

    If there's a distinction (on a particular system) between platform
    specific and pure python package locations, set `plat_specific=True`
    to retrieve the former.
    """
    dist = Distribution({'name': name})
    dist.parse_config_files()
    inst = dist.get_command_obj('install', create=True)
    # NOTE: specifying user=True will create user-site
    if user:
        inst.user = user
        inst.prefix = ""
    inst.finalize_options()

    # platform-specific site vs. purelib (platform-independent) site
    loc = inst.install_platlib if plat_specific else inst.install_purelib

    # install_lib specified in setup.cfg has highest precedence
    if 'install_lib' in dist.get_option_dict('install'):
        loc = inst.install_lib
    return loc
def get_include(user=False):
    """Return this environment's C header ``include`` directory.

    :param user: when True (and not in a virtualenv), resolve the headers of
        a ``--user`` style installation.
    """
    from distutils.dist import Distribution
    import os
    import sys

    # Are we running in a virtual environment?
    virtualenv = hasattr(sys, 'real_prefix') or \
        sys.prefix != getattr(sys, "base_prefix", sys.prefix)

    if virtualenv:
        # BUGFIX: sys.version[:3] yields "3.1" for Python 3.10+; build the
        # "X.Y" tag from sys.version_info instead.
        return os.path.join(sys.prefix, 'include', 'site',
                            'python%d.%d' % sys.version_info[:2])
    else:
        dist = Distribution({'name': 'pybind11'})
        dist.parse_config_files()
        dist_cobj = dist.get_command_obj('install', create=True)
        # Search for packages in user's home directory?
        if user:
            dist_cobj.user = user
            dist_cobj.prefix = ""
        dist_cobj.finalize_options()
        return os.path.dirname(dist_cobj.install_headers)
def get_include(user=False):
    """Return this environment's C header ``include`` directory.

    :param user: when True (and not in a virtualenv), resolve the headers of
        a ``--user`` style installation.
    """
    from distutils.dist import Distribution
    import os
    import sys

    # Are we running in a virtual environment?
    virtualenv = hasattr(sys, "real_prefix") or sys.prefix != getattr(
        sys, "base_prefix", sys.prefix
    )

    if virtualenv:
        # BUGFIX: sys.version[:3] yields "3.1" for Python 3.10+; build the
        # "X.Y" tag from sys.version_info instead.
        return os.path.join(
            sys.prefix, "include", "site",
            "python%d.%d" % sys.version_info[:2])
    else:
        dist = Distribution({"name": "pybind11"})
        dist.parse_config_files()
        dist_cobj = dist.get_command_obj("install", create=True)
        # Search for packages in user's home directory?
        if user:
            dist_cobj.user = user
            dist_cobj.prefix = ""
        dist_cobj.finalize_options()
        return os.path.dirname(dist_cobj.install_headers)
def get_include(user=False):
    r'''
    Get the relevant ``include`` directory.

    (c) Sylvain Corlay, https://github.com/pybind/python_example
    '''
    from distutils.dist import Distribution

    # Are we running in a virtual environment?
    # - check
    virtualenv = hasattr(sys, 'real_prefix') or \
        sys.prefix != getattr(sys, "base_prefix", sys.prefix)
    # - return path
    if virtualenv:
        # BUGFIX: sys.version[:3] yields "3.1" for Python 3.10+; build the
        # "X.Y" tag from sys.version_info instead.
        return os.path.join(sys.prefix, 'include', 'site',
                            'python%d.%d' % sys.version_info[:2])

    # Search
    dist = Distribution({'name': 'cppmat'})
    dist.parse_config_files()
    dist_cobj = dist.get_command_obj('install', create=True)

    # Search for packages in user's home directory?
    if user:
        dist_cobj.user = user
        dist_cobj.prefix = ""

    # Search
    dist_cobj.finalize_options()
    return os.path.dirname(dist_cobj.install_headers)
def package_install_site(name='', user=False, plat_specific=False):
    """pip-inspired, distutils-based method for fetching the
    default install location (site-packages path).

    Returns virtual environment or system site-packages, unless `user=True`
    in which case returns user-site (typ. under `~/.local/` on linux).

    If there's a distinction (on a particular system) between platform
    specific and pure python package locations, set `plat_specific=True`
    to retrieve the former.
    """
    dist = Distribution({'name': name})
    dist.parse_config_files()

    inst = dist.get_command_obj('install', create=True)
    # NOTE: specifying user=True will create user-site
    if user:
        inst.user = user
        inst.prefix = ""
    inst.finalize_options()

    # platform-specific site vs. purelib (platform-independent) site
    if plat_specific:
        loc = inst.install_platlib
    else:
        loc = inst.install_purelib

    # install_lib specified in setup.cfg has highest precedence
    override = dist.get_option_dict('install')
    if 'install_lib' in override:
        loc = inst.install_lib

    return loc
def distutils_scheme(dist_name, user=False, home=None, root=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}
    d = Distribution({'name': dist_name})
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme['headers'] = os.path.join(sys.prefix,
                                         'include',
                                         'site',
                                         'python%d.%d' % sys.version_info[:2],
                                         dist_name)

    if root is not None:
        scheme["headers"] = os.path.join(
            root,
            os.path.abspath(scheme["headers"])[1:],
        )

    return scheme
def distutils_scheme(dist_name, user=False, home=None, root=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}
    d = Distribution({'name': dist_name})
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme['headers'] = os.path.join(sys.prefix,
                                         'include',
                                         'site',
                                         'python%d.%d' % sys.version_info[:2],
                                         dist_name)

    if root is not None:
        scheme["headers"] = os.path.join(
            root,
            os.path.abspath(scheme["headers"])[1:],
        )

    return scheme
def pyx_to_dll(filename, ext=None, force_rebuild=0):
    """Compile a PYX file to a DLL and return the name of the generated .so
        or .dll ."""
    assert os.path.exists(filename)
    path, name = os.path.split(filename)
    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension == ".pyx", extension
        ext = Extension(name=modname, sources=[filename])
    if DEBUG:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    dist = Distribution({"script_name": None, "script_args": args})
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    dist.cmdclass = {'build_ext': build_ext}
    # build in a "_pyxbld" sub-folder next to the source file
    build = dist.get_command_obj('build')
    build.build_base = os.path.join(path, "_pyxbld")
    try:
        ok = dist.parse_command_line()
    # BUGFIX: `except DistutilsArgError, msg:` is Python-2-only syntax and is
    # a SyntaxError on Python 3; use the `as` form, valid on 2.6+ and 3.x.
    except DistutilsArgError as msg:
        raise
def self_upgrade(): """Upgrade ourselves with pip.""" # Run pip using the current python executable to accommodate for virtualenvs command = ( [sys.executable] + ["-m", "pip"] + ["install", "MozPhab"] + ["--upgrade"] + ["--no-cache-dir"] + ["--disable-pip-version-check"] ) if config.get_pre_releases: command += ["--pre"] # sys.path[0] is the directory containing the script that was used to # start python. This will be something like: # "<python environment>/bin" or "<python environment>\Scripts" (Windows) script_dir = Path(sys.path[0]) # If moz-phab was installed with --user, we need to pass it to pip # Create "install" distutils command with --user to find the scripts_path d = Distribution() d.parse_config_files() i = d.get_command_obj("install", create=True) # Forcing the environment detected by Distribution to the --user one i.user = True i.prefix = i.exec_prefix = i.home = i.install_base = i.install_platbase = None i.finalize_options() # Checking if the moz-phab script is installed in user's scripts directory user_dir = Path(i.install_scripts).resolve() if script_dir == user_dir: command.append("--user") if environment.IS_WINDOWS: # Windows does not allow to remove the exe file of the running process. # Renaming the `moz-phab.exe` file to allow pip to install a new version. temp_exe = script_dir / "moz-phab-temp.exe" try: temp_exe.unlink() except FileNotFoundError: pass exe = script_dir / "moz-phab.exe" exe.rename(temp_exe) try: check_call(command) except Exception: temp_exe.rename(exe) raise if not exe.is_file(): # moz-phab.exe is not created - install wasn't needed. temp_exe.rename(exe) else: check_call(command)
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    # (fixed comment typo: "sitex-effect" -> "side-effect")
    assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python%d.%d' % sys.version_info[:2],
            dist_name,
        )

    if root is not None:
        path_no_drive = os.path.splitdrive(
            os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(
            root,
            path_no_drive[1:],
        )

    return scheme
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python%d.%d' % sys.version_info[:2],
            dist_name,
        )

    if root is not None:
        path_no_drive = os.path.splitdrive(
            os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(
            root,
            path_no_drive[1:],
        )

    return scheme
def distutils_scheme(
    dist_name, user=False, home=None, root=None, isolated=False, prefix=None
):
    # type:(str, bool, str, str, bool, str) -> Dict[str, str]
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    dist_args = {"name": dist_name}  # type: Dict[str, Union[str, List[str]]]
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    d.parse_config_files()
    obj = None  # type: Optional[DistutilsCommand]
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i = cast(distutils_install_command, obj)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    assert not (home and prefix), "home={} prefix={}".format(home, prefix)
    i.user = user or i.user
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    # Collect the install_* attributes for every scheme key.
    scheme = {key: getattr(i, "install_" + key) for key in SCHEME_KEYS}

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme["headers"] = os.path.join(
            sys.prefix,
            "include",
            "site",
            "python{}".format(get_major_minor_version()),
            dist_name,
        )

    if root is not None:
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
def _distutils_install():
    """Return the distutils install scheme with paths made prefix-relative.

    Follows pip's logic:
    https://github.com/pypa/pip/blob/master/src/pip/_internal/locations.py#L95
    """
    dist = Distribution({"script_args": "--no-user-cfg"})
    dist.parse_config_files()
    installer = dist.get_command_obj("install", create=True)
    # Use a one-character prefix so it can be stripped off below.
    installer.prefix = "a"
    installer.finalize_options()

    scheme = {}
    for key in SCHEME_KEYS:
        raw_path = getattr(installer, "install_{}".format(key))
        # Drop the "a" prefix, then any leading path separator.
        scheme[key] = raw_path[1:].lstrip(os.sep)
    return scheme
def _build_impl(self):
    """Cythonize and compile ``self.extension``; return the built .so path."""
    dist = Distribution({"script_name": None, "script_args": ["build_ext"]})
    dist.ext_modules = cythonize([self.extension])
    dist.include_dirs = []
    dist.cmdclass = {'build_ext': custom_build_ext}

    # following the convention of cython's pyxbuild and naming
    # base directory "_pyxbld"
    build_cmd = dist.get_command_obj('build')
    build_cmd.build_base = join(self.CYMJ_DIR_PATH, 'generated',
                                '_pyxbld_%s' % self.__class__.__name__)

    dist.parse_command_line()
    obj_build_ext = dist.get_command_obj("build_ext")
    dist.run_commands()
    built_so_file_path, = obj_build_ext.get_outputs()
    return built_so_file_path
def build(self):
    """Cythonize and compile ``self.extension``; return the built .so path."""
    dist = Distribution({"script_name": None, "script_args": ["build_ext"]})
    dist.ext_modules = cythonize([self.extension])
    dist.include_dirs = []
    dist.cmdclass = {'build_ext': custom_build_ext}

    # following the convention of cython's pyxbuild and naming
    # base directory "_pyxbld"
    build_cmd = dist.get_command_obj('build')
    build_cmd.build_base = join(self.CYMJ_DIR_PATH, 'generated',
                                '_pyxbld_%s' % self.__class__.__name__)

    dist.parse_command_line()
    obj_build_ext = dist.get_command_obj("build_ext")
    dist.run_commands()
    so_file_path, = obj_build_ext.get_outputs()
    return so_file_path
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    # type:(str, bool, str, str, bool, str) -> dict
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {"name": dist_name}  # type: Dict[str, Union[str, List[str]]]
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    d.parse_config_files()
    # NOTE: Ignoring type since mypy can't find attributes on 'Command'
    i = d.get_command_obj("install", create=True)  # type: Any
    assert i is not None
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    if "install_lib" in d.get_option_dict("install"):  # type: ignore
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme["headers"] = os.path.join(
            sys.prefix, "include", "site",
            "python%d.%d" % sys.version_info[:2], dist_name
        )

    if root is not None:
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
def _distutils_install():
    """Return the install scheme as prefix-relative paths.

    Configuration files are deliberately NOT parsed, so they cannot hijack
    the computed paths.
    """
    # configuration files not parsed so they do not hijack paths
    dist = Distribution({"script_args": "--no-user-cfg"})
    if hasattr(sys, "_framework"):
        # disable macOS static paths for framework
        sys._framework = None

    installer = dist.get_command_obj("install", create=True)
    # paths generated are relative to prefix that contains the path sep,
    # this makes it relative
    installer.prefix = os.sep
    installer.finalize_options()

    scheme = {}
    for key in SCHEME_KEYS:
        raw_path = getattr(installer, "install_{}".format(key))
        scheme[key] = raw_path[1:].lstrip(os.sep)
    return scheme
def compiler_type():
    """
    Gets the compiler type from distutils. On Windows with MSVC it will be
    "msvc". On OS X and linux it is "unix".
    """
    dist = Distribution()
    dist.parse_config_files()
    build_cmd = dist.get_command_obj('build')
    build_cmd.ensure_finalized()
    compiler_obj = new_compiler(compiler=build_cmd.compiler)
    return compiler_obj.compiler_type
def test_initialize(self):
    # we don't use the dist from setUp because
    # we want to test before finalize is called
    dist = Distribution(attrs=self.attrs)
    bdist = dist.get_command_obj('bdist_prestoadmin')
    # BUGFIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(bdist.bdist_dir, None)
    self.assertEqual(bdist.dist_dir, None)
    self.assertEqual(bdist.virtualenv_version, None)
    self.assertEqual(bdist.keep_temp, False)
    self.assertEqual(bdist.online_install, False)
def compiler_type():
    """
    Gets the compiler type from distutils. On Windows with MSVC it will be
    "msvc". On macOS and linux it is "unix".
    """
    dist = Distribution()
    dist.parse_config_files()
    build_cmd = dist.get_command_obj("build")
    build_cmd.ensure_finalized()
    compiler_obj = new_compiler(compiler=build_cmd.compiler)
    return compiler_obj.compiler_type
def test_initialize(self):
    # we don't use the dist from setUp because
    # we want to test before finalize is called
    dist = Distribution(attrs=self.attrs)
    bdist = dist.get_command_obj('bdist_prestoadmin')
    # BUGFIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(bdist.bdist_dir, None)
    self.assertEqual(bdist.dist_dir, None)
    self.assertEqual(bdist.virtualenv_version, None)
    self.assertEqual(bdist.keep_temp, False)
    self.assertEqual(bdist.online_install, False)
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    if i.install_lib is not None:
        # install_lib takes precedence over purelib and platlib
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python%d.%d' % sys.version_info[:2],
            dist_name,
        )

    if root is not None:
        scheme["headers"] = os.path.join(
            root,
            os.path.abspath(scheme["headers"])[1:],
        )

    return scheme
def can_compile_extensions():
    """Probe whether the current toolchain can compile C extensions.

    Tries to compile a minimal program that includes ``Python.h``;
    returns False when the compile attempt raises a DistutilsError,
    otherwise the result of the probe.
    """
    from distutils.dist import Distribution
    from distutils.errors import DistutilsError

    dist = Distribution({'script_name': None, 'script_args': ["--build-ext"]})

    config_cmd = dist.get_command_obj('config')
    config_cmd.dump_source = 0
    config_cmd.noisy = 0
    config_cmd.finalize_options()

    # build_ext knows the interpreter's include directories (for Python.h).
    ext_cmd = dist.get_command_obj('build_ext')
    ext_cmd.finalize_options()

    try:
        return config_cmd.try_compile(
            'int main(void) {return 0;}',
            headers=['Python.h'],
            include_dirs=ext_cmd.include_dirs,
            lang='c',
        )
    except DistutilsError:
        return False
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    if i.install_lib is not None:
        # install_lib takes precedence over purelib and platlib
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # BUGFIX: sys.version[:3] truncates "3.10" to "3.1"; derive the
        # "X.Y" tag from sys.version_info instead.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python%d.%d' % sys.version_info[:2],
            dist_name,
        )

    if root is not None:
        scheme["headers"] = os.path.join(
            root,
            os.path.abspath(scheme["headers"])[1:],
        )

    return scheme
def setUp(self):
    """Build the Distribution attrs and a finalized bdist_prestoadmin command."""
    super(TestBDistPrestoAdmin, self).setUp()
    self.attrs = dict(
        name="prestoadmin",
        cmdclass={"bdist_prestoadmin": bdist_prestoadmin},
        version="1.1",
        packages=["prestoadmin"],
        package_dir={"prestoadmin": "prestoadmin"},
        install_requires=["fabric"],
    )
    # instantiation of the object calls
    # initialize_options which is what we are testing
    dist = Distribution(attrs=self.attrs)
    self.bdist = dist.get_command_obj("bdist_prestoadmin")
    self.bdist.finalize_options()
def setUp(self):
    """Build the Distribution attrs and a finalized bdist_prestoadmin command."""
    super(TestBDistPrestoAdmin, self).setUp()
    self.attrs = dict(
        name='prestoadmin',
        cmdclass={'bdist_prestoadmin': bdist_prestoadmin},
        version='1.2',
        packages=['prestoadmin'],
        package_dir={'prestoadmin': 'prestoadmin'},
        install_requires=['fabric'],
    )
    # instantiation of the object calls
    # initialize_options which is what we are testing
    dist = Distribution(attrs=self.attrs)
    self.bdist = dist.get_command_obj('bdist_prestoadmin')
    self.bdist.finalize_options()
def test_finalize_argvs(self):
    self.attrs['script_args'] = [
        'bdist_prestoadmin',
        '--bdist-dir=junk',
        '--dist-dir=tmp',
        '--virtualenv-version=12.0.1',
        '-k'
    ]
    # we don't use the dist from setUp because
    # we want to test with additional arguments
    dist = Distribution(attrs=self.attrs)
    dist.parse_command_line()
    bdist = dist.get_command_obj('bdist_prestoadmin')
    bdist.finalize_options()
    # BUGFIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(bdist.bdist_dir, 'junk')
    self.assertEqual(bdist.dist_dir, 'tmp')
    self.assertEqual(bdist.virtualenv_version, '12.0.1')
    self.assertEqual(bdist.keep_temp, True)
def configure_distutils_command(cmdline):
    """
    Helper to configure a command class, but not run it just yet.

    This will have strange side effects if you pass in things `distutils`
    deals with internally.

    :param cmdline: The command line (sans the executable name)
    :return: Command instance
    """
    dist = Distribution(attrs={
        "cmdclass": vars(frontend),
        "script_args": shlex.split(cmdline),
    })
    dist.parse_command_line()
    assert len(dist.commands) == 1
    command_name = dist.commands[0]
    cmdinst = dist.get_command_obj(command_name)
    cmdinst.ensure_finalized()
    return cmdinst
def configure_distutils_command(cmdline):
    """
    Helper to configure a command class, but not run it just yet.

    This will have strange side effects if you pass in things `distutils`
    deals with internally.

    :param cmdline: The command line (sans the executable name)
    :return: Command instance
    """
    attrs = {
        "cmdclass": vars(frontend),
        "script_args": shlex.split(cmdline),
    }
    dist = Distribution(attrs=attrs)
    dist.parse_command_line()
    assert len(dist.commands) == 1
    cmdinst = dist.get_command_obj(dist.commands[0])
    cmdinst.ensure_finalized()
    return cmdinst
def test_finalize_argvs(self):
    """Command-line script_args must win over the defaults.

    We don't use the dist from setUp because we want to test with
    additional arguments.
    """
    self.attrs['script_args'] = ['bdist_prestoadmin',
                                 '--bdist-dir=junk',
                                 '--dist-dir=tmp',
                                 '--virtualenv-version=12.0.1',
                                 '-k']
    dist = Distribution(attrs=self.attrs)
    dist.parse_command_line()
    bdist = dist.get_command_obj('bdist_prestoadmin')
    bdist.finalize_options()
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(bdist.bdist_dir, 'junk')
    self.assertEqual(bdist.dist_dir, 'tmp')
    self.assertEqual(bdist.virtualenv_version, '12.0.1')
    self.assertEqual(bdist.keep_temp, True)
def setUp(self):
    """Prepare the bdist_prestoadmin command object for each test.

    Instantiation of the command object calls initialize_options,
    which is what we are testing.
    """
    super(TestBDistPrestoAdmin, self).setUp()
    attrs = {
        'name': 'prestoadmin',
        'version': '1.2',
        'packages': ['prestoadmin'],
        'package_dir': {'prestoadmin': 'prestoadmin'},
        'install_requires': ['fabric'],
        'cmdclass': {'bdist_prestoadmin': bdist_prestoadmin},
    }
    self.attrs = attrs
    self.bdist = Distribution(attrs=attrs).get_command_obj('bdist_prestoadmin')
    self.bdist.finalize_options()
def test_finalize_argvs(self):
    """script_args passed on the command line should override defaults.

    We don't use the dist from setUp because we want to test with
    additional arguments.
    """
    self.attrs["script_args"] = [
        "bdist_prestoadmin",
        "--bdist-dir=junk",
        "--dist-dir=tmp",
        "--virtualenv-version=12.0.1",
        "-k",
    ]
    dist = Distribution(attrs=self.attrs)
    dist.parse_command_line()
    bdist = dist.get_command_obj("bdist_prestoadmin")
    bdist.finalize_options()
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(bdist.bdist_dir, "junk")
    self.assertEqual(bdist.dist_dir, "tmp")
    self.assertEqual(bdist.virtualenv_version, "12.0.1")
    self.assertEqual(bdist.keep_temp, True)
def compile_with_distutils(extension_name, src_filename, extra_objects=None,
                           extra_compiler_flags=None, extra_link_flags=None,
                           print_commands=False):
    """Compile *src_filename* into a shared extension module via distutils.

    Copied largely from pyxbuild.

    :param extension_name: name of the extension module to build
    :param src_filename: path of the source file to compile
    :param extra_objects: optional extra object files to link in
    :param extra_compiler_flags: optional extra compiler flags
    :param extra_link_flags: optional extra linker flags
    :param print_commands: accepted for interface compatibility; not
        consulted in this implementation
    :return: path of the generated shared library
    """
    from distutils.dist import Distribution
    from distutils.extension import Extension

    # BUG FIX: the defaults were mutable lists ([]) shared across calls;
    # use None sentinels and create fresh lists per call instead.
    extra_objects = [] if extra_objects is None else extra_objects
    extra_compiler_flags = [] if extra_compiler_flags is None else extra_compiler_flags
    extra_link_flags = [] if extra_link_flags is None else extra_link_flags

    compiler_flags = get_compiler_flags(extra_compiler_flags)
    # don't need -shared in the flags since the default CC on Mac OS
    # might specify -bundle instead and the two are mutually exclusive
    linker_flags = get_linker_flags(extra_link_flags, shared=False)
    ext = Extension(name=extension_name,
                    sources=[src_filename],
                    include_dirs=include_dirs,
                    extra_objects=extra_objects,
                    extra_compile_args=compiler_flags,
                    extra_link_args=linker_flags)
    script_args = ['build_ext', '--quiet']
    setup_args = {"script_name": None, "script_args": script_args}
    dist = Distribution(setup_args)
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # Skip setup.cfg so local project configuration does not leak into
    # this standalone build.
    config_files = dist.find_config_files()
    try:
        config_files.remove('setup.cfg')
    except ValueError:
        pass
    dist.parse_config_files(config_files)
    dist.parse_command_line()
    obj_build_ext = dist.get_command_obj("build_ext")
    dist.run_commands()
    shared_name = obj_build_ext.get_outputs()[0]
    return shared_name
def cython(filename, verbose=0, compile_message=False, use_cache=False,
           create_local_c_file=False, annotate=True, sage_namespace=True,
           create_local_so_file=False):
    r"""
    Compile a Cython file. This converts a Cython file to a C (or C++ file),
    and then compiles that. The .c file and the .so file are created in a
    temporary directory.

    INPUT:

    - ``filename`` -- the name of the file to be compiled. Should end with
      'pyx'.

    - ``verbose`` (integer, default 0) -- level of verbosity. A negative
      value ensures complete silence.

    - ``compile_message`` (bool, default False) -- if True, print
      ``'Compiling <filename>...'`` to the standard error.

    - ``use_cache`` (bool, default False) -- if True, check the temporary
      build directory to see if there is already a corresponding .so file. If
      so, and if the .so file is newer than the Cython file, don't recompile,
      just reuse the .so file.

    - ``create_local_c_file`` (bool, default False) -- if True, save a copy
      of the ``.c`` or ``.cpp`` file in the current directory.

    - ``annotate`` (bool, default True) -- if True, create an html file which
      annotates the conversion from .pyx to .c. By default this is only
      created in the temporary directory, but if ``create_local_c_file`` is
      also True, then save a copy of the .html file in the current directory.

    - ``sage_namespace`` (bool, default True) -- if True, import
      ``sage.all``.

    - ``create_local_so_file`` (bool, default False) -- if True, save a copy
      of the compiled .so file in the current directory.

    OUTPUT: a tuple ``(name, dir)`` where ``name`` is the name of
    the compiled module and ``dir`` is the directory containing
    the generated files.

    TESTS:

    Before :trac:`12975`, it would have been needed to write ``#clang c++``,
    but upper case ``C++`` has resulted in an error.
    Using pkgconfig to find the libraries, headers and macros. This is a
    work around while waiting for :trac:`22461` which will offer a better
    solution::

        sage: code = [
        ....: "#clang C++",
        ....: "from sage.rings.polynomial.multi_polynomial_libsingular cimport MPolynomial_libsingular",
        ....: "from sage.libs.singular.polynomial cimport singular_polynomial_pow",
        ....: "def test(MPolynomial_libsingular p):",
        ....: "    singular_polynomial_pow(&p._poly, p._poly, 2, p._parent_ring)"]
        sage: cython(os.linesep.join(code))

    The function ``test`` now manipulates internal C data of polynomials,
    squaring them::

        sage: P.<x,y>=QQ[]
        sage: test(x)
        sage: x
        x^2

    Check that compiling C++ code works::

        sage: cython("# distutils: language = c++\n"+
        ....: "from libcpp.vector cimport vector\n"
        ....: "cdef vector[int] * v = new vector[int](4)\n")

    Check that compiling C++ code works when creating a local C file,
    first moving to a tempdir to avoid clutter.  Before :trac:`22113`,
    the create_local_c_file argument was not tested for C++ code::

        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("test.pyx", 'w') as f:
        ....:     _ = f.write("# distutils: language = c++\n"
        ....:     "from libcpp.vector cimport vector\n"
        ....:     "cdef vector[int] * v = new vector[int](4)\n")
        sage: output = sage.misc.cython.cython("test.pyx", create_local_c_file=True)

    Accessing a ``.pxd`` file from the current directory works::

        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("helper.pxd", 'w') as f:
        ....:     _ = f.write("cdef inline int the_answer(): return 42")
        sage: cython('''
        ....: from helper cimport the_answer
        ....: print(the_answer())
        ....: ''')
        42

    Warning and error messages generated by Cython are properly handled.
    Warnings are only shown if verbose >= 0::

        sage: code = '''
        ....: def test_unreachable():
        ....:     raise Exception
        ....:     return 42
        ....: '''
        sage: cython(code, verbose=-1)
        sage: cython(code, verbose=0)
        warning: ...:4:4: Unreachable code

        sage: cython("foo = bar\n")
        Traceback (most recent call last):
        ...
        RuntimeError: Error compiling Cython file:
        ------------------------------------------------------------
        ...
        foo = bar
             ^
        ------------------------------------------------------------
        <BLANKLINE>
        ...:1:6: undeclared name not builtin: bar

        sage: cython("cdef extern from 'no_such_header_file': pass")
        Traceback (most recent call last):
        ...
        RuntimeError: ...
    """
    if not filename.endswith('pyx'):
        print("Warning: file (={}) should have extension .pyx".format(filename),
              file=sys.stderr)

    # base is the name of the .so module that we create. If we are
    # creating a local shared object file, we use a more natural
    # naming convention. If we are not creating a local shared object
    # file, the main constraint is that it is unique and determined by
    # the file that we're running Cython on, so that in some cases we
    # can cache the result (e.g., recompiling the same pyx file during
    # the same session).
    if create_local_so_file:
        base, ext = os.path.splitext(os.path.basename(filename))
    else:
        base = os.path.abspath(filename)
    base = sanitize(base)

    # This is the *temporary* directory where we store the pyx file.
    # This is deleted when Sage exits, which means pyx files must be
    # rebuilt every time Sage is restarted at present.
    target_dir = os.path.join(SPYX_TMP, base)

    # Build directory for Cython/distutils
    build_dir = os.path.join(target_dir, "build")

    if os.path.exists(target_dir):
        # There is already a module here. Maybe we do not have to rebuild?
        # Find the name.
        if use_cache:
            from sage.misc.sageinspect import loadable_module_extension
            prev_so = [F for F in os.listdir(target_dir)
                       if F.endswith(loadable_module_extension())]
            if len(prev_so) > 0:
                prev_so = prev_so[0]  # should have length 1 because of deletes below
                # Only reuse the cached .so if it is at least as new as the
                # source file.
                if os.path.getmtime(filename) <= os.path.getmtime(
                        '%s/%s' % (target_dir, prev_so)):
                    # We do not have to rebuild.
                    return prev_so[:-len(loadable_module_extension())], target_dir

        # Delete all ordinary files in target_dir
        for F in os.listdir(target_dir):
            G = os.path.join(target_dir, F)
            if os.path.isdir(G):
                continue
            try:
                os.unlink(G)
            except OSError:
                pass
    else:
        sage_makedirs(target_dir)

    if create_local_so_file:
        name = base
    else:
        # Make the module name unique per compilation of the same file,
        # so re-cythonizing within one session produces a fresh module.
        global sequence_number
        if base not in sequence_number:
            sequence_number[base] = 0
        name = '%s_%s' % (base, sequence_number[base])

        # increment the sequence number so will use a different one next time.
        sequence_number[base] += 1

    if compile_message:
        sys.stderr.write("Compiling {}...\n".format(filename))
        sys.stderr.flush()

    # Copy original file to the target directory.
    pyxfile = os.path.join(target_dir, name + ".pyx")
    shutil.copy(filename, pyxfile)

    # Add current working directory to includes. This is needed because
    # we cythonize from a different directory. See Trac #24764.
    includes = [os.getcwd()] + sage_include_directories()

    # Now do the actual build, directly calling Cython and distutils
    from Cython.Build import cythonize
    from Cython.Compiler.Errors import CompileError
    import Cython.Compiler.Options
    from distutils.dist import Distribution
    from distutils.core import Extension
    from distutils.log import set_verbosity
    set_verbosity(verbose)

    Cython.Compiler.Options.annotate = annotate
    Cython.Compiler.Options.embed_pos_in_docstring = True
    Cython.Compiler.Options.pre_import = "sage.all" if sage_namespace else None

    ext = Extension(name,
                    sources=[pyxfile],
                    extra_compile_args=["-w"],  # no warnings
                    libraries=standard_libs,
                    library_dirs=standard_libdirs)

    directives = dict(language_level=sys.version_info[0])

    try:
        # Change directories to target_dir so that Cython produces the correct
        # relative path; https://trac.sagemath.org/ticket/24097
        with restore_cwd(target_dir):
            try:
                ext, = cythonize([ext],
                                 aliases=cython_aliases(),
                                 include_path=includes,
                                 compiler_directives=directives,
                                 quiet=(verbose <= 0),
                                 errors_to_stderr=False,
                                 use_listing_file=True)
            finally:
                # Read the "listing file" which is the file containing
                # warning and error messages generated by Cython.
                try:
                    with open(name + ".lis") as f:
                        cython_messages = f.read()
                except IOError:
                    cython_messages = "Error compiling Cython file"
    except CompileError:
        raise RuntimeError(cython_messages.strip())

    if verbose >= 0:
        sys.stderr.write(cython_messages)
        sys.stderr.flush()

    if create_local_c_file:
        shutil.copy(os.path.join(target_dir, ext.sources[0]), os.curdir)
        if annotate:
            shutil.copy(os.path.join(target_dir, name + ".html"), os.curdir)

    # This emulates running "setup.py build" with the correct options
    dist = Distribution()
    dist.ext_modules = [ext]
    dist.include_dirs = includes
    buildcmd = dist.get_command_obj("build")
    buildcmd.build_base = build_dir
    buildcmd.build_lib = target_dir

    try:
        # Capture errors from distutils and its child processes
        with open(os.path.join(target_dir, name + ".err"), 'w+') as errfile:
            try:
                # Redirect stderr to errfile. We use the file descriptor
                # number "2" instead of "sys.stderr" because we really
                # want to redirect the messages from GCC. These are sent
                # to the actual stderr, regardless of what sys.stderr is.
                sys.stderr.flush()
                with redirection(2, errfile, close=False):
                    dist.run_command("build")
            finally:
                errfile.seek(0)
                distutils_messages = errfile.read()
    except Exception as msg:
        msg = str(msg) + "\n" + distutils_messages
        raise RuntimeError(msg.strip())

    if verbose >= 0:
        sys.stderr.write(distutils_messages)
        sys.stderr.flush()

    if create_local_so_file:
        # Copy module to current directory
        from sage.misc.sageinspect import loadable_module_extension
        shutil.copy(os.path.join(target_dir, name + loadable_module_extension()),
                    os.curdir)

    return name, target_dir
# --- distutils options remaining in args. # --- This needs to be done for generating builddir since the distutils # --- options may affect its value. sys.argv = ['Forthon','build'] if build_base: sys.argv += ['--build-base',build_base] if not dobuild: sys.argv += ['install'] sys.argv += args # --- Find the location of the build directory. There must be a better way # --- of doing this. if builddir is None: dummydist = Distribution() dummydist.parse_command_line() dummybuild = dummydist.get_command_obj('build') dummybuild.finalize_options() builddir = dummybuild.build_temp bb = builddir.split(os.sep) upbuilddir = len(bb)*(os.pardir + os.sep) del dummydist,dummybuild,bb else: upbuilddir = os.getcwd() if dobuild: # --- Add the build-temp option. This is needed since distutils would otherwise # --- put the object files from compiling the pkgnamepy.c file in a temp # --- directory relative to the file. build_temp defaults to an empty string, # --- so the .o files are put in the same place as the .c files. sys.argv += ['--build-temp',build_temp]
def install_connector(root_dir, install_dir, protobuf_include_dir,
                      protobuf_lib_dir, protoc, connc_location=None,
                      extra_compile_args=None, extra_link_args=None,
                      debug=False):
    """Install Connector/Python in working directory

    Runs ``setup.py clean --all`` plus ``setup.py install`` in a
    subprocess; on failure the combined output is written to a log file
    and the interpreter exits with status 1.

    :param root_dir: directory containing setup.py (cwd of the subprocess)
    :param install_dir: installation --root target (removed first)
    :param protobuf_include_dir: protobuf headers (forwarded if any of the
        three protobuf options is set)
    :param protobuf_lib_dir: protobuf libraries (forwarded likewise)
    :param protoc: path to the protoc binary (forwarded likewise)
    :param connc_location: optional --with-mysql-capi location
    :param extra_compile_args: optional extra compiler arguments
    :param extra_link_args: optional extra linker arguments
    :param debug: if True, echo the log file contents on failure
    """
    logfile = 'myconnpy_install.log'
    LOGGER.info("Installing Connector/Python in {0}".format(install_dir))
    try:
        # clean up previous run
        if os.path.exists(logfile):
            os.unlink(logfile)
        shutil.rmtree(install_dir)
    except OSError:
        pass

    cmd = [
        sys.executable,
        'setup.py',
        'clean', '--all',  # necessary for removing the build/
    ]

    dist = Distribution()
    cmd_build = dist.get_command_obj('build')
    cmd_build.ensure_finalized()

    cmd.extend([
        'install',
        '--root', install_dir,
        '--install-lib', '.',
        '--static',
        '--is-wheel'
    ])

    if os.name == 'nt':
        cmd.extend([
            '--install-data', cmd_build.build_platlib
        ])

    if any((protobuf_include_dir, protobuf_lib_dir, protoc)):
        cmd.extend([
            '--with-protobuf-include-dir', protobuf_include_dir,
            '--with-protobuf-lib-dir', protobuf_lib_dir,
            '--with-protoc', protoc,
        ])

    if connc_location:
        cmd.extend(['--with-mysql-capi', connc_location])

    if extra_compile_args:
        cmd.extend(['--extra-compile-args', extra_compile_args])

    if extra_link_args:
        cmd.extend(['--extra-link-args', extra_link_args])

    LOGGER.debug("Installing command: {0}".format(cmd))
    prc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           stdout=subprocess.PIPE, cwd=root_dir)
    stdout = prc.communicate()[0]
    # BUG FIX: `prc.returncode is not 0` compared object identity with an
    # int literal, which relies on CPython's small-int caching and raises
    # SyntaxWarning on Python >= 3.8; compare by value instead.
    if prc.returncode != 0:
        with open(logfile, 'wb') as logfp:
            logfp.write(stdout)
        LOGGER.error("Failed installing Connector/Python, see {log}".format(
            log=logfile))
        if debug:
            with open(logfile) as logfr:
                print(logfr.read())
        sys.exit(1)
]


# FIXME(review): this region contains UNRESOLVED MERGE CONFLICT markers
# (<<<<<<< HEAD / ======= / >>>>>>>) and is also truncated at the end of
# this chunk; the file will not parse until the conflict is resolved.
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
>>>>>>> 54eef0be98b1b67c8507db91f4cfa90b64991027
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

<<<<<<< HEAD
    d = Distribution({'name': dist_name})
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir or
    # user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
=======
    # The other side of the conflict honors `isolated` by suppressing
    # user configuration files.
    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)
    d = Distribution(dist_args)
    d.parse_config_files()
def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False,
               pyxbuild_dir=None, setup_args=None, reload_support=False):
    """Compile a PYX file to a DLL and return the name of the generated .so
    or .dll .

    :param filename: path to the ``.pyx`` (or ``.py``) file to compile
    :param ext: optional prebuilt ``Extension`` describing the module
    :param force_rebuild: if true, pass ``--force`` to ``build_ext``
    :param build_in_temp: if true (and Cython is available), keep the
        generated C file in the temporary build directory
    :param pyxbuild_dir: base directory for the build (defaults to a
        ``_pyxbld`` directory next to the source file)
    :param setup_args: optional extra distutils arguments; only
        ``script_args`` is consulted here
    :param reload_support: if true, copy the built module to a numbered
        ``.reloadN`` path so a rebuilt module can be re-imported
    :return: path of the built shared library
    """
    assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)

    # BUG FIX: a mutable default argument ({}) was shared across calls;
    # use a None sentinel and build a fresh dict per call.
    if setup_args is None:
        setup_args = {}

    path, name = os.path.split(filename)

    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension in (".pyx", ".py"), extension
        if not HAS_CYTHON:
            filename = filename[:-len(extension)] + '.c'
        ext = Extension(name=modname, sources=[filename])

    if not pyxbuild_dir:
        pyxbuild_dir = os.path.join(path, "_pyxbld")

    script_args = setup_args.get("script_args", [])
    if DEBUG or "--verbose" in script_args:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    if HAS_CYTHON and build_in_temp:
        args.append("--pyrex-c-in-temp")
    sargs = setup_args.copy()
    sargs.update({"script_name": None, "script_args": args + script_args})

    dist = Distribution(sargs)
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    if HAS_CYTHON:
        dist.cmdclass = {'build_ext': build_ext}
    build = dist.get_command_obj('build')
    build.build_base = pyxbuild_dir

    # BUG FIX: the original located and parsed the config files twice in a
    # row (config_files, then cfgfiles); parse them exactly once, still
    # skipping any local setup.cfg.
    config_files = dist.find_config_files()
    try:
        config_files.remove('setup.cfg')
    except ValueError:
        pass
    dist.parse_config_files(config_files)

    try:
        ok = dist.parse_command_line()
    except DistutilsArgError:
        raise
    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()
    assert ok

    try:
        dist.run_commands()
        obj_build_ext = dist.get_command_obj("build_ext")
        so_path = obj_build_ext.get_outputs()[0]
        if obj_build_ext.inplace:
            # Python distutils get_outputs()[ returns a wrong so_path
            # when --inplace ; see http://bugs.python.org/issue5977
            # workaround:
            so_path = os.path.join(os.path.dirname(filename),
                                   os.path.basename(so_path))
        if reload_support:
            org_path = so_path
            timestamp = os.path.getmtime(org_path)
            global _reloads
            last_timestamp, last_path, count = _reloads.get(org_path,
                                                            (None, None, 0))
            if last_timestamp == timestamp:
                so_path = last_path
            else:
                basename = os.path.basename(org_path)
                while count < 100:
                    count += 1
                    r_path = os.path.join(obj_build_ext.build_lib,
                                          basename + '.reload%s' % count)
                    try:
                        import shutil  # late import / reload_support is: debugging
                        shutil.copy2(org_path, r_path)
                        so_path = r_path
                    except IOError:
                        continue
                    break
                else:
                    # used up all 100 slots
                    raise ImportError("reload count for %s reached maximum" % org_path)
                _reloads[org_path] = (timestamp, so_path, count)
        return so_path
    except KeyboardInterrupt:
        sys.exit(1)
    except (IOError, os.error):
        exc = sys.exc_info()[1]
        error = grok_environment_error(exc)
        if DEBUG:
            sys.stderr.write(error + "\n")
        raise
def cython(filename, verbose=0, compile_message=False, use_cache=False,
           create_local_c_file=False, annotate=True, sage_namespace=True,
           create_local_so_file=False):
    r"""
    Compile a Cython file. This converts a Cython file to a C (or C++ file),
    and then compiles that. The .c file and the .so file are created in a
    temporary directory.

    INPUT:

    - ``filename`` -- the name of the file to be compiled. Should end with
      'pyx'.

    - ``verbose`` (integer, default 0) -- level of verbosity.

    - ``compile_message`` (bool, default False) -- if True, print
      ``'Compiling <filename>...'`` to the standard error.

    - ``use_cache`` (bool, default False) -- if True, check the temporary
      build directory to see if there is already a corresponding .so file. If
      so, and if the .so file is newer than the Cython file, don't recompile,
      just reuse the .so file.

    - ``create_local_c_file`` (bool, default False) -- if True, save a copy
      of the ``.c`` or ``.cpp`` file in the current directory.

    - ``annotate`` (bool, default True) -- if True, create an html file which
      annotates the conversion from .pyx to .c. By default this is only
      created in the temporary directory, but if ``create_local_c_file`` is
      also True, then save a copy of the .html file in the current directory.

    - ``sage_namespace`` (bool, default True) -- if True, import
      ``sage.all``.

    - ``create_local_so_file`` (bool, default False) -- if True, save a copy
      of the compiled .so file in the current directory.

    TESTS:

    Before :trac:`12975`, it would have been needed to write ``#clang c++``,
    but upper case ``C++`` has resulted in an error.
    Using pkgconfig to find the libraries, headers and macros. This is a
    work around while waiting for :trac:`22461` which will offer a better
    solution::

        sage: code = [
        ....: "#clang C++",
        ....: "from sage.rings.polynomial.multi_polynomial_libsingular cimport MPolynomial_libsingular",
        ....: "from sage.libs.singular.polynomial cimport singular_polynomial_pow",
        ....: "def test(MPolynomial_libsingular p):",
        ....: "    singular_polynomial_pow(&p._poly, p._poly, 2, p._parent_ring)"]
        sage: cython(os.linesep.join(code))

    The function ``test`` now manipulates internal C data of polynomials,
    squaring them::

        sage: P.<x,y>=QQ[]
        sage: test(x)
        sage: x
        x^2

    Check that compiling C++ code works::

        sage: cython("# distutils: language = c++\n"+
        ....: "from libcpp.vector cimport vector\n"
        ....: "cdef vector[int] * v = new vector[int](4)\n")

    Check that compiling C++ code works when creating a local C file,
    first moving to a tempdir to avoid clutter.  Before :trac:`22113`,
    the create_local_c_file argument was not tested for C++ code::

        sage: import sage.misc.cython
        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("test.pyx", 'w') as f:
        ....:     _ = f.write("# distutils: language = c++\n"
        ....:     "from libcpp.vector cimport vector\n"
        ....:     "cdef vector[int] * v = new vector[int](4)\n")
        sage: output = sage.misc.cython.cython("test.pyx", create_local_c_file=True)

    Sage used to automatically include various ``.pxi`` files. Since
    :trac:`22805`, we no longer do this. But we make sure to give a
    useful message in case the ``.pxi`` files were needed::

        sage: cython("sig_malloc(0)")
        Traceback (most recent call last):
        ...
        RuntimeError: Error converting ... to C
        NOTE: Sage no longer automatically includes the deprecated files
        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
        You can fix your code by adding "from cysignals.memory cimport sig_malloc".
    """
    if not filename.endswith('pyx'):
        print("Warning: file (={}) should have extension .pyx".format(filename),
              file=sys.stderr)

    # base is the name of the .so module that we create. If we are
    # creating a local shared object file, we use a more natural
    # naming convention. If we are not creating a local shared object
    # file, the main constraint is that it is unique and determined by
    # the file that we're running Cython on, so that in some cases we
    # can cache the result (e.g., recompiling the same pyx file during
    # the same session).
    if create_local_so_file:
        base, ext = os.path.splitext(os.path.basename(filename))
        base = sanitize(base)
    else:
        base = sanitize(os.path.abspath(filename))

    # This is the *temporary* directory where we store the pyx file.
    # This is deleted when Sage exits, which means pyx files must be
    # rebuilt every time Sage is restarted at present.
    target_dir = os.path.join(SPYX_TMP, base)

    # Build directory for Cython/distutils
    build_dir = os.path.join(target_dir, "build")

    if os.path.exists(target_dir):
        # There is already a module here. Maybe we do not have to rebuild?
        # Find the name.
        if use_cache:
            from sage.misc.sageinspect import loadable_module_extension
            prev_so = [F for F in os.listdir(target_dir)
                       if F.endswith(loadable_module_extension())]
            if len(prev_so) > 0:
                prev_so = prev_so[0]  # should have length 1 because of deletes below
                # Only reuse the cached .so if it is at least as new as the
                # source file.
                if os.path.getmtime(filename) <= os.path.getmtime(
                        '%s/%s' % (target_dir, prev_so)):
                    # We do not have to rebuild.
                    return prev_so[:-len(loadable_module_extension())], target_dir

        # Delete all ordinary files in target_dir
        for F in os.listdir(target_dir):
            G = os.path.join(target_dir, F)
            if os.path.isdir(G):
                continue
            try:
                os.unlink(G)
            except OSError:
                pass
    else:
        sage_makedirs(target_dir)

    if create_local_so_file:
        name = base
    else:
        # Make the module name unique per compilation of the same file,
        # so re-cythonizing within one session produces a fresh module.
        global sequence_number
        if base not in sequence_number:
            sequence_number[base] = 0
        name = '%s_%s' % (base, sequence_number[base])

        # increment the sequence number so will use a different one next time.
        sequence_number[base] += 1

    if compile_message:
        print("Compiling {}...".format(filename), file=sys.stderr)

    with open(filename) as f:
        (preparsed, libs, includes, language, additional_source_files,
         extra_args, libdirs) = _pyx_preparse(f.read())

    # New filename with preparsed code.
    # NOTE: if we ever stop preparsing, we should still copy the
    # original file to the target directory.
    pyxfile = os.path.join(target_dir, name + ".pyx")
    with open(pyxfile, 'w') as f:
        f.write(preparsed)

    extra_sources = []
    for fname in additional_source_files:
        fname = fname.replace("$SAGE_SRC", SAGE_SRC)
        fname = fname.replace("$SAGE_LOCAL", SAGE_LOCAL)
        extra_sources.append(fname)

    # Now do the actual build, directly calling Cython and distutils
    from Cython.Build import cythonize
    from Cython.Compiler.Errors import CompileError
    import Cython.Compiler.Options
    from distutils.dist import Distribution
    from distutils.core import Extension
    from distutils.log import set_verbosity
    set_verbosity(verbose)

    Cython.Compiler.Options.annotate = annotate
    Cython.Compiler.Options.embed_pos_in_docstring = True
    Cython.Compiler.Options.pre_import = "sage.all" if sage_namespace else None

    ext = Extension(name,
                    sources=[pyxfile] + extra_sources,
                    libraries=libs,
                    library_dirs=[os.path.join(SAGE_LOCAL, "lib")] + libdirs,
                    extra_compile_args=extra_args,
                    language=language)

    orig_cwd = os.getcwd()
    try:
        # Change directories to target_dir so that Cython produces the correct
        # relative path; https://trac.sagemath.org/ticket/24097
        os.chdir(target_dir)
        ext, = cythonize([ext],
                         aliases=cython_aliases(),
                         include_path=includes,
                         quiet=not verbose)
    except CompileError:
        # Check for names in old_pxi_names
        note = ''
        for pxd, names in old_pxi_names.items():
            for name in names:
                if re.search(r"\b{}\b".format(name), preparsed):
                    note += dedent("""
NOTE: Sage no longer automatically includes the deprecated files
"cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
You can fix your code by adding "from {} cimport {}".
""".format(pxd, name))
        raise RuntimeError("Error converting {} to C".format(filename) + note)
    finally:
        os.chdir(orig_cwd)

    if create_local_c_file:
        shutil.copy(os.path.join(target_dir, ext.sources[0]), os.curdir)
        if annotate:
            shutil.copy(os.path.join(target_dir, name + ".html"), os.curdir)

    # This emulates running "setup.py build" with the correct options
    dist = Distribution()
    dist.ext_modules = [ext]
    dist.include_dirs = includes
    buildcmd = dist.get_command_obj("build")
    buildcmd.build_base = build_dir
    buildcmd.build_lib = target_dir
    dist.run_command("build")

    if create_local_so_file:
        # Copy module to current directory
        from sage.misc.sageinspect import loadable_module_extension
        shutil.copy(os.path.join(target_dir, name + loadable_module_extension()),
                    os.curdir)

    return name, target_dir
'grdinit.o', 'grdread.o', 'grdwrit.o', 'daspk.o', 'nksol.o', 'svrut1.o',
'svrut2.o', 'svrut3.o', 'svrut4.o', 'vodpk.o', 'uoa.o', 'dsum.o',
'dummy_py.o', 'error.o', 'getmsg.o', 'ssum.o', 'daux1.o', 'wdf.o'
]
# NOTE(review): the opening of the object-file list above lies outside
# this chunk; `petsc`, `parallel`, `fcompiler` and possibly `builddir`
# are defined earlier in the script. The trailing `if petsc:` at the end
# of this fragment is also continued outside the chunk.
if petsc:
    uedgeobjects = uedgeobjects + ['petsc-uedge.o', 'petscMod.o']
if parallel:
    # add extra dot o's needed if we're parallel
    uedgeobjects = uedgeobjects + []
else:
    # Ask a throwaway distutils build command where it would place its
    # temporary files, and use that as the build directory.
    dummydist = Distribution()
    dummydist.parse_command_line()
    dummybuild = dummydist.get_command_obj('build')
    dummybuild.finalize_options()
    builddir = dummybuild.build_temp

# NOTE(review): on Python 3 `map` returns a lazy iterator, not a list —
# confirm downstream consumers only iterate uedgeobjects once.
uedgeobjects = map(lambda p: os.path.join(builddir, p), uedgeobjects)

# NOTE(review): `is not None` would be the idiomatic comparison below.
if os.getenv('PACT_DIR') != None:
    library_dirs = fcompiler.libdirs + [
        os.path.join(os.getenv('PACT_DIR'), 'lib')
    ]
    libraries = ['pdb', 'pml', 'score', 'blas', 'm'] + fcompiler.libs
else:
    library_dirs = fcompiler.libdirs
    libraries = fcompiler.libs
if petsc:
def install_connector(root_dir, install_dir, protobuf_include_dir,
                      protobuf_lib_dir, protoc, connc_location=None,
                      extra_compile_args=None, extra_link_args=None,
                      debug=False):
    """Install Connector/Python in working directory

    Spawns ``setup.py clean --all`` followed by ``setup.py install``; if
    the subprocess fails, its output is saved to a log file and the
    interpreter exits with status 1.

    :param root_dir: directory containing setup.py (cwd of the subprocess)
    :param install_dir: installation --root target (removed first)
    :param protobuf_include_dir: protobuf headers (forwarded if any of the
        three protobuf options is set)
    :param protobuf_lib_dir: protobuf libraries (forwarded likewise)
    :param protoc: path to the protoc binary (forwarded likewise)
    :param connc_location: optional --with-mysql-capi location
    :param extra_compile_args: optional extra compiler arguments
    :param extra_link_args: optional extra linker arguments
    :param debug: if True, echo the log file contents on failure
    """
    logfile = 'myconnpy_install.log'
    LOGGER.info("Installing Connector/Python in {0}".format(install_dir))
    try:
        # clean up previous run
        if os.path.exists(logfile):
            os.unlink(logfile)
        shutil.rmtree(install_dir)
    except OSError:
        pass

    cmd = [
        sys.executable,
        'setup.py',
        'clean', '--all',  # necessary for removing the build/
    ]

    dist = Distribution()
    cmd_build = dist.get_command_obj('build')
    cmd_build.ensure_finalized()

    cmd.extend([
        'install',
        '--root', install_dir,
        '--install-lib', '.',
        '--static',
        '--is-wheel'
    ])

    if os.name == 'nt':
        cmd.extend([
            '--install-data', cmd_build.build_platlib
        ])

    if any((protobuf_include_dir, protobuf_lib_dir, protoc)):
        cmd.extend([
            '--with-protobuf-include-dir', protobuf_include_dir,
            '--with-protobuf-lib-dir', protobuf_lib_dir,
            '--with-protoc', protoc,
        ])

    if connc_location:
        cmd.extend(['--with-mysql-capi', connc_location])

    if extra_compile_args:
        cmd.extend(['--extra-compile-args', extra_compile_args])

    if extra_link_args:
        cmd.extend(['--extra-link-args', extra_link_args])

    LOGGER.debug("Installing command: {0}".format(cmd))
    prc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           stdout=subprocess.PIPE, cwd=root_dir)
    stdout = prc.communicate()[0]
    # BUG FIX: `prc.returncode is not 0` compared object identity with an
    # int literal (SyntaxWarning on Python >= 3.8); compare by value.
    if prc.returncode != 0:
        with open(logfile, 'wb') as logfp:
            logfp.write(stdout)
        LOGGER.error("Failed installing Connector/Python, see {log}".format(
            log=logfile))
        if debug:
            with open(logfile) as logfr:
                print(logfr.read())
        sys.exit(1)
def c_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False,
             cbuild_dir=None, setup_args=None, reload_support=False,
             inplace=False):
    """Compile a C file to a DLL and return the name of the generated .so
    or .dll .

    :param filename: path to the ``.c`` (or ``.py``) file to compile
    :param ext: optional prebuilt ``Extension`` describing the module
    :param force_rebuild: if true, pass ``--force`` to ``build_ext``
    :param build_in_temp: if true (and Cython is available), keep the
        generated C file in the temporary build directory
    :param cbuild_dir: base directory for the build (defaults to a
        ``_cbld`` directory next to the source file)
    :param setup_args: optional extra distutils arguments; only
        ``script_args`` is consulted here
    :param reload_support: if true, copy the built module to a numbered
        ``.reloadN`` path so a rebuilt module can be re-imported
    :param inplace: if true, build into the package directory
    :return: path of the built shared library
    """
    assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)

    # BUG FIX: a mutable default argument ({}) was shared across calls;
    # use a None sentinel and build a fresh dict per call.
    if setup_args is None:
        setup_args = {}

    path, name = os.path.split(os.path.abspath(filename))

    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension in (".c", ".py"), extension
        if not HAS_CYTHON:
            filename = filename[:-len(extension)] + '.c'
        ext = Extension(name=modname, sources=[filename])

    if not cbuild_dir:
        cbuild_dir = os.path.join(path, "_cbld")

    # Walk up the directory tree to confirm the file path matches the
    # dotted package name; otherwise in-place builds are disabled.
    package_base_dir = path
    for package_name in ext.name.split('.')[-2::-1]:
        package_base_dir, pname = os.path.split(package_base_dir)
        if pname != package_name:
            # something is wrong - package path doesn't match file path
            package_base_dir = None
            break

    script_args = setup_args.get("script_args", [])
    if DEBUG or "--verbose" in script_args:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    if inplace and package_base_dir:
        args.extend(['--build-lib', package_base_dir])
    if ext.name == '__init__' or ext.name.endswith('.__init__'):
        # package => provide __path__ early
        if not hasattr(ext, 'cython_directives'):
            ext.cython_directives = {'set_initial_path': 'SOURCEFILE'}
        elif 'set_initial_path' not in ext.cython_directives:
            ext.cython_directives['set_initial_path'] = 'SOURCEFILE'
    if HAS_CYTHON and build_in_temp:
        args.append("--pyrex-c-in-temp")
    sargs = setup_args.copy()
    sargs.update({
        "script_name": None,
        "script_args": args + script_args,
    })
    # late import, in case setuptools replaced it
    from distutils.dist import Distribution
    dist = Distribution(sargs)
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    if HAS_CYTHON:
        dist.cmdclass = {'build_ext': build_ext}
    build = dist.get_command_obj('build')
    build.build_base = cbuild_dir

    cfgfiles = dist.find_config_files()
    dist.parse_config_files(cfgfiles)

    try:
        ok = dist.parse_command_line()
    except DistutilsArgError:
        raise
    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()
    assert ok

    try:
        obj_build_ext = dist.get_command_obj("build_ext")
        dist.run_commands()
        so_path = obj_build_ext.get_outputs()[0]
        if obj_build_ext.inplace:
            # Python distutils get_outputs()[ returns a wrong so_path
            # when --inplace ; see http://bugs.python.org/issue5977
            # workaround:
            so_path = os.path.join(os.path.dirname(filename),
                                   os.path.basename(so_path))
        if reload_support:
            org_path = so_path
            timestamp = os.path.getmtime(org_path)
            global _reloads
            last_timestamp, last_path, count = _reloads.get(org_path,
                                                            (None, None, 0))
            if last_timestamp == timestamp:
                so_path = last_path
            else:
                basename = os.path.basename(org_path)
                while count < 100:
                    count += 1
                    r_path = os.path.join(obj_build_ext.build_lib,
                                          basename + '.reload%s' % count)
                    try:
                        import shutil  # late import / reload_support is: debugging
                        try:
                            # Try to unlink first --- if the .so file
                            # is mmapped by another process,
                            # overwriting its contents corrupts the
                            # loaded image (on Linux) and crashes the
                            # other process. On Windows, unlinking an
                            # open file just fails.
                            if os.path.isfile(r_path):
                                os.unlink(r_path)
                        except OSError:
                            continue
                        shutil.copy2(org_path, r_path)
                        so_path = r_path
                    except IOError:
                        continue
                    break
                else:
                    # used up all 100 slots
                    raise ImportError("reload count for %s reached maximum" % org_path)
                _reloads[org_path] = (timestamp, so_path, count)
        return so_path
    except KeyboardInterrupt:
        sys.exit(1)
    except (IOError, os.error):
        exc = sys.exc_info()[1]
        error = grok_environment_error(exc)
        if DEBUG:
            sys.stderr.write(error + "\n")
        raise
def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False,
               pyxbuild_dir=None, setup_args=None, reload_support=False,
               inplace=False):
    """Compile a PYX file to a DLL and return the path of the generated
    .so or .dll file.

    :param filename: path to a ``.pyx`` (or ``.py``) source file
    :param ext: optional pre-built ``Extension``; derived from ``filename``
        when omitted
    :param force_rebuild: pass ``--force`` to ``build_ext`` when truthy
    :param build_in_temp: with Cython available, keep generated C files in
        the temporary build directory (``--pyrex-c-in-temp``)
    :param pyxbuild_dir: build base directory; defaults to ``_pyxbld`` next
        to the source file
    :param setup_args: optional extra distutils setup keyword arguments
        (read only; never mutated)
    :param reload_support: copy the built module to a ``.reloadN`` file so
        it can be re-imported without clobbering an mmapped image
    :param inplace: build into the package directory when the file lives
        inside a package
    :raises RuntimeError: if distutils command-line parsing fails
    """
    assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)

    # BUG FIX: the original used a mutable default argument (setup_args={});
    # use None as the default so no dict is shared across calls.
    if setup_args is None:
        setup_args = {}

    path, name = os.path.split(os.path.abspath(filename))

    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension in (".pyx", ".py"), extension
        if not HAS_CYTHON:
            filename = filename[:-len(extension)] + '.c'
        ext = Extension(name=modname, sources=[filename])

    if not pyxbuild_dir:
        pyxbuild_dir = os.path.join(path, "_pyxbld")

    # Walk the dotted module name backwards to locate the package base
    # directory; give up if the file path does not match the package path.
    package_base_dir = path
    for package_name in ext.name.split('.')[-2::-1]:
        package_base_dir, pname = os.path.split(package_base_dir)
        if pname != package_name:
            # something is wrong - package path doesn't match file path
            package_base_dir = None
            break

    script_args = setup_args.get("script_args", [])
    if DEBUG or "--verbose" in script_args:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    if inplace and package_base_dir:
        args.extend(['--build-lib', package_base_dir])
    if ext.name == '__init__' or ext.name.endswith('.__init__'):
        # package => provide __path__ early
        if not hasattr(ext, 'cython_directives'):
            ext.cython_directives = {'set_initial_path': 'SOURCEFILE'}
        elif 'set_initial_path' not in ext.cython_directives:
            ext.cython_directives['set_initial_path'] = 'SOURCEFILE'

    if HAS_CYTHON and build_in_temp:
        args.append("--pyrex-c-in-temp")
    sargs = setup_args.copy()
    sargs.update({"script_name": None, "script_args": args + script_args})
    dist = Distribution(sargs)
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    if HAS_CYTHON:
        dist.cmdclass = {'build_ext': build_ext}
    build = dist.get_command_obj('build')
    build.build_base = pyxbuild_dir

    cfgfiles = dist.find_config_files()
    dist.parse_config_files(cfgfiles)
    # (The original wrapped this call in a try/except that only re-raised
    # DistutilsArgError unchanged -- a no-op wrapper, removed.)
    ok = dist.parse_command_line()
    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()
    if not ok:
        # Explicit error instead of `assert ok`: asserts disappear under -O,
        # and this matches build_so()'s failure behaviour.
        raise RuntimeError('Build cannot continue')

    try:
        obj_build_ext = dist.get_command_obj("build_ext")
        dist.run_commands()
        so_path = obj_build_ext.get_outputs()[0]
        if obj_build_ext.inplace:
            # Python distutils get_outputs() returns a wrong so_path
            # when --inplace ; see http://bugs.python.org/issue5977
            # workaround:
            so_path = os.path.join(os.path.dirname(filename),
                                   os.path.basename(so_path))
        if reload_support:
            org_path = so_path
            timestamp = os.path.getmtime(org_path)
            global _reloads
            last_timestamp, last_path, count = _reloads.get(org_path, (None, None, 0))
            if last_timestamp == timestamp:
                so_path = last_path
            else:
                basename = os.path.basename(org_path)
                while count < 100:
                    count += 1
                    r_path = os.path.join(obj_build_ext.build_lib,
                                          basename + '.reload%s' % count)
                    try:
                        import shutil  # late import / reload_support is: debugging
                        try:
                            # Try to unlink first --- if the .so file
                            # is mmapped by another process,
                            # overwriting its contents corrupts the
                            # loaded image (on Linux) and crashes the
                            # other process. On Windows, unlinking an
                            # open file just fails.
                            if os.path.isfile(r_path):
                                os.unlink(r_path)
                        except OSError:
                            continue
                        shutil.copy2(org_path, r_path)
                        so_path = r_path
                    except IOError:
                        continue
                    break
                else:
                    # used up all 100 slots
                    raise ImportError("reload count for %s reached maximum" % org_path)
                _reloads[org_path] = (timestamp, so_path, count)
        return so_path
    except KeyboardInterrupt:
        sys.exit(1)
    except (IOError, os.error):
        exc = sys.exc_info()[1]
        error = grok_environment_error(exc)
        if DEBUG:
            sys.stderr.write(error + "\n")
        raise
def cython(filename, verbose=0, compile_message=False,
           use_cache=False, create_local_c_file=False, annotate=True,
           sage_namespace=True, create_local_so_file=False):
    r"""
    Compile a Cython file. This converts a Cython file to a C (or C++
    file), and then compiles that. The .c file and the .so file are
    created in a temporary directory.

    INPUT:

    - ``filename`` -- the name of the file to be compiled. Should end
      with 'pyx'.

    - ``verbose`` (integer, default 0) -- level of verbosity. A negative
      value ensures complete silence.

    - ``compile_message`` (bool, default False) -- if True, print
      ``'Compiling <filename>...'`` to the standard error.

    - ``use_cache`` (bool, default False) -- if True, check the
      temporary build directory to see if there is already a
      corresponding .so file. If so, and if the .so file is newer than the
      Cython file, don't recompile, just reuse the .so file.

    - ``create_local_c_file`` (bool, default False) -- if True, save a
      copy of the ``.c`` or ``.cpp`` file in the current directory.

    - ``annotate`` (bool, default True) -- if True, create an html file which
      annotates the conversion from .pyx to .c. By default this is only
      created in the temporary directory, but if ``create_local_c_file``
      is also True, then save a copy of the .html file in the current
      directory.

    - ``sage_namespace`` (bool, default True) -- if True, import
      ``sage.all``.

    - ``create_local_so_file`` (bool, default False) -- if True, save a
      copy of the compiled .so file in the current directory.

    OUTPUT: a tuple ``(name, dir)`` where ``name`` is the name of the
    compiled module and ``dir`` is the directory containing the
    generated files.

    TESTS:

    Before :trac:`12975`, it would have been needed to write ``#clang c++``,
    but upper case ``C++`` has resulted in an error.
    Using pkgconfig to find the libraries, headers and macros. This is a
    work around while waiting for :trac:`22461` which will offer a better
    solution::

        sage: code = [
        ....: "#clang C++",
        ....: "from sage.rings.polynomial.multi_polynomial_libsingular cimport MPolynomial_libsingular",
        ....: "from sage.libs.singular.polynomial cimport singular_polynomial_pow",
        ....: "def test(MPolynomial_libsingular p):",
        ....: "    singular_polynomial_pow(&p._poly, p._poly, 2, p._parent_ring)"]
        sage: cython(os.linesep.join(code))

    The function ``test`` now manipulates internal C data of polynomials,
    squaring them::

        sage: P.<x,y>=QQ[]
        sage: test(x)
        sage: x
        x^2

    Check that compiling C++ code works::

        sage: cython("# distutils: language = c++\n"+
        ....: "from libcpp.vector cimport vector\n"
        ....: "cdef vector[int] * v = new vector[int](4)\n")

    Check that compiling C++ code works when creating a local C file,
    first moving to a tempdir to avoid clutter.  Before :trac:`22113`,
    the create_local_c_file argument was not tested for C++ code::

        sage: import sage.misc.cython
        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("test.pyx", 'w') as f:
        ....:     _ = f.write("# distutils: language = c++\n"
        ....:       "from libcpp.vector cimport vector\n"
        ....:       "cdef vector[int] * v = new vector[int](4)\n")
        sage: output = sage.misc.cython.cython("test.pyx", create_local_c_file=True)

    Accessing a ``.pxd`` file from the current directory works::

        sage: import sage.misc.cython
        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("helper.pxd", 'w') as f:
        ....:     f.write("cdef inline int the_answer(): return 42")
        sage: cython('''
        ....: from helper cimport the_answer
        ....: print(the_answer())
        ....: ''')
        42

    Warning and error messages generated by Cython are properly handled.
    Warnings are only shown if verbose >= 0::

        sage: code = '''
        ....: def test_unreachable():
        ....:     raise Exception
        ....:     return 42
        ....: '''
        sage: cython(code, verbose=-1)
        sage: cython(code, verbose=0)
        warning: ...:4:4: Unreachable code

        sage: cython("foo = bar\n")
        Traceback (most recent call last):
        ...
        RuntimeError: Error compiling Cython file:
        ------------------------------------------------------------
        ...
        foo = bar
             ^
        ------------------------------------------------------------
        <BLANKLINE>
        ...:1:6: undeclared name not builtin: bar

        sage: cython("cdef extern from 'no_such_header_file': pass")
        Traceback (most recent call last):
        ...
        RuntimeError: ...

    Sage used to automatically include various ``.pxi`` files. Since
    :trac:`22805`, we no longer do this. But we make sure to give a
    useful message in case the ``.pxi`` files were needed::

        sage: cython("sig_malloc(0)\n")
        Traceback (most recent call last):
        ...
        RuntimeError: Error compiling Cython file:
        ------------------------------------------------------------
        ...
        sig_malloc(0)
        ^
        ------------------------------------------------------------
        <BLANKLINE>
        ...:1:0: undeclared name not builtin: sig_malloc
        <BLANKLINE>
        NOTE: Sage no longer automatically includes the deprecated files
        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
        You can fix your code by adding "from cysignals.memory cimport sig_malloc".
    """
    if not filename.endswith('pyx'):
        print("Warning: file (={}) should have extension .pyx".format(filename), file=sys.stderr)

    # base is the name of the .so module that we create. If we are
    # creating a local shared object file, we use a more natural
    # naming convention. If we are not creating a local shared object
    # file, the main constraint is that it is unique and determined by
    # the file that we're running Cython on, so that in some cases we
    # can cache the result (e.g., recompiling the same pyx file during
    # the same session).
    if create_local_so_file:
        base, ext = os.path.splitext(os.path.basename(filename))
    else:
        base = os.path.abspath(filename)
    base = sanitize(base)

    # This is the *temporary* directory where we store the pyx file.
    # This is deleted when Sage exits, which means pyx files must be
    # rebuilt every time Sage is restarted at present.
    target_dir = os.path.join(SPYX_TMP, base)

    # Build directory for Cython/distutils
    build_dir = os.path.join(target_dir, "build")

    if os.path.exists(target_dir):
        # There is already a module here. Maybe we do not have to rebuild?
        # Find the name.
        if use_cache:
            from sage.misc.sageinspect import loadable_module_extension
            prev_so = [F for F in os.listdir(target_dir)
                       if F.endswith(loadable_module_extension())]
            if len(prev_so) > 0:
                # should have length 1 because of deletes below
                prev_so = prev_so[0]
                if os.path.getmtime(filename) <= os.path.getmtime('%s/%s' % (target_dir, prev_so)):
                    # We do not have to rebuild.
                    return prev_so[:-len(loadable_module_extension())], target_dir

        # Delete all ordinary files in target_dir
        for F in os.listdir(target_dir):
            G = os.path.join(target_dir, F)
            if os.path.isdir(G):
                continue
            try:
                os.unlink(G)
            except OSError:
                pass
    else:
        sage_makedirs(target_dir)

    if create_local_so_file:
        name = base
    else:
        global sequence_number
        if base not in sequence_number:
            sequence_number[base] = 0
        name = '%s_%s' % (base, sequence_number[base])

        # increment the sequence number so will use a different one next time.
        sequence_number[base] += 1

    if compile_message:
        print("Compiling {}...".format(filename), file=sys.stderr)
        sys.stderr.flush()

    # Preparse the Sage-flavoured source into plain Cython plus build
    # metadata (libraries, include dirs, language, extra sources/args).
    with open(filename) as f:
        (preparsed, libs, includes, language, additional_source_files,
         extra_args, libdirs) = _pyx_preparse(f.read())

    # New filename with preparsed code.
    # NOTE: if we ever stop preparsing, we should still copy the
    # original file to the target directory.
    pyxfile = os.path.join(target_dir, name + ".pyx")
    with open(pyxfile, 'w') as f:
        f.write(preparsed)

    # Expand the $SAGE_SRC / $SAGE_LOCAL placeholders in extra sources.
    extra_sources = []
    for fname in additional_source_files:
        fname = fname.replace("$SAGE_SRC", SAGE_SRC)
        fname = fname.replace("$SAGE_LOCAL", SAGE_LOCAL)
        extra_sources.append(fname)

    # Add current working directory to includes. This is needed because
    # we cythonize from a different directory. See Trac #24764.
    includes.insert(0, os.getcwd())

    # Now do the actual build, directly calling Cython and distutils
    from Cython.Build import cythonize
    from Cython.Compiler.Errors import CompileError
    import Cython.Compiler.Options
    from distutils.dist import Distribution
    from distutils.core import Extension
    from distutils.log import set_verbosity
    set_verbosity(verbose)

    Cython.Compiler.Options.annotate = annotate
    Cython.Compiler.Options.embed_pos_in_docstring = True
    Cython.Compiler.Options.pre_import = "sage.all" if sage_namespace else None

    ext = Extension(name,
                    sources=[pyxfile] + extra_sources,
                    libraries=libs,
                    library_dirs=[os.path.join(SAGE_LOCAL, "lib")] + libdirs,
                    extra_compile_args=extra_args,
                    language=language)

    try:
        # Change directories to target_dir so that Cython produces the correct
        # relative path; https://trac.sagemath.org/ticket/24097
        with restore_cwd(target_dir):
            try:
                ext, = cythonize([ext],
                                 aliases=cython_aliases(),
                                 include_path=includes,
                                 quiet=(verbose <= 0),
                                 errors_to_stderr=False,
                                 use_listing_file=True)
            finally:
                # Read the "listing file" which is the file containing
                # warning and error messages generated by Cython.
                try:
                    with open(name + ".lis") as f:
                        cython_messages = f.read()
                except IOError:
                    cython_messages = "Error compiling Cython file"
    except CompileError:
        # Check for names in old_pxi_names
        # NOTE(review): this inner loop rebinds ``name`` (the module name
        # computed above); it is harmless only because this branch always
        # ends in the unconditional raise below.
        for pxd, names in old_pxi_names.items():
            for name in names:
                if re.search(r"\b{}\b".format(name), cython_messages):
                    cython_messages += dedent(
                        """
                        NOTE: Sage no longer automatically includes the deprecated files
                        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
                        You can fix your code by adding "from {} cimport {}".
                        """.format(pxd, name))
        raise RuntimeError(cython_messages.strip())

    if verbose >= 0:
        sys.stderr.write(cython_messages)
        sys.stderr.flush()

    if create_local_c_file:
        shutil.copy(os.path.join(target_dir, ext.sources[0]),
                    os.curdir)
        if annotate:
            shutil.copy(os.path.join(target_dir, name + ".html"),
                        os.curdir)

    # This emulates running "setup.py build" with the correct options
    dist = Distribution()
    dist.ext_modules = [ext]
    dist.include_dirs = includes
    buildcmd = dist.get_command_obj("build")
    buildcmd.build_base = build_dir
    buildcmd.build_lib = target_dir

    try:
        # Capture errors from distutils and its child processes
        with open(os.path.join(target_dir, name + ".err"), 'w+') as errfile:
            try:
                # Redirect stderr to errfile. We use the file descriptor
                # number "2" instead of "sys.stderr" because we really
                # want to redirect the messages from GCC. These are sent
                # to the actual stderr, regardless of what sys.stderr is.
                sys.stderr.flush()
                with redirection(2, errfile, close=False):
                    dist.run_command("build")
            finally:
                errfile.seek(0)
                distutils_messages = errfile.read()
    except Exception as msg:
        msg = str(msg) + "\n" + distutils_messages
        raise RuntimeError(msg.strip())

    if verbose >= 0:
        sys.stderr.write(distutils_messages)
        sys.stderr.flush()

    if create_local_so_file:
        # Copy module to current directory
        from sage.misc.sageinspect import loadable_module_extension
        shutil.copy(os.path.join(target_dir, name + loadable_module_extension()),
                    os.curdir)

    return name, target_dir
class DistutilsBuilder(object):
    """Drive distutils to build C extensions and compiled C libraries.

    A single private ``Distribution`` instance is reused for every build;
    ``build_base`` (when given) is injected into the distribution's option
    dict under the pseudo config source ``"bento"``.
    """

    def __init__(self, verbosity=1, build_base=None):
        # verbosity: forwarded to distutils' logging.
        # build_base: optional base directory for all build output.
        from distutils.dist import Distribution
        from distutils import log

        log.set_verbosity(verbosity)

        self._dist = Distribution()
        self._compilers = {}
        self._cmds = {}

        if build_base:
            opt_dict = self._dist.get_option_dict("build")
            # ("bento", value) == (origin-label, option-value) as distutils
            # expects in its option dicts.
            opt_dict["build_base"] = ("bento", build_base)
        build = self._dist.get_command_obj("build")
        # Resolved build base (distutils default if none was forced above).
        self._build_base = build.build_base

    def _setup_cmd(self, cmd, t):
        """Return a finalized sub-command of ``build`` plus a customized
        compiler matching that command's settings.

        NOTE(review): parameter ``t`` is never used in this body; callers
        pass a category string — presumably a leftover — confirm before
        removing.
        """
        from distutils.ccompiler import new_compiler
        from distutils.sysconfig import customize_compiler
        # NOTE(review): this imported ``build`` class is immediately
        # shadowed by the command object on the next line.
        from distutils.command.build import build

        build = self._dist.get_command_obj("build")
        bld_cmd = build.get_finalized_command(cmd)
        compiler = new_compiler(compiler=bld_cmd.compiler,
                                dry_run=bld_cmd.dry_run,
                                force=bld_cmd.force)
        customize_compiler(compiler)
        return bld_cmd, compiler

    def _setup_clib(self):
        # Finalized build_clib command + matching compiler.
        from distutils.command.build_clib import build_clib
        return self._setup_cmd("build_clib", "compiled_libraries")

    def _setup_ext(self):
        # Finalized build_ext command + matching compiler.
        from distutils.command.build_ext import build_ext
        return self._setup_cmd("build_ext", "extensions")

    def _extension_filename(self, name, cmd):
        # Map a dotted module name to its platform extension file path.
        m = module_to_path(name)
        d, b = os.path.split(m)
        return os.path.join(d, cmd.get_ext_filename(b))

    def _compiled_library_filename(self, name, compiler):
        # Map a dotted library name to its platform static-library path.
        m = module_to_path(name)
        d, b = os.path.split(m)
        return os.path.join(d, compiler.library_filename(b))

    def build_extension(self, extension):
        """Build one extension; return its output path relative to the
        build base. Raises BuildError on any distutils failure."""
        import distutils.errors

        dist = self._dist
        dist.ext_modules = [toyext_to_distext(extension)]
        bld_cmd, compiler = self._setup_ext()
        try:
            bld_cmd.run()
            base, filename = os.path.split(self._extension_filename(extension.name, bld_cmd))
            fullname = os.path.join(bld_cmd.build_lib, base, filename)
            return [relpath(fullname, self._build_base)]
        except distutils.errors.DistutilsError:
            e = extract_exception()
            raise BuildError(str(e))

    def build_compiled_library(self, library):
        """Build one compiled C library; return its output path relative
        to the build base. Raises BuildError on any distutils failure."""
        import distutils.errors

        dist = self._dist
        dist.libraries = [to_dist_compiled_library(library)]
        bld_cmd, compiler = self._setup_clib()
        base, filename = os.path.split(self._compiled_library_filename(library.name, compiler))
        old_build_clib = bld_cmd.build_clib
        if base:
            # workaround for a distutils issue: distutils put all C libraries
            # in the same directory, and we cannot control the output directory
            # from the name - we need to hack build_clib directory
            bld_cmd.build_clib = os.path.join(old_build_clib, base)
        try:
            try:
                # workaround for yet another bug in distutils: distutils f***s up when
                # building a static library if the target alread exists on at least mac
                # os x.
                target = os.path.join(old_build_clib, base, filename)
                try:
                    os.remove(target)
                except OSError:
                    e = extract_exception()
                    # Only a missing file is acceptable here.
                    if e.errno != errno.ENOENT:
                        raise
                bld_cmd.run()
                return [relpath(target, self._build_base)]
            except distutils.errors.DistutilsError:
                e = extract_exception()
                raise BuildError(str(e))
        finally:
            # Always restore the patched build_clib directory.
            bld_cmd.build_clib = old_build_clib
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """
    Return a distutils install scheme: a mapping from each SCHEME_KEYS
    entry to the corresponding ``install_*`` path computed by the
    distutils ``install`` command for ``dist_name``.
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        # --no-user-cfg keeps the user's ~/.pydistutils.cfg out of the picture.
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    if not ignore_config_files:
        try:
            d.parse_config_files()
        except UnicodeDecodeError:
            # Typeshed does not include find_config_files() for some reason.
            paths = d.find_config_files()  # type: ignore
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )

    obj: Optional[DistutilsCommand] = None
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i = cast(distutils_install_command, obj)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    if user or home:
        # A prefix would conflict with --user/--home; blank it first.
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme["headers"] = os.path.join(
            i.prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

        if root is not None:
            # Re-root the headers path, dropping any drive letter (Windows).
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
<<<<<<< HEAD scheme = {} if isolated: extra_dist_args = {"script_args": ["--no-user-cfg"]} else: extra_dist_args = {} dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] dist_args.update(extra_dist_args) d = Distribution(dist_args) # Ignoring, typeshed issue reported python/typeshed/issues/2567 d.parse_config_files() # NOTE: Ignoring type since mypy can't find attributes on 'Command' i = d.get_command_obj('install', create=True) # type: Any assert i is not None ======= dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] if isolated: dist_args["script_args"] = ["--no-user-cfg"] d = Distribution(dist_args) d.parse_config_files() obj = None # type: Optional[DistutilsCommand] obj = d.get_command_obj('install', create=True) assert obj is not None i = cast(distutils_install_command, obj) >>>>>>> e585743114c1741ec20dc76010f96171f3516589 # NOTE: setting user or home has the side-effect of creating the home dir # or user base for installations during finalize_options()
def pyx_to_dll(
    filename,
    ext=None,
    force_rebuild=0,
    build_in_temp=False,
    pyxbuild_dir=None,
    setup_args=None,
    reload_support=False,
):
    """Compile a PYX file to a DLL and return the path of the generated
    .so or .dll file.

    :param filename: path to a ``.pyx`` (or ``.py``) source file
    :param ext: optional pre-built ``Extension``; derived from ``filename``
        when omitted
    :param force_rebuild: pass ``--force`` to ``build_ext`` when truthy
    :param build_in_temp: with Cython available, keep generated C files in
        the temporary build directory (``--pyrex-c-in-temp``)
    :param pyxbuild_dir: build base directory; defaults to ``_pyxbld`` next
        to the source file
    :param setup_args: optional extra distutils setup keyword arguments
        (read only; never mutated)
    :param reload_support: copy the built module to a ``.reloadN`` file so
        it can be re-imported under a fresh name
    :raises RuntimeError: if distutils command-line parsing fails
    """
    assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)

    # BUG FIX: the original used a mutable default argument (setup_args={});
    # use None as the default so no dict is shared across calls.
    if setup_args is None:
        setup_args = {}

    path, name = os.path.split(filename)

    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension in (".pyx", ".py"), extension
        if not HAS_CYTHON:
            filename = filename[: -len(extension)] + ".c"
        ext = Extension(name=modname, sources=[filename])

    if not pyxbuild_dir:
        pyxbuild_dir = os.path.join(path, "_pyxbld")

    script_args = setup_args.get("script_args", [])
    if DEBUG or "--verbose" in script_args:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    if HAS_CYTHON and build_in_temp:
        args.append("--pyrex-c-in-temp")
    sargs = setup_args.copy()
    sargs.update({"script_name": None, "script_args": args + script_args})
    dist = Distribution(sargs)
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    if HAS_CYTHON:
        dist.cmdclass = {"build_ext": build_ext}
    build = dist.get_command_obj("build")
    build.build_base = pyxbuild_dir

    # BUG FIX: the original located, filtered ("setup.cfg" removed) and
    # parsed the config files TWICE in a row; doing it once is equivalent
    # and avoids the duplicated work.
    config_files = dist.find_config_files()
    try:
        config_files.remove("setup.cfg")
    except ValueError:
        pass
    dist.parse_config_files(config_files)

    # (The original wrapped this call in a try/except that only re-raised
    # DistutilsArgError unchanged -- a no-op wrapper, removed.)
    ok = dist.parse_command_line()
    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()
    if not ok:
        # Explicit error instead of `assert ok`: asserts disappear under -O.
        raise RuntimeError("Build cannot continue")

    try:
        dist.run_commands()
        obj_build_ext = dist.get_command_obj("build_ext")
        so_path = obj_build_ext.get_outputs()[0]
        if obj_build_ext.inplace:
            # Python distutils get_outputs() returns a wrong so_path
            # when --inplace ; see http://bugs.python.org/issue5977
            # workaround:
            so_path = os.path.join(os.path.dirname(filename),
                                   os.path.basename(so_path))
        if reload_support:
            org_path = so_path
            timestamp = os.path.getmtime(org_path)
            global _reloads
            last_timestamp, last_path, count = _reloads.get(org_path, (None, None, 0))
            if last_timestamp == timestamp:
                so_path = last_path
            else:
                basename = os.path.basename(org_path)
                while count < 100:
                    count += 1
                    r_path = os.path.join(obj_build_ext.build_lib,
                                          basename + ".reload%s" % count)
                    try:
                        import shutil  # late import / reload_support is: debugging
                        shutil.copy2(org_path, r_path)
                        so_path = r_path
                    except IOError:
                        continue
                    break
                else:
                    # used up all 100 slots
                    raise ImportError("reload count for %s reached maximum" % org_path)
                _reloads[org_path] = (timestamp, so_path, count)
        return so_path
    except KeyboardInterrupt:
        sys.exit(1)
    except (IOError, os.error):
        exc = sys.exc_info()[1]
        error = grok_environment_error(exc)
        if DEBUG:
            sys.stderr.write(error + "\n")
        raise
class DistutilsBuilder(object):
    """Drive distutils to build C extensions and compiled C libraries.

    Unlike the lazy variant, this class force-initializes the ``build_ext``
    and ``build_clib`` command objects once in ``__init__`` (via the hacks
    below) and reuses them for every subsequent build call.
    """

    def __init__(self, verbosity=1, build_base=None):
        # verbosity: forwarded to distutils' logging.
        # build_base: optional base directory for all build output.
        from distutils.dist import Distribution
        from distutils import log

        log.set_verbosity(verbosity)

        self._dist = Distribution()
        self._compilers = {}
        self._cmds = {}

        if build_base:
            opt_dict = self._dist.get_option_dict("build")
            # ("bento", value) == (origin-label, option-value) as distutils
            # expects in its option dicts.
            opt_dict["build_base"] = ("bento", build_base)
        build = self._dist.get_command_obj("build")
        self._build_base = build.build_base

        # Pre-initialized, reusable command objects.
        self.ext_bld_cmd = self._setup_build_ext()
        self.clib_bld_cmd = self._setup_build_clib()

    def _setup_build_ext(self):
        # Horrible hack to initialize build_ext: build_ext initialization is
        # partially done within the run function (!), and is bypassed if no
        # extensions is available. We fake it just enough so that run does all
        # the initialization without trying to actually build anything.
        build = self._dist.get_command_obj("build")
        bld_cmd = build.get_finalized_command("build_ext")
        bld_cmd.initialize_options()
        bld_cmd.finalize_options()
        old_build_extensions = bld_cmd.build_extensions
        try:
            # Neutralize the actual compile step while run() initializes.
            bld_cmd.build_extensions = lambda: None
            bld_cmd.extensions = [None]
            bld_cmd.run()
        finally:
            bld_cmd.build_extensions = old_build_extensions
        return bld_cmd

    def _setup_build_clib(self):
        # Same hack as _setup_build_ext, applied to build_clib: its
        # initialization is partially done within run() and skipped when no
        # libraries are configured, so we fake just enough for run() to
        # initialize without building anything.
        build = self._dist.get_command_obj("build")
        bld_cmd = build.get_finalized_command("build_clib")
        bld_cmd.initialize_options()
        bld_cmd.finalize_options()
        old_build_libraries = bld_cmd.build_libraries
        try:
            # Neutralize the actual compile step while run() initializes.
            bld_cmd.build_libraries = lambda ignored: None
            bld_cmd.libraries = [None]
            bld_cmd.run()
        finally:
            bld_cmd.build_libraries = old_build_libraries
        return bld_cmd

    def _extension_filename(self, name, cmd):
        # Map a dotted module name to its platform extension file path.
        m = module_to_path(name)
        d, b = os.path.split(m)
        return os.path.join(d, cmd.get_ext_filename(b))

    def _compiled_library_filename(self, name, compiler):
        # Map a dotted library name to its platform static-library path.
        m = module_to_path(name)
        d, b = os.path.split(m)
        return os.path.join(d, compiler.library_filename(b))

    def build_extension(self, extension):
        """Build one extension; return its output path relative to the
        build base. Raises BuildError on any distutils failure."""
        import distutils.errors

        dist_extension = toyext_to_distext(extension)
        bld_cmd = self.ext_bld_cmd
        try:
            bld_cmd.build_extension(dist_extension)
            base, filename = os.path.split(self._extension_filename(dist_extension.name, bld_cmd))
            fullname = os.path.join(bld_cmd.build_lib, base, filename)
            return [relpath(fullname, self._build_base)]
        except distutils.errors.DistutilsError:
            e = extract_exception()
            raise BuildError(str(e))

    def build_compiled_library(self, library):
        """Build one compiled C library; return its output path relative
        to the build base. Raises a BuildError on any distutils failure."""
        import distutils.errors

        bld_cmd = self.clib_bld_cmd
        compiler = bld_cmd.compiler
        base, filename = os.path.split(self._compiled_library_filename(library.name, compiler))
        old_build_clib = bld_cmd.build_clib
        if base:
            # workaround for a distutils issue: distutils put all C libraries
            # in the same directory, and we cannot control the output directory
            # from the name - we need to hack build_clib directory
            bld_cmd.build_clib = os.path.join(old_build_clib, base)
        try:
            try:
                # workaround for yet another bug in distutils: distutils f***s up when
                # building a static library if the target alread exists on at least mac
                # os x.
                target = os.path.join(old_build_clib, base, filename)
                try:
                    os.remove(target)
                except OSError:
                    e = extract_exception()
                    # Only a missing file is acceptable here.
                    if e.errno != errno.ENOENT:
                        raise
                build_info = {"sources": library.sources,
                              "include_dirs": library.include_dirs}
                bld_cmd.build_libraries([(library.name, build_info)])
                return [relpath(target, self._build_base)]
            except distutils.errors.DistutilsError:
                e = extract_exception()
                # NOTE(review): build_extension above raises a bare
                # BuildError while this path uses bento.errors.BuildError;
                # confirm whether `bento` is imported at module level and
                # whether the two spellings name the same class.
                raise bento.errors.BuildError(str(e))
        finally:
            # Always restore the patched build_clib directory.
            bld_cmd.build_clib = old_build_clib