def egg2wheel(egg_path, dest_dir):
    """Convert an egg (zip file or buildout-style directory) into a wheel.

    The wheel is written into *dest_dir*; its tags are derived from the
    egg's file name.

    Raises:
        WheelError: if the egg file name does not match ``egg_info_re``.
    """
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        raise WheelError("Invalid egg file name: {}".format(filename))

    egg_info = match.groupdict()
    # Renamed from `dir` (shadowed the builtin); cleanup is now guaranteed
    # via try/finally — previously the temp dir leaked if any step raised.
    tmp_dir = tempfile.mkdtemp(suffix="_e2w")
    try:
        if os.path.isfile(egg_path):
            # assume we have a bdist_egg otherwise
            with zipfile.ZipFile(egg_path) as egg:
                egg.extractall(tmp_dir)
        else:
            # support buildout-style installed eggs directories
            for pth in os.listdir(egg_path):
                src = os.path.join(egg_path, pth)
                if os.path.isfile(src):
                    shutil.copy2(src, tmp_dir)
                else:
                    shutil.copytree(src, os.path.join(tmp_dir, pth))

        pyver = egg_info["pyver"]
        if pyver:
            pyver = egg_info["pyver"] = pyver.replace(".", "")

        arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")

        # assume all binary eggs are for CPython
        abi = "cp" + pyver[2:] if arch != "any" else "none"

        root_is_purelib = egg_info["arch"] is None
        if root_is_purelib:
            bw = bdist_wheel(dist.Distribution())
        else:
            bw = _bdist_wheel_tag(dist.Distribution())

        bw.root_is_pure = root_is_purelib
        bw.python_tag = pyver
        bw.plat_name_supplied = True
        bw.plat_name = egg_info["arch"] or "any"
        if not root_is_purelib:
            bw.full_tag_supplied = True
            bw.full_tag = (pyver, abi, arch)

        dist_info_dir = os.path.join(
            tmp_dir, "{name}-{ver}.dist-info".format(**egg_info))
        bw.egg2dist(os.path.join(tmp_dir, "EGG-INFO"), dist_info_dir)
        bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
        wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
        with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
            wf.write_files(tmp_dir)
    finally:
        shutil.rmtree(tmp_dir)
def egg2wheel(egg_path, dest_dir):
    """Turn an egg (either a zipped .egg file or an installed egg
    directory) into a wheel under *dest_dir*.

    Raises:
        WheelError: when the egg file name cannot be parsed.
    """
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        raise WheelError('Invalid egg file name: {}'.format(filename))

    egg_info = match.groupdict()
    # Fix: ensure the scratch directory is always removed (the original
    # leaked it on any exception) and avoid shadowing the `dir` builtin.
    tmp_dir = tempfile.mkdtemp(suffix="_e2w")
    try:
        if os.path.isfile(egg_path):
            # assume we have a bdist_egg otherwise
            with zipfile.ZipFile(egg_path) as egg:
                egg.extractall(tmp_dir)
        else:
            # support buildout-style installed eggs directories
            for pth in os.listdir(egg_path):
                src = os.path.join(egg_path, pth)
                if os.path.isfile(src):
                    shutil.copy2(src, tmp_dir)
                else:
                    shutil.copytree(src, os.path.join(tmp_dir, pth))

        pyver = egg_info['pyver']
        if pyver:
            pyver = egg_info['pyver'] = pyver.replace('.', '')

        arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')

        # assume all binary eggs are for CPython
        abi = 'cp' + pyver[2:] if arch != 'any' else 'none'

        root_is_purelib = egg_info['arch'] is None
        if root_is_purelib:
            bw = bdist_wheel(dist.Distribution())
        else:
            bw = _bdist_wheel_tag(dist.Distribution())

        bw.root_is_pure = root_is_purelib
        bw.python_tag = pyver
        bw.plat_name_supplied = True
        bw.plat_name = egg_info['arch'] or 'any'
        if not root_is_purelib:
            bw.full_tag_supplied = True
            bw.full_tag = (pyver, abi, arch)

        dist_info_dir = os.path.join(
            tmp_dir, '{name}-{ver}.dist-info'.format(**egg_info))
        bw.egg2dist(os.path.join(tmp_dir, 'EGG-INFO'), dist_info_dir)
        bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
        wheel_name = '{name}-{ver}-{pyver}-{}-{}.whl'.format(abi, arch, **egg_info)
        with WheelFile(os.path.join(dest_dir, wheel_name), 'w') as wf:
            wf.write_files(tmp_dir)
    finally:
        shutil.rmtree(tmp_dir)
def setUp(self): super(APIAutoDocTest, self).setUp() # setup_command requires the Sphinx instance to have some # attributes that aren't set normally with the way we use the # class (because we replace the constructor). Add default # values directly to the class definition. import sphinx.application sphinx.application.Sphinx.messagelog = [] sphinx.application.Sphinx.statuscode = 0 self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.build", lambda *a, **kw: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.config", _SphinxConfig)) self.useFixture(fixtures.MonkeyPatch( "sphinx.config.Config.init_values", lambda *a: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.config.Config.__init__", lambda *a: None)) from distutils import dist self.distr = dist.Distribution() self.distr.packages = ("fake_package",) self.distr.command_options["build_sphinx"] = { "source_dir": ["a", "."]} self.sphinx_options = self.distr.command_options["build_sphinx"] pkg_fixture = fixtures.PythonPackage( "fake_package", [("fake_module.py", b""), ("another_fake_module_for_testing.py", b""), ("fake_private_module.py", b"")]) self.useFixture(pkg_fixture) self.useFixture(base.DiveDir(pkg_fixture.base)) self.pbr_options = self.distr.command_options.setdefault('pbr', {}) self.pbr_options["autodoc_index_modules"] = ('setup.cfg', 'True')
def _distutils_install(): # use distutils primarily because that's what pip does # https://github.com/pypa/pip/blob/main/src/pip/_internal/locations.py#L95 # note here we don't import Distribution directly to allow setuptools to patch it with warnings.catch_warnings(): # disable warning for PEP-632 warnings.simplefilter("ignore") try: from distutils import dist from distutils.command.install import SCHEME_KEYS except ImportError: # if removed or not installed ignore return {} d = dist.Distribution({ "script_args": "--no-user-cfg" }) # conf files not parsed so they do not hijack paths if hasattr(sys, "_framework"): sys._framework = None # disable macOS static paths for framework with warnings.catch_warnings(): # disable warning for PEP-632 warnings.simplefilter("ignore") i = d.get_command_obj("install", create=True) i.prefix = os.sep # paths generated are relative to prefix that contains the path sep, this makes it relative i.finalize_options() result = { key: (getattr(i, "install_{}".format(key))[1:]).lstrip(os.sep) for key in SCHEME_KEYS } return result
def setUp(self):
    """Assemble a Distribution over a fake package for build_sphinx tests."""
    super(BuildSphinxTest, self).setUp()
    # The real Sphinx build step is stubbed out entirely.
    self.useFixture(fixtures.MonkeyPatch(
        "sphinx.setup_command.BuildDoc.run", lambda self: None))

    from distutils import dist
    self.distr = dist.Distribution()
    self.distr.packages = ("fake_package", )
    self.distr.command_options["build_sphinx"] = {"source_dir": ["a", "."]}

    fake_modules = [
        ("fake_module.py", b""),
        ("another_fake_module_for_testing.py", b""),
        ("fake_private_module.py", b""),
    ]
    pkg_fixture = fixtures.PythonPackage("fake_package", fake_modules)
    self.useFixture(pkg_fixture)
    self.useFixture(base.DiveDir(pkg_fixture.base))

    pbr_opts = self.distr.command_options["pbr"] = {}
    # Subclasses opt into exclusion/indexing behavior via class attributes.
    if hasattr(self, "excludes"):
        pbr_opts["autodoc_exclude_modules"] = (
            'setup.cfg',
            "fake_package.fake_private_module\n"
            "fake_package.another_fake_*\n"
            "fake_package.unknown_module")
    if self.has_opt:
        pbr_opts["autodoc_index_modules"] = ('setup.cfg', self.autodoc)
def _extra_compile_args(platform):
    """
    We set -Wconversion args here so that we only do Wconversion checks on
    the code we're compiling and not on cffi itself (as passing -Wconversion
    in CFLAGS would do). We set no error on sign conversion because some
    function signatures in LibreSSL differ from OpenSSL have changed on long
    vs. unsigned long in the past. Since that isn't a precision issue we
    don't care.
    """
    # make sure the compiler used supports the flags to be added
    is_gcc = False
    if get_default_compiler() == "unix":
        d = dist.Distribution()
        cmd = config(d)
        cmd._check_compiler()
        cc_name = cmd.compiler.compiler[0]
        is_gcc = "gcc" in cc_name or "clang" in cc_name

    flagless_platform = (
        platform in ["win32", "hp-ux11", "sunos5"]
        or platform.startswith("aix")
    )
    if is_gcc or not flagless_platform:
        return ["-Wconversion", "-Wno-error=sign-conversion"]
    return []
def add_dist(self, name):
    """Look up the installed distribution *name* and rebuild a distutils
    Distribution object from its PKG-INFO metadata.

    NOTE(review): relies on Python 2-only modules (rfc822, StringIO).
    """
    pkg_dist = pkg_resources.get_distribution(name)
    pkg_info = pkg_dist.get_metadata('PKG-INFO')
    # PKG-INFO is RFC-822 style headers; parse them as a message.
    msg = rfc822.Message(StringIO.StringIO(pkg_info))
    # Translate header names to Distribution attribute names via
    # self.pkg_info_attrs, dropping fields distutils emits as 'UNKNOWN'.
    attrs = dict((self.pkg_info_attrs.get(key, key), value)
                 for key, value in msg.items()
                 if value != 'UNKNOWN')
    distribution = dist.Distribution(attrs)
    return distribution
def find_destination(is_user):
    """Returns the directory we are supposed to install into."""
    install_cmd = dist_install.install(dist.Distribution())
    install_cmd.finalize_options()
    # User installs go to the per-user site dir, otherwise platlib.
    return (install_cmd.install_usersite if is_user
            else install_cmd.install_platlib)
def _get_ext_libraries(compiler):
    """Return the libraries build_ext would link for a dummy extension
    under *compiler*.

    Bug fix: the original assigned ``binst.compiler = compiler`` *before*
    calling ``initialize_options()``, which resets ``compiler`` to None —
    so the argument was silently discarded. Assign it afterwards instead.
    """
    class _FakeExt(object):
        # Minimal Extension stand-in; only .libraries is consulted.
        def __init__(self):
            self.libraries = []

    binst = build_ext.build_ext(dist.Distribution())
    binst.initialize_options()
    binst.compiler = compiler
    binst.finalize_options()
    return binst.get_libraries(_FakeExt())
def test_calmjs_artifact_declarations(self): from calmjs.registry import _inst # the actual implementations this is supporting from calmjs.artifact import build_calmjs_artifacts from calmjs.artifact import ArtifactRegistry working_dir = mkdtemp(self) make_dummy_dist(self, (('entry_points.txt', '\n'.join([ '[calmjs.registry]', 'calmjs.artifacts = calmjs.artifact:ArtifactsRegistry', ])), ), 'calmjs', '1.0', working_dir=working_dir) make_dummy_dist(self, (('entry_points.txt', '\n'.join([ '[calmjs.artifacts]', 'example.js = example:builder', ])), ), 'some.package', '1.0', working_dir=working_dir) mock_ws = pkg_resources.WorkingSet([working_dir]) registry_id = 'calmjs.artifacts' registry = ArtifactRegistry(registry_id, _working_set=mock_ws) # cleanup the about to be injected version. self.addCleanup(_inst.records.pop, registry_id, None) _inst.records['calmjs.artifacts'] = registry # construct a command for the declaration check. cmd = build_calmjs_artifacts(dist=distutils_dist.Distribution( attrs={'name': 'some.package'})) self.assertTrue(calmjs_dist.has_calmjs_artifact_declarations(cmd)) cmd = build_calmjs_artifacts(dist=distutils_dist.Distribution( attrs={'name': 'missing.package'})) self.assertFalse(calmjs_dist.has_calmjs_artifact_declarations(cmd)) cmd = build_calmjs_artifacts(dist=distutils_dist.Distribution( attrs={'name': 'calmjs'})) self.assertFalse(calmjs_dist.has_calmjs_artifact_declarations(cmd))
def should_raise_setup_error_when_pip_import_fails(_):
    """PipInstall.run must fail with DistutilsSetupError when pip is gone."""
    pip.install = None
    command = pip.PipInstall(dist.Distribution())
    raised = None
    try:
        command.run()
    except Exception as exc:
        raised = exc
    if raised is None:
        raise AssertionError('should have raised DistutilsSetupError')
    assert isinstance(raised, errors.DistutilsSetupError)
def setUpClass(cls):
    """Run the GitVersion command once, with the git subprocess runner
    replaced by a programmable fake, so tests can assert on the result."""
    super(CommandTestCase, cls).setUpClass()
    cls.distribution = dist.Distribution(attrs={'version': '1.2.3'})
    # Programmable stand-in for _run_command; subclasses script its
    # responses in configure() before the command is executed.
    cls.run_git_command = ProgrammableCallable()
    cls.configure(cls.run_git_command)
    # Patch must stay active for the whole execute() call.
    with mock.patch('setupext.gitversion._run_command',
                    new=cls.run_git_command):
        command = gitversion.GitVersion(cls.distribution)
        command.initialize_options()
        cls.execute(command)
def _distutils_install():
    """Return install-scheme paths made relative to the prefix.

    follow https://github.com/pypa/pip/blob/main/src/pip/_internal/locations.py#L95
    note here we don't import Distribution directly to allow setuptools to patch it
    """
    # conf files not parsed so they do not hijack paths
    distribution = dist.Distribution({"script_args": "--no-user-cfg"})
    if hasattr(sys, "_framework"):
        sys._framework = None  # disable macOS static paths for framework

    installer = distribution.get_command_obj("install", create=True)
    # paths generated are relative to prefix that contains the path sep,
    # this makes it relative
    installer.prefix = os.sep
    installer.finalize_options()

    return {
        key: getattr(installer, "install_{}".format(key))[1:].lstrip(os.sep)
        for key in SCHEME_KEYS
    }
def test_combined(tmpdir):
    """Run the Combine command and check the combined output is importable."""
    from distutils import dist
    import setup
    output_file = tmpdir.join('combined.py')
    combine = setup.Combine(dist.Distribution())
    combine.output_file = str(output_file)
    combine.run()
    # Make the freshly generated file importable, then import it; the
    # import succeeding (module object is truthy) is the assertion.
    sys.path.append(output_file.dirname)
    import combined
    assert combined
def setUp(self):
    """Create a minimal fake package and Distribution for build_sphinx."""
    super(BuildSphinxTest, self).setUp()
    # No real documentation build should happen.
    self.useFixture(fixtures.MonkeyPatch(
        "sphinx.setup_command.BuildDoc.run", lambda self: None))

    from distutils import dist
    self.distr = dist.Distribution()
    self.distr.packages = ("fake_package", )
    self.distr.command_options["build_sphinx"] = {"source_dir": ["a", "."]}

    modules = [("fake_module.py", b"")]
    pkg_fixture = fixtures.PythonPackage("fake_package", modules)
    self.useFixture(pkg_fixture)
    self.useFixture(base.DiveDir(pkg_fixture.base))
def get_site_packages():
    """
    Depending on whether we have debian python or not, return
    dist_packages path or site_packages path.
    """
    if 'deb_system' in INSTALL_SCHEMES:
        # Debian patched python in use
        cmd = dist.Distribution().get_command_obj('install')
        cmd.select_scheme('deb_system')
        cmd.finalize_options()
        plat_path = Path(cmd.install_platlib)
    else:
        plat_path = Path(get_path('platlib'))
    # Re-root the platlib path (relative to exec_prefix) under the
    # installed location.
    relative = plat_path.relative_to(sys.exec_prefix)
    return str(Path(PATH_INSTALLED) / relative)
def _site_packages_dir(self):
    """Return the path to this virtualenv's "site-packages" directory."""
    # Defer "distutils" import until this function is called so that
    # "mach bootstrap" doesn't fail due to Linux distro python-distutils
    # package not being installed.
    # By the time this function is called, "distutils" must be installed
    # because it's needed by the "virtualenv" package.
    from distutils import dist

    distribution = dist.Distribution({"script_args": "--no-user-cfg"})
    install = distribution.get_command_obj("install")
    install.prefix = os.path.normpath(self.virtualenv_root)
    install.finalize_options()
    return install.install_purelib
def _disable_pip_outdated_warning(self): """Disables the pip outdated warning by changing pip's 'installer' "pip" has behaviour to ensure that it doesn't print it's "outdated" warning if it's part of a Linux distro package. This is because Linux distros generally have a slightly out-of-date pip package that they know to be stable, and users aren't always able to (or want to) update it. This behaviour works by checking if the "pip" installer (encoded in the dist-info/INSTALLER file) is "pip" itself, or a different value (e.g.: a distro). We can take advantage of this behaviour by telling pip that it was installed by "mach", so it won't print the warning. https://github.com/pypa/pip/blob/5ee933aab81273da3691c97f2a6e7016ecbe0ef9/src/pip/_internal/self_outdated_check.py#L100-L101 # noqa F401 """ # Defer "distutils" import until this function is called so that # "mach bootstrap" doesn't fail due to Linux distro python-distutils # package not being installed. # By the time this function is called, "distutils" must be installed # because it's needed by the "virtualenv" package. from distutils import dist distribution = dist.Distribution({"script_args": "--no-user-cfg"}) installer = distribution.get_command_obj("install") installer.prefix = os.path.normpath(self.virtualenv_root) installer.finalize_options() # Path to virtualenv's "site-packages" directory site_packages = installer.install_purelib pip_dist_info = next( (file for file in os.listdir(site_packages) if file.startswith("pip-") and file.endswith(".dist-info")), None, ) if not pip_dist_info: raise Exception("Failed to find pip dist-info in new virtualenv") with open(os.path.join(site_packages, pip_dist_info, "INSTALLER"), "w") as file: file.write("mach")
def test_build_calmjs_artifacts_failure(self):
    """A 'build' cmdclass that isn't a real build command is rejected
    with a logged complaint."""
    def fakecmd(*a, **kw):
        return object

    dist = distutils_dist.Distribution(
        attrs={'cmdclass': {'build': fakecmd}})
    with pretty_logging(stream=StringIO()) as stream:
        calmjs_dist.build_calmjs_artifacts(dist, 'build_again', True)
    logged = stream.getvalue()
    self.assertIn(
        "'build' command in Distribution is not an instance of "
        "'distutils.command.build:build'",
        logged,
    )
def test_build_calmjs_artifacts_standard(self):
    """Enabling build_calmjs_artifacts appends sub_commands named after
    the requested build step; disabling it leaves them untouched."""
    dist = distutils_dist.Distribution()
    build_cmd = dist.get_command_obj('build')
    original_subcmds = list(build_cmd.sub_commands)

    # Disabled: nothing is appended.
    calmjs_dist.build_calmjs_artifacts(dist, 'build_artifact', False)
    self.assertEqual(original_subcmds, build_cmd.sub_commands)

    # keys are named after the build step.
    calmjs_dist.build_calmjs_artifacts(dist, 'build_artifact', True)
    expected = (
        'build_artifact', calmjs_dist.has_calmjs_artifact_declarations)
    self.assertEqual(expected, build_cmd.sub_commands[-1])

    calmjs_dist.build_calmjs_artifacts(dist, 'calmjs_artifact', True)
    expected = (
        'calmjs_artifact', calmjs_dist.has_calmjs_artifact_declarations)
    self.assertEqual(expected, build_cmd.sub_commands[-1])
def build_test_extensions():
    """Because distutils sucks, it just copies the entire contents of the
    build results dir (e.g. build/lib.linux-i686-2.6) during installation.
    That means that we can't put any files there that we don't want to
    distribute. </3.

    To deal with that, this code will compile the test extension and
    place the object files in the normal temp directory using the same
    logic as distutils, but linked shared library will go directly into
    the tests directory.
    """
    # Mirror distutils' platform-specific temp dir naming, e.g.
    # build/temp.linux-i686-2.6.
    build_temp_dir = os.path.join(
        'build', 'temp.%s-%s' % (util.get_platform(), sys.version[0:3]))
    compiler = new_compiler()
    distribution = dist.Distribution()
    # A finalized build_ext command supplies library dirs, file naming,
    # and export-symbol logic without running a real build.
    build_ext_cmd = build_ext.build_ext(distribution)
    build_ext_cmd.finalize_options()
    compiler.set_library_dirs(build_ext_cmd.library_dirs)
    # Apply CFLAGS/CC etc. from the environment/sysconfig.
    sysconfig.customize_compiler(compiler)

    def build_and_copy(extension):
        """compile sources, link shared library, and copy it into CWD"""
        objects = compiler.compile(
            extension.sources,
            output_dir=build_temp_dir,
            include_dirs=extension.include_dirs,
            debug=False,
            depends=extension.depends)
        # Link into the temp dir under the name distutils would use...
        output_file = os.path.join(
            build_temp_dir, build_ext_cmd.get_ext_filename(extension.name))
        compiler.link_shared_object(
            objects, output_file,
            libraries=build_ext_cmd.get_libraries(extension),
            library_dirs=extension.library_dirs,
            runtime_library_dirs=extension.runtime_library_dirs,
            export_symbols=build_ext_cmd.get_export_symbols(extension),
            debug=False,
            build_temp=build_temp_dir,
            target_lang=compiler.detect_language(extension.sources))
        # ...then copy the finished library into the current directory.
        file_util.copy_file(output_file, os.path.curdir)

    for extension in TEST_EXTENSIONS:
        build_and_copy(extension)
def setUp(self):
    """Stub out Sphinx and prepare a fake package Distribution; honors
    optional class attributes (excludes, has_opt/autodoc, warnerrors)."""
    super(BaseSphinxTest, self).setUp()
    # Neutralize Sphinx so no real build happens during the tests.
    self.useFixture(
        fixtures.MonkeyPatch("sphinx.application.Sphinx.__init__",
                             lambda *a, **kw: None))
    self.useFixture(
        fixtures.MonkeyPatch("sphinx.application.Sphinx.build",
                             lambda *a, **kw: None))
    self.useFixture(
        fixtures.MonkeyPatch("sphinx.config.Config.man_pages",
                             ['foo']))
    self.useFixture(
        fixtures.MonkeyPatch("sphinx.config.Config.init_values",
                             lambda *a: None))
    self.useFixture(
        fixtures.MonkeyPatch("sphinx.config.Config.__init__",
                             lambda *a: None))
    from distutils import dist
    self.distr = dist.Distribution()
    self.distr.packages = ("fake_package", )
    self.distr.command_options["build_sphinx"] = {"source_dir": ["a", "."]}
    # Fake package with public, secondary and "private" modules so the
    # exclusion options below have something to act on.
    pkg_fixture = fixtures.PythonPackage(
        "fake_package", [("fake_module.py", b""),
                         ("another_fake_module_for_testing.py", b""),
                         ("fake_private_module.py", b"")])
    self.useFixture(pkg_fixture)
    self.useFixture(base.DiveDir(pkg_fixture.base))
    self.distr.command_options["pbr"] = {}
    # Subclasses enable features by defining class attributes.
    if hasattr(self, "excludes"):
        self.distr.command_options["pbr"]["autodoc_exclude_modules"] = (
            'setup.cfg',
            "fake_package.fake_private_module\n"
            "fake_package.another_fake_*\n"
            "fake_package.unknown_module")
    if hasattr(self, 'has_opt') and self.has_opt:
        options = self.distr.command_options["pbr"]
        options["autodoc_index_modules"] = ('setup.cfg', self.autodoc)
    if hasattr(self, 'warnerrors') and self.warnerrors:
        options = self.distr.command_options["pbr"]
        options["warnerrors"] = ('setup.cfg', 'true')
def wheel_tag():
    """Return this interpreter's wheel tag as "pyver-abi-platform"."""
    # FIXME: is there a nicer way to do that ???
    from distutils import dist
    from wheel.bdist_wheel import bdist_wheel

    cmd = bdist_wheel(dist.Distribution())
    return "-".join(cmd.get_tag())
def _get_ext_library_dirs():
    """Return the library search dirs a default build_ext command uses."""
    cmd = build_ext.build_ext(dist.Distribution())
    cmd.initialize_options()
    cmd.finalize_options()
    return cmd.library_dirs
def get_install_data_dir():
    """Return the resolved install_data directory for a default install."""
    distribution = dist.Distribution()
    install_cmd = dist_install.install(distribution)
    install_cmd.finalize_options()
    return install_cmd.install_data
def wininst2wheel(path, dest_dir):
    """Convert a wininst .exe installer archive into a wheel in *dest_dir*.

    Fixes relative to the original:
    - the output file name now carries the required ``.whl`` extension
      (egg2wheel in this module appends it; it was missing here);
    - the scratch directory is removed even when a step raises.
    """
    with zipfile.ZipFile(path) as bdw:
        # Search for egg-info in the archive
        egginfo_name = None
        for filename in bdw.namelist():
            if '.egg-info' in filename:
                egginfo_name = filename
                break

        info = parse_wininst_info(os.path.basename(path), egginfo_name)

        # PLATLIB entries mean architecture-specific content at the root.
        root_is_purelib = True
        for zipinfo in bdw.infolist():
            if zipinfo.filename.startswith('PLATLIB'):
                root_is_purelib = False
                break

        if root_is_purelib:
            paths = {'purelib': ''}
        else:
            paths = {'platlib': ''}

        dist_info = "%(name)s-%(ver)s" % info
        datadir = "%s.data/" % dist_info

        # rewrite paths to trick ZipFile into extracting an egg
        # XXX grab wininst .ini - between .exe, padding, and first zip file.
        members = []
        egginfo_name = ''
        for zipinfo in bdw.infolist():
            key, basename = zipinfo.filename.split('/', 1)
            key = key.lower()
            basepath = paths.get(key, None)
            if basepath is None:
                basepath = datadir + key.lower() + '/'
            oldname = zipinfo.filename
            newname = basepath + basename
            zipinfo.filename = newname
            del bdw.NameToInfo[oldname]
            bdw.NameToInfo[newname] = zipinfo
            # Collect member names, but omit '' (from an entry like "PLATLIB/"
            if newname:
                members.append(newname)
            # Remember egg-info name for the egg2dist call below
            if not egginfo_name:
                if newname.endswith('.egg-info'):
                    egginfo_name = newname
                elif '.egg-info/' in newname:
                    egginfo_name, sep, _ = newname.rpartition('/')

        tmp_dir = tempfile.mkdtemp(suffix="_b2w")
        try:
            bdw.extractall(tmp_dir, members)

            # egg2wheel
            abi = 'none'
            pyver = info['pyver']
            arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
            # Wininst installers always have arch even if they are not
            # architecture-specific (because the format itself is).
            # So, assume the content is architecture-neutral if root is
            # purelib.
            if root_is_purelib:
                arch = 'any'
            # If the installer is architecture-specific, it's almost
            # certainly also CPython-specific.
            if arch != 'any':
                pyver = pyver.replace('py', 'cp')
            wheel_name = '-'.join((dist_info, pyver, abi, arch))

            if root_is_purelib:
                bw = bdist_wheel(dist.Distribution())
            else:
                bw = _bdist_wheel_tag(dist.Distribution())

            bw.root_is_pure = root_is_purelib
            bw.python_tag = pyver
            bw.plat_name_supplied = True
            bw.plat_name = info['arch'] or 'any'
            if not root_is_purelib:
                bw.full_tag_supplied = True
                bw.full_tag = (pyver, abi, arch)

            dist_info_dir = os.path.join(tmp_dir, '%s.dist-info' % dist_info)
            bw.egg2dist(os.path.join(tmp_dir, egginfo_name), dist_info_dir)
            bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')

            # Bug fix: append the mandatory .whl extension to the archive name.
            wheel_path = os.path.join(dest_dir, wheel_name + '.whl')
            with WheelFile(wheel_path, 'w') as wf:
                wf.write_files(tmp_dir)
        finally:
            shutil.rmtree(tmp_dir)
def get_install_command(name):
    """Return a finalized distutils install command for distribution *name*."""
    # late binding due to potential monkeypatching
    distribution = dist.Distribution({'name': name})
    cmd = install.install(distribution)
    cmd.finalize_options()
    return cmd
def setUp(self):
    """Create a GitVersion command with freshly initialized options."""
    super(WhenInitializingOptions, self).setUp()
    self.distribution = dist.Distribution()
    command = gitversion.GitVersion(self.distribution)
    command.initialize_options()
    self.command = command
def make_command(requires):
    """Wrap *requires* in a Distribution and build a Command around it."""
    distribution = dist.Distribution(requires)
    return command.Command(distribution)