def dependencies(self, extra=None):
    """Yield the names of this wheel's requirements.

    Args:
      extra: if specified, include the additional dependencies
        of the named "extra".

    Yields:
      the names of requirements from the metadata.json
    """
    # TODO(mattmoor): Is there a schema to follow for this?
    for block in self.metadata().get("run_requires", []):
        # Only keep requirement blocks belonging to the requested extra.
        if block.get("extra") != extra:
            continue
        env_marker = block.get("environment")
        # Skip blocks whose PEP 508 marker does not match this environment.
        if env_marker and not pkg_resources.evaluate_marker(env_marker):
            continue
        for spec in block.get("requires", []):
            # Drop trailing version constraints from the spec.
            name = re.split("[ ><=()]", spec)[0]
            # Requires-Dist entries may carry extras, e.g.
            # tablib[html,ods,xls,xlsx,yaml] (>=0.14.0). Extras aren't
            # supported as bazel deps, so keep only the bare package name.
            if name.endswith(']'):
                name = name.split('[')[0]
            yield name
def get_packages_info(requirement_file):
    """Resolve *requirement_file* and describe every package it pulls in.

    Returns a list of dicts with keys ``name``, ``version``, ``location``,
    ``dependencies`` and ``licenses``, sorted case-insensitively by name.
    """
    # "License:" header line in PKG-INFO metadata.
    regex_license = re.compile(r'License: (?P<license>[^\r\n]+)\r?\n')
    # OSI-approved license trove classifiers in PKG-INFO metadata.
    regex_classifier = re.compile(
        r'Classifier: License :: OSI Approved :: (?P<classifier>[^\r\n]+)\r?\n'
    )

    requirements = []
    for req in parse_requirements(requirement_file, session=PipSession()):
        if req.markers:
            # Skip requirements whose environment marker does not match
            # the running interpreter/platform.
            if not pkg_resources.evaluate_marker(str(req.markers)):
                continue
        requirements.append(pkg_resources.Requirement.parse(str(req.req)))

    def transform(dist):
        # Build the per-package report dict for one resolved distribution.
        licenses = get_license(dist) + get_license_OSI_classifiers(dist)
        # Strip the useless "License" suffix and uniquify
        licenses = list(set([strip_license(l) for l in licenses]))

        return {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'dependencies': [dependency.project_name
                             for dependency in dist.requires()],
            'licenses': licenses,
        }

    def get_license(dist):
        # Extract the "License:" metadata header, if present and meaningful.
        if dist.has_metadata(dist.PKG_INFO):
            metadata = dist.get_metadata(dist.PKG_INFO)
            license = regex_license.search(metadata).group('license')
            if license != "UNKNOWN":  # Value when license not specified.
                return [license]
        return []

    def get_license_OSI_classifiers(dist):
        # Extract every OSI-approved license classifier, if any.
        if dist.has_metadata(dist.PKG_INFO):
            metadata = dist.get_metadata(dist.PKG_INFO)
            return regex_classifier.findall(metadata)
        return []

    def strip_license(license):
        # e.g. "MIT License" -> "MIT".
        if license.lower().endswith(" license"):
            return license[:-len(" license")]
        return license

    packages = [
        transform(dist)
        for dist in pkg_resources.working_set.resolve(requirements)
    ]

    # keep only unique values as there are maybe some duplicates
    unique = []
    [unique.append(item) for item in packages if item not in unique]

    return sorted(unique, key=(lambda item: item['name'].lower()))
def dependencies(self, extra=None):
    """Access the dependencies of this Wheel.

    Args:
      extra: if specified, include the additional dependencies
        of the named "extra".

    Yields:
      the names of requirements from the metadata.json
    """
    # TODO(mattmoor): Is there a schema to follow for this?
    run_requires = self.metadata().get('run_requires', [])
    for requirement in run_requires:
        if requirement.get('extra') != extra:
            # Match the requirements for the extra we're looking for.
            continue
        marker = requirement.get('environment')
        if marker and not pkg_resources.evaluate_marker(marker):
            # The current environment does not match the provided PEP 508
            # marker, so ignore this requirement.
            continue
        requires = requirement.get('requires', [])
        for entry in requires:
            # Strip off any trailing versioning data.
            parts = re.split('[ ><=()]', entry)
            package_name = parts[0]
            # FIX: Requires-Dist entries may declare extras, e.g.
            # "tablib[html,ods,xls,xlsx,yaml] (>=0.14.0)". Extras are not
            # supported here, so strip the bracketed part; otherwise the
            # yielded name (e.g. "tablib[html,...]") is not a valid dep.
            if package_name.endswith(']'):
                package_name = package_name.split('[')[0]
            yield package_name
def marker_passes(marker):
    """
    Given an environment marker, return True if the marker is valid
    and matches this environment.
    """
    # Preserve the short-circuit semantics of the original chained `and`:
    # a falsy marker is returned as-is, an invalid marker yields False,
    # and a valid marker yields its evaluation result.
    if not marker:
        return marker
    if pkg_resources.invalid_marker(marker):
        return False
    return pkg_resources.evaluate_marker(marker)
def install_app_requirements(self):
    """Assemble the app's full requirement list (install_requires plus any
    environment-marker extras that match) and pip-install it into the app
    packages directory.
    """
    print(" * Installing requirements...")

    # setuptools install_requires setup() argument list entries end up in
    # two different self.distribution arguments:
    # - Entries with no environment markers
    #   Will be found in self.distribution.install_requires.
    # - Entries with environment markers
    #   Will be found under new keys in self.distribution.extras_require;
    #   each such key is a string starting with ':' followed by the original
    #   marker expression text itself (eg: ':python_version>"3.5"'); the
    #   values will be lists of all the entries in the install_requires
    #   setup() argument containing that particular marker.
    # With this, the full requirements are:
    # - The list in self.distribution.install_requires.
    # - Extended with all True-evaluating marker entries in
    #   self.distribution.extras_require.
    requirement_list = list(self.distribution.install_requires)
    for extra, extra_list in self.distribution.extras_require.items():
        if not extra.startswith(":"):
            # Discard non-environment marker entries.
            continue
        if pkg_resources.evaluate_marker(extra[1:]):
            # Marker evaluates to True, bring it in.
            requirement_list.extend(extra_list)

    # FIX: gate on the assembled requirement_list, not on
    # self.distribution.install_requires — the old check skipped the
    # install entirely when every requirement was marker-guarded
    # (install_requires empty but extras_require populated).
    if requirement_list:
        subprocess.Popen([
            sys.executable, "-m",
            "pip", "install",
            "--upgrade",
            "--force-reinstall",
            '--target={}'.format(self.app_packages_dir)
        ] + requirement_list,
        ).wait()
    else:
        print("No requirements.")
def run(self):
    """Collect package metadata and dependency names from the distribution
    and write them as JSON to ``self.output``.

    FIX: both the requirements file and the output file are now opened via
    context managers; the original leaked the file handles.
    """
    data = {
        "name": self.distribution.get_name(),
        "version": self.distribution.get_version(),
        "author": u("%s <%s>") % (
            u(self.distribution.get_author()),
            u(self.distribution.get_author_email()),
        ),
        "description": self.distribution.get_description(),
        "license": self.distribution.get_license(),
        "url": self.distribution.get_url(),
    }

    # Packages with compiled extension modules are architecture-specific.
    if self.distribution.has_ext_modules():
        data["architecture"] = "native"
    else:
        data["architecture"] = "all"

    final_deps = []

    if self.load_requirements_txt:
        with open(self.requirements_txt) as req_file:
            requirement = req_file.readlines()
        for dep in pkg_resources.parse_requirements(requirement):
            final_deps.extend(self.process_dep(dep))
    else:
        if getattr(self.distribution, 'install_requires', None):
            for dep in pkg_resources.parse_requirements(
                    self.distribution.install_requires):
                final_deps.extend(self.process_dep(dep))
        if getattr(self.distribution, 'extras_require', None):
            # Only ':<marker>' extras whose marker matches this environment.
            for dep in pkg_resources.parse_requirements(
                    v for k, v in self.distribution.extras_require.items()
                    if k.startswith(':') and
                    pkg_resources.evaluate_marker(k[1:])):
                final_deps.extend(self.process_dep(dep))

    data["dependencies"] = final_deps

    with open(self.output, "w") as output:
        if hasattr(json, 'dumps'):
            def default_to_str(obj):
                """ Fall back to using __str__ if possible """
                # This checks if the class of obj defines __str__ itself,
                # so we don't fall back to an inherited __str__ method.
                if "__str__" in type(obj).__dict__:
                    return str(obj)
                return json.JSONEncoder.default(self, obj)

            output.write(json.dumps(data, indent=2, default=default_to_str))
        else:
            # For Python 2.5 and Debian's python-json
            output.write(json.write(data))
def parse_requirements(requirement_file):
    """Parse a pip requirements file into pkg_resources Requirement
    objects, dropping entries whose environment markers do not match
    the running environment.
    """
    result = []
    for parsed in pip_parse_requirements(requirement_file,
                                         session=PipSession()):
        req = install_req_from_parsed_requirement(parsed)
        marker_ok = (not req.markers or
                     pkg_resources.evaluate_marker(str(req.markers)))
        if marker_ok:
            result.append(pkg_resources.Requirement.parse(str(req.req)))
    return result
def test_pkg_resources_no_parser(marker):
    """Exercise the markerlib fallback path: parse a requirement and
    evaluate *marker* with pkg_resources' parser removed."""
    if not hasattr(pkg_resources, "MarkerEvaluation"):
        pytest.skip("no markerlib variant")
    # Save the module-level parser/evaluator so they can be restored.
    parser = pkg_resources.parser
    evaluate_marker = pkg_resources.evaluate_marker
    # Force the markerlib code path: delete the parser and swap in the
    # markerlib-based evaluator.
    del pkg_resources.parser
    pkg_resources.evaluate_marker = pkg_resources.MarkerEvaluation._markerlib_evaluate
    try:
        req = "foo==1.1;" + marker + "=='2.1'"
        req, mark = req.split(";")
        pkg_resources.Requirement.parse(req)
        pkg_resources.evaluate_marker(mark)
    finally:
        # Restore global state for subsequent tests.
        pkg_resources.parser = parser
        pkg_resources.evaluate_marker = evaluate_marker
def marker_passes(marker):
    """
    Given an environment marker, return True if the marker is valid
    and matches this environment.
    """
    # Mirrors the chained-`and` contract: falsy input is returned
    # unchanged, invalid markers give False, valid markers give their
    # evaluation result.
    if marker:
        if not pkg_resources.invalid_marker(marker):
            return pkg_resources.evaluate_marker(marker)
        return False
    return marker
def run(self):
    """Fetch the distribution's test dependencies (plus marker-matching
    conditional extras) as build eggs, then run the test suite."""
    from pkg_resources import evaluate_marker

    requirements = set(self.distribution.install_requires)
    requirements.update(['mock>=2.0.0', 'pytest-cov', 'pytest'])
    for key, deps in self.distribution.extras_require.items():
        # ':<marker>' extras apply only when their marker matches;
        # plain named extras are always included here.
        if key.startswith(':') and not evaluate_marker(key[1:]):
            continue
        requirements.update(deps)
    self.distribution.fetch_build_eggs(list(requirements))
    self.run_tests()
def pytest_configure(config):
    """Abort the test session with a message if the environment is not
    usable: missing testdata submodule, unmet optional requirements, or
    an unusable cElementTree."""
    msgs = []

    # The suite requires the testdata git submodule to be checked out.
    if not os.path.exists(_testdata):
        msg = "testdata not available! "
        if os.path.exists(os.path.join(_root, ".git")):
            msg += ("Please run git submodule update --init --recursive " +
                    "and then run tests again.")
        else:
            msg += (
                "The testdata doesn't appear to be included with this package, "
                + "so finding the right version will be hard. :(")
        msgs.append(msg)

    if config.option.update_xfail:
        # Check for optional requirements
        req_file = os.path.join(_root, "requirements-optional.txt")
        if os.path.exists(req_file):
            with open(req_file, "r") as fp:
                for line in fp:
                    if (line.strip() and
                            not (line.startswith("-r") or
                                 line.startswith("#"))):
                        # Split "spec; marker" requirement lines.
                        if ";" in line:
                            spec, marker = line.strip().split(";", 1)
                        else:
                            spec, marker = line.strip(), None
                        req = pkg_resources.Requirement.parse(spec)
                        if marker and not pkg_resources.evaluate_marker(
                                marker):
                            # Marker says this req doesn't apply here.
                            msgs.append(
                                "%s not available in this environment"
                                % spec)
                        else:
                            try:
                                installed = pkg_resources.working_set.find(req)
                            except pkg_resources.VersionConflict:
                                msgs.append(
                                    "Outdated version of %s installed, need %s"
                                    % (req.name, spec))
                            else:
                                if not installed:
                                    msgs.append("Need %s" % spec)

    # Check cElementTree
    import xml.etree.ElementTree as ElementTree

    try:
        import xml.etree.cElementTree as cElementTree
    except ImportError:
        msgs.append("cElementTree unable to be imported")
    else:
        # cElementTree must be a distinct accelerated implementation,
        # not just re-exported ElementTree.
        if cElementTree.Element is ElementTree.Element:
            msgs.append("cElementTree is just an alias for ElementTree")

    if msgs:
        pytest.exit("\n".join(msgs))
def install_dists(dist):
    """
    Install the requirements indicated by self.distribution and return
    an iterable of the dists that were built.
    """
    # Conditional (':<marker>') extras whose marker matches this
    # environment; each yielded value is that extra's requirement list.
    marker_extras = (
        deps
        for key, deps in dist.extras_require.items()
        if key.startswith(':') and evaluate_marker(key[1:])
    )
    return itertools.chain(
        dist.fetch_build_eggs(dist.install_requires),
        dist.fetch_build_eggs(dist.tests_require or []),
        dist.fetch_build_eggs(marker_extras),
    )
def install_dists(dist):
    """
    Install the requirements indicated by self.distribution and return
    an iterable of the dists that were built.
    """
    def _matching_marker_extras():
        # Yield the requirement lists of ':<marker>' extras whose
        # marker matches the current environment.
        for key, deps in dist.extras_require.items():
            if key.startswith(':') and evaluate_marker(key[1:]):
                yield deps

    core = dist.fetch_build_eggs(dist.install_requires)
    tests = dist.fetch_build_eggs(dist.tests_require or [])
    extras = dist.fetch_build_eggs(_matching_marker_extras())
    return itertools.chain(core, tests, extras)
def pytest_configure(config):
    """Verify the test environment and exit the session with a summary of
    problems: missing testdata, unmet optional requirements, or an
    ElementTree accelerator that is only an alias."""
    msgs = []

    # The html5lib testdata lives in a git submodule.
    if not os.path.exists(_testdata):
        msg = "testdata not available! "
        if os.path.exists(os.path.join(_root, ".git")):
            msg += ("Please run git submodule update --init --recursive " +
                    "and then run tests again.")
        else:
            msg += ("The testdata doesn't appear to be included with this package, " +
                    "so finding the right version will be hard. :(")
        msgs.append(msg)

    if config.option.update_xfail:
        # Check for optional requirements
        req_file = os.path.join(_root, "requirements-optional.txt")
        if os.path.exists(req_file):
            with open(req_file, "r") as fp:
                for line in fp:
                    if (line.strip() and
                            not (line.startswith("-r") or
                                 line.startswith("#"))):
                        # Requirement lines may be "spec; marker".
                        if ";" in line:
                            spec, marker = line.strip().split(";", 1)
                        else:
                            spec, marker = line.strip(), None
                        req = pkg_resources.Requirement.parse(spec)
                        if marker and not pkg_resources.evaluate_marker(marker):
                            msgs.append("%s not available in this environment" % spec)
                        else:
                            try:
                                installed = pkg_resources.working_set.find(req)
                            except pkg_resources.VersionConflict:
                                msgs.append("Outdated version of %s installed, need %s" % (req.name, spec))
                            else:
                                if not installed:
                                    msgs.append("Need %s" % spec)

    # Check cElementTree
    import xml.etree.ElementTree as ElementTree

    try:
        import xml.etree.cElementTree as cElementTree
    except ImportError:
        msgs.append("cElementTree unable to be imported")
    else:
        # Must be a real accelerated module, not a re-export.
        if cElementTree.Element is ElementTree.Element:
            msgs.append("cElementTree is just an alias for ElementTree")

    if msgs:
        pytest.exit("\n".join(msgs))
def parse_requirements(requirement_file):
    """Parse *requirement_file* into pkg_resources Requirement objects.

    Entries whose environment markers do not match this environment are
    dropped, as are editable requirements (those fail in the resolve
    phase).
    """
    parsed = []
    for raw in pip_parse_requirements(requirement_file,
                                      session=PipSession()):
        install_req = install_req_from_parsed_requirement(raw)
        markers = install_req.markers
        if markers and not pkg_resources.evaluate_marker(str(markers)):
            # req should not installed due to env markers
            continue
        if install_req.editable:
            # skip editable req as they are failing in the resolve phase
            continue
        parsed.append(
            pkg_resources.Requirement.parse(str(install_req.req)))
    return parsed
def check_modules(script=None):
    """Check whether mandatory modules are present.

    This also checks Python version when importing dependencies
    from setup.py

    @param script: The script name to be checked for dependencies
    @type script: str or None
    @return: True if all dependencies are installed
    @rtype: bool
    @raise RuntimeError: wrong Python version found in setup.py
    """
    import pkg_resources
    if script:
        # Per-script dependency lists from setup.py.
        from setup import script_deps
        dependencies = script_deps.get(Path(script).name, [])
    else:
        from setup import dependencies

    missing_requirements = []
    version_conflicts = []
    for requirement in pkg_resources.parse_requirements(dependencies):
        # Only check requirements whose marker (if any) matches here.
        if requirement.marker is None \
           or pkg_resources.evaluate_marker(str(requirement.marker)):
            try:
                # Raises when the distribution is absent or its installed
                # version conflicts with the requirement.
                pkg_resources.resource_exists(requirement, requirement.name)
            except pkg_resources.DistributionNotFound as e:
                missing_requirements.append(requirement)
                print(e)
            except pkg_resources.VersionConflict as e:
                version_conflicts.append(requirement)
                print(e)

    # Release names no longer needed.
    del pkg_resources
    del dependencies

    _print_requirements(missing_requirements, script, 'missing')
    _print_requirements(version_conflicts, script, 'outdated')

    if version_conflicts and not missing_requirements:
        print('\nYou may continue on your own risk; type CTRL-C to stop.')
        try:
            sleep(5)
        except KeyboardInterrupt:
            return False

    return not missing_requirements
def check_modules(script=None):
    """Check whether mandatory modules are present."""
    import pkg_resources
    if script:
        from setup import script_deps
        try:
            from pathlib import Path
        except ImportError:  # Python 2
            from pathlib2 import Path
        dependencies = script_deps.get(Path(script).name, [])
    else:
        try:
            from setup import dependencies
        except:
            # NOTE(review): this bare except swallows the import failure
            # and leaves `dependencies` unbound, which will surface later
            # as a NameError in parse_requirements — consider narrowing
            # to ImportError or letting it propagate. TODO confirm intent.
            pass

    missing_requirements = []
    version_conflicts = []
    for requirement in pkg_resources.parse_requirements(dependencies):
        # Only check requirements whose marker (if any) matches here.
        if requirement.marker is None \
           or pkg_resources.evaluate_marker(str(requirement.marker)):
            try:
                pkg_resources.resource_exists(requirement, requirement.name)
            except pkg_resources.DistributionNotFound as e:
                missing_requirements.append(requirement)
                print(e)
            except pkg_resources.VersionConflict as e:
                version_conflicts.append(requirement)
                print(e)

    # Release names no longer needed.
    del pkg_resources
    del dependencies

    _print_requirements(missing_requirements, script, 'missing')
    _print_requirements(version_conflicts, script, 'outdated')

    if version_conflicts and not missing_requirements:
        print('\nYou may continue on your own risk; type CTRL-C to stop.')
        try:
            sleep(5)
        except KeyboardInterrupt:
            return False

    return not missing_requirements
def test_lexicographic_ordering():
    """
    Although one might like 2.7.10 to be greater than 2.7.3, the marker
    spec only supports lexicographic ordering.
    """
    result = evaluate_marker("python_full_version > '2.7.3'")
    assert result is False
# mock is only needed as a test dependency on pre-3.4 Pythons.
if sys.version_info[:2] < (3, 4):
    tests_require.append('mock >= 1.0.1')

# Conditional dependencies keyed by PEP 508 markers (':<marker>' keys).
extras_require = {
    ':python_version < "3.4"': ['enum34 >= 1.0.4, < 2'],
    ':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5'],
    ':python_version < "3.3"': ['ipaddress >= 1.0.16'],
}


try:
    if 'bdist_wheel' not in sys.argv:
        # For non-wheel builds, evaluate the markers now and fold matching
        # conditional extras into install_requires.
        for key, value in extras_require.items():
            if key.startswith(':') and pkg_resources.evaluate_marker(key[1:]):
                install_requires.extend(value)
except Exception as e:
    # Marker evaluation failed (e.g. very old setuptools); fall back to
    # installing every conditional dependency unconditionally.
    print("Failed to compute platform dependencies: {}. ".format(e) +
          "All dependencies will be installed as a result.", file=sys.stderr)
    for key, value in extras_require.items():
        if key.startswith(':'):
            install_requires.extend(value)


setup(
    name='docker-compose',
    version=find_version("compose", "__init__.py"),
    description='Multi-container orchestration for Docker',
    url='https://www.docker.com/',
    author='Docker, Inc.',
def setup_cfg_to_setup_kwargs(config, script_args=()):
    """Processes the setup.cfg options and converts them to arguments accepted
    by setuptools' setup() function.
    """

    kwargs = {}

    # Temporarily holds install_requires and extra_requires while we
    # parse env_markers.
    all_requirements = {}

    for arg in D1_D2_SETUP_ARGS:
        if len(D1_D2_SETUP_ARGS[arg]) == 2:
            # The distutils field name is different than distutils2's.
            section, option = D1_D2_SETUP_ARGS[arg]

        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
            # The distutils field name is the same thant distutils2's.
            section = D1_D2_SETUP_ARGS[arg][0]
            option = arg

        in_cfg_value = has_get_option(config, section, option)
        if not in_cfg_value:
            # There is no such option in the setup.cfg
            if arg == "long_description":
                # Fall back to concatenating the files listed under
                # "description_file".
                in_cfg_value = has_get_option(config, section,
                                              "description_file")
                if in_cfg_value:
                    in_cfg_value = split_multiline(in_cfg_value)
                    value = ''
                    for filename in in_cfg_value:
                        description_file = open(filename)
                        try:
                            value += description_file.read().strip() + '\n\n'
                        finally:
                            description_file.close()
                    in_cfg_value = value
            else:
                continue

        # Normalize the raw cfg string according to the field's shape.
        if arg in CSV_FIELDS:
            in_cfg_value = split_csv(in_cfg_value)
        if arg in MULTI_FIELDS:
            in_cfg_value = split_multiline(in_cfg_value)
        elif arg in MAP_FIELDS:
            in_cfg_map = {}
            for i in split_multiline(in_cfg_value):
                k, v = i.split('=')
                in_cfg_map[k.strip()] = v.strip()
            in_cfg_value = in_cfg_map
        elif arg in BOOL_FIELDS:
            # Provide some flexibility here...
            if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
                in_cfg_value = True
            else:
                in_cfg_value = False

        if in_cfg_value:
            if arg in ('install_requires', 'tests_require'):
                # Replaces PEP345-style version specs with the sort expected by
                # setuptools
                in_cfg_value = [
                    _VERSION_SPEC_RE.sub(r'\1\2', pred)
                    for pred in in_cfg_value
                ]
            if arg == 'install_requires':
                # Split install_requires into package,env_marker tuples
                # These will be re-assembled later
                install_requires = []
                requirement_pattern = '(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
                for requirement in in_cfg_value:
                    m = re.match(requirement_pattern, requirement)
                    requirement_package = m.group('package').strip()
                    env_marker = m.group('env_marker').strip()
                    install_requires.append((requirement_package, env_marker))
                all_requirements[''] = install_requires
            elif arg == 'package_dir':
                in_cfg_value = {'': in_cfg_value}
            elif arg in ('package_data', 'data_files'):
                data_files = {}
                firstline = True
                prev = None
                for line in in_cfg_value:
                    if '=' in line:
                        key, value = line.split('=', 1)
                        key, value = (key.strip(), value.strip())
                        if key in data_files:
                            # Multiple duplicates of the same package name;
                            # this is for backwards compatibility of the old
                            # format prior to d2to1 0.2.6.
                            prev = data_files[key]
                            prev.extend(value.split())
                        else:
                            prev = data_files[key.strip()] = value.split()
                    elif firstline:
                        raise errors.DistutilsOptionError(
                            'malformed package_data first line %r (misses '
                            '"=")' % line)
                    else:
                        # Continuation line: extend the previous key.
                        prev.extend(line.strip().split())
                    firstline = False
                if arg == 'data_files':
                    # the data_files value is a pointlessly different structure
                    # from the package_data value
                    data_files = data_files.items()
                in_cfg_value = data_files
            elif arg == 'cmdclass':
                cmdclass = {}
                dist = st_dist.Distribution()
                for cls_name in in_cfg_value:
                    cls = resolve_name(cls_name)
                    cmd = cls(dist)
                    cmdclass[cmd.get_command_name()] = cls
                in_cfg_value = cmdclass

        kwargs[arg] = in_cfg_value

    # Transform requirements with embedded environment markers to
    # setuptools' supported marker-per-requirement format.
    #
    # install_requires are treated as a special case of extras, before
    # being put back in the expected place
    #
    # fred =
    #     foo:marker
    #     bar
    # -> {'fred': ['bar'], 'fred:marker':['foo']}
    if 'extras' in config:
        requirement_pattern = '(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
        extras = config['extras']
        # Add contents of test-requirements, if any, into an extra named
        # 'test' if one does not already exist.
        if 'test' not in extras:
            from pbr import packaging
            extras['test'] = "\n".join(
                packaging.parse_requirements(
                    packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')

        for extra in extras:
            extra_requirements = []
            requirements = split_multiline(extras[extra])
            for requirement in requirements:
                m = re.match(requirement_pattern, requirement)
                extras_value = m.group('package').strip()
                env_marker = m.group('env_marker')
                extra_requirements.append((extras_value, env_marker))
            all_requirements[extra] = extra_requirements

    # Transform the full list of requirements into:
    # - install_requires, for those that have no extra and no
    #   env_marker
    # - named extras, for those with an extra name (which may include
    #   an env_marker)
    # - and as a special case, install_requires with an env_marker are
    #   treated as named extras where the name is the empty string

    extras_require = {}
    for req_group in all_requirements:
        for requirement, env_marker in all_requirements[req_group]:
            if env_marker:
                extras_key = '%s:(%s)' % (req_group, env_marker)
                # We do not want to poison wheel creation with locally
                # evaluated markers. sdists always re-create the egg_info
                # and as such do not need guarded, and pip will never call
                # multiple setup.py commands at once.
                if 'bdist_wheel' not in script_args:
                    try:
                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
                            extras_key = req_group
                    except SyntaxError:
                        log.error(
                            "Marker evaluation failed, see the following "
                            "error. For more information see: "
                            "http://docs.openstack.org/"
                            "developer/pbr/compatibility.html#evaluate-marker")
                        raise
            else:
                extras_key = req_group
            extras_require.setdefault(extras_key, []).append(requirement)

    kwargs['install_requires'] = extras_require.pop('', [])
    kwargs['extras_require'] = extras_require

    return kwargs
# mock is only needed as a test dependency on pre-3.4 Pythons.
if sys.version_info[:2] < (3, 4):
    tests_require.append('mock >= 1.0.1, < 4')

# Conditional (':<marker>') and named extras.
extras_require = {
    ':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5, < 4'],
    ':python_version < "3.8"': ['cached-property >= 1.2.0, < 2'],
    ':sys_platform == "win32"': ['colorama >= 0.4, < 1'],
    'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'],
    'tests': tests_require,
}


try:
    if 'bdist_wheel' not in sys.argv:
        # For non-wheel builds, evaluate the markers now and fold matching
        # conditional extras into install_requires.
        for key, value in extras_require.items():
            if key.startswith(':') and pkg_resources.evaluate_marker(key[1:]):
                install_requires.extend(value)
except Exception as e:
    # Marker evaluation failed (e.g. very old setuptools); fall back to
    # installing every conditional dependency unconditionally.
    print("Failed to compute platform dependencies: {}. ".format(e) +
          "All dependencies will be installed as a result.", file=sys.stderr)
    for key, value in extras_require.items():
        if key.startswith(':'):
            install_requires.extend(value)


setup(
    name='docker-compose',
    version=find_version("compose", "__init__.py"),
    description='Multi-container orchestration for Docker',
    long_description=read('README.md'),
    long_description_content_type='text/markdown',
def munge(rqr):
    """Flatten a wheel-style ``run_requires`` list into parsed
    requirements, keeping only requirement groups whose environment
    marker (if any) matches the running environment."""
    selected = []
    for reqset in rqr:
        env = reqset.get('environment')
        if env and not evaluate_marker(env):
            continue
        selected.append(parse_requirements(reqset['requires']))
    return chain.from_iterable(selected)
def test_pkg_resources(marker): req = "foo==1.1;" + marker + "=='2.1'" req, mark = req.split(";") pkg_resources.Requirement.parse(req) pkg_resources.evaluate_marker(mark)
import pytest
import yaml

from run_notebook_checks import _notebook_check_is_clean

# Notebooks excluded from checking, keyed by filename, loaded from the
# sibling exclude.yml. An optional "unless" environment marker keeps a
# notebook *included* when the marker matches this environment.
_exclude_file = pathlib.Path(__file__).absolute().parent / "exclude.yml"
_EXCLUDE = dict()
with open(_exclude_file, "r") as fp:
    for item in yaml.safe_load(fp):
        filename, reason = item["file"], item["reason"]
        try:
            unless = item["unless"]
        except KeyError:
            # No "unless" marker: exclude unconditionally.
            exclude = True
        else:
            # Exclude only when the marker does NOT match here.
            exclude = not evaluate_marker(unless)
        if exclude:
            _EXCLUDE[filename] = reason


def _notebook_check(notebook):
    """Check a notebook for errors.

    Parameters
    ----------
    notebook : Notebook node
        Path to the notebook to execute.

    Returns
    -------
    errors : list of str
        A list of the errors encountered in the notebook cells.
    """
def test_ordering(python_version_mock): assert evaluate_marker("python_full_version > '2.7.3'") is True
def setup_cfg_to_setup_kwargs(config, script_args=()):
    """Processes the setup.cfg options and converts them to arguments accepted
    by setuptools' setup() function.
    """

    kwargs = {}

    # Temporarily holds install_requires and extra_requires while we
    # parse env_markers.
    all_requirements = {}

    for arg in D1_D2_SETUP_ARGS:
        if len(D1_D2_SETUP_ARGS[arg]) == 2:
            # The distutils field name is different than distutils2's.
            section, option = D1_D2_SETUP_ARGS[arg]

        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
            # The distutils field name is the same thant distutils2's.
            section = D1_D2_SETUP_ARGS[arg][0]
            option = arg

        in_cfg_value = has_get_option(config, section, option)
        if not in_cfg_value:
            # There is no such option in the setup.cfg
            if arg == "long_description":
                # Fall back to concatenating the "description_file" files.
                in_cfg_value = has_get_option(config, section,
                                              "description_file")
                if in_cfg_value:
                    in_cfg_value = split_multiline(in_cfg_value)
                    value = ''
                    for filename in in_cfg_value:
                        description_file = open(filename)
                        try:
                            value += description_file.read().strip() + '\n\n'
                        finally:
                            description_file.close()
                    in_cfg_value = value
            else:
                continue

        # Normalize the raw cfg string according to the field's shape.
        if arg in CSV_FIELDS:
            in_cfg_value = split_csv(in_cfg_value)
        if arg in MULTI_FIELDS:
            in_cfg_value = split_multiline(in_cfg_value)
        elif arg in MAP_FIELDS:
            in_cfg_map = {}
            for i in split_multiline(in_cfg_value):
                k, v = i.split('=')
                in_cfg_map[k.strip()] = v.strip()
            in_cfg_value = in_cfg_map
        elif arg in BOOL_FIELDS:
            # Provide some flexibility here...
            if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
                in_cfg_value = True
            else:
                in_cfg_value = False

        if in_cfg_value:
            if arg in ('install_requires', 'tests_require'):
                # Replaces PEP345-style version specs with the sort expected by
                # setuptools
                in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred)
                                for pred in in_cfg_value]
            if arg == 'install_requires':
                # Split install_requires into package,env_marker tuples
                # These will be re-assembled later
                install_requires = []
                requirement_pattern = '(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
                for requirement in in_cfg_value:
                    m = re.match(requirement_pattern, requirement)
                    requirement_package = m.group('package').strip()
                    env_marker = m.group('env_marker').strip()
                    install_requires.append((requirement_package, env_marker))
                all_requirements[''] = install_requires
            elif arg == 'package_dir':
                in_cfg_value = {'': in_cfg_value}
            elif arg in ('package_data', 'data_files'):
                data_files = {}
                firstline = True
                prev = None
                for line in in_cfg_value:
                    if '=' in line:
                        key, value = line.split('=', 1)
                        key, value = (key.strip(), value.strip())
                        if key in data_files:
                            # Multiple duplicates of the same package name;
                            # this is for backwards compatibility of the old
                            # format prior to d2to1 0.2.6.
                            prev = data_files[key]
                            prev.extend(value.split())
                        else:
                            prev = data_files[key.strip()] = value.split()
                    elif firstline:
                        raise errors.DistutilsOptionError(
                            'malformed package_data first line %r (misses '
                            '"=")' % line)
                    else:
                        # Continuation line: extend the previous key.
                        prev.extend(line.strip().split())
                    firstline = False
                if arg == 'data_files':
                    # the data_files value is a pointlessly different structure
                    # from the package_data value
                    data_files = data_files.items()
                in_cfg_value = data_files
            elif arg == 'cmdclass':
                cmdclass = {}
                dist = st_dist.Distribution()
                for cls_name in in_cfg_value:
                    cls = resolve_name(cls_name)
                    cmd = cls(dist)
                    cmdclass[cmd.get_command_name()] = cls
                in_cfg_value = cmdclass

        kwargs[arg] = in_cfg_value

    # Transform requirements with embedded environment markers to
    # setuptools' supported marker-per-requirement format.
    #
    # install_requires are treated as a special case of extras, before
    # being put back in the expected place
    #
    # fred =
    #     foo:marker
    #     bar
    # -> {'fred': ['bar'], 'fred:marker':['foo']}
    if 'extras' in config:
        requirement_pattern = '(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
        extras = config['extras']
        # Add contents of test-requirements, if any, into an extra named
        # 'test' if one does not already exist.
        if 'test' not in extras:
            from pbr import packaging
            extras['test'] = "\n".join(packaging.parse_requirements(
                packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')

        for extra in extras:
            extra_requirements = []
            requirements = split_multiline(extras[extra])
            for requirement in requirements:
                m = re.match(requirement_pattern, requirement)
                extras_value = m.group('package').strip()
                env_marker = m.group('env_marker')
                extra_requirements.append((extras_value, env_marker))
            all_requirements[extra] = extra_requirements

    # Transform the full list of requirements into:
    # - install_requires, for those that have no extra and no
    #   env_marker
    # - named extras, for those with an extra name (which may include
    #   an env_marker)
    # - and as a special case, install_requires with an env_marker are
    #   treated as named extras where the name is the empty string

    extras_require = {}
    for req_group in all_requirements:
        for requirement, env_marker in all_requirements[req_group]:
            if env_marker:
                extras_key = '%s:(%s)' % (req_group, env_marker)
                # We do not want to poison wheel creation with locally
                # evaluated markers. sdists always re-create the egg_info
                # and as such do not need guarded, and pip will never call
                # multiple setup.py commands at once.
                if 'bdist_wheel' not in script_args:
                    try:
                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
                            extras_key = req_group
                    except SyntaxError:
                        log.error(
                            "Marker evaluation failed, see the following "
                            "error. For more information see: "
                            "http://docs.openstack.org/"
                            "developer/pbr/compatibility.html#evaluate-marker"
                        )
                        raise
            else:
                extras_key = req_group
            extras_require.setdefault(extras_key, []).append(requirement)

    kwargs['install_requires'] = extras_require.pop('', [])
    kwargs['extras_require'] = extras_require

    return kwargs
def check_modules(script=None):
    """Check whether mandatory modules are present.

    This also checks Python version when importing dependencies
    from setup.py

    :param script: The script name to be checked for dependencies
    :type script: str or None
    :return: True if all dependencies are installed
    :rtype: bool
    :raise RuntimeError: wrong Python version found in setup.py
    """
    import pkg_resources

    from setup import script_deps

    missing_requirements = []
    version_conflicts = []

    if script:
        # Per-script dependency lists from setup.py.
        dependencies = script_deps.get(Path(script).name, [])
    else:
        from setup import dependencies

    try:
        # Probe that the requirement strings are parsable at all.
        next(pkg_resources.parse_requirements(dependencies))
    except ValueError as e:
        # T286980: setuptools is too old and requirement parsing fails
        import setuptools
        setupversion = tuple(int(num)
                             for num in setuptools.__version__.split('.'))
        if setupversion < (20, 8, 1):
            # print the minimal requirement
            _print_requirements(
                ['setuptools>=20.8.1'], None,
                'outdated ({})'.format(setuptools.__version__))
            return False
        raise e

    for requirement in pkg_resources.parse_requirements(dependencies):
        # Only check requirements whose marker (if any) matches here.
        if requirement.marker is None \
           or pkg_resources.evaluate_marker(str(requirement.marker)):
            try:
                # Raises when the distribution is absent or its installed
                # version conflicts with the requirement.
                pkg_resources.resource_exists(requirement, requirement.name)
            except pkg_resources.DistributionNotFound as e:
                missing_requirements.append(requirement)
                print(e)
            except pkg_resources.VersionConflict as e:
                version_conflicts.append(requirement)
                print(e)

    # Release names no longer needed.
    del pkg_resources
    del dependencies
    del script_deps

    _print_requirements(missing_requirements, script, 'missing')
    _print_requirements(version_conflicts, script, 'outdated')
    if version_conflicts and not missing_requirements:
        print('\nYou may continue on your own risk; type CTRL-C to stop.')
        try:
            sleep(5)
        except KeyboardInterrupt:
            return False

    return not missing_requirements