def setup_hook(config):
    """Filter config parsed from a setup.cfg to inject our defaults.

    Mutates ``config`` in place: resolves the package version, injects
    requirements/dependency-link data parsed from the requirements files,
    registers pbr's custom distutils commands, and expands any manpage
    declarations into data_files entries.
    """
    metadata = config['metadata']
    # Resolve the version, honouring any explicitly configured value.
    metadata['version'] = packaging.get_version(metadata['name'],
                                                metadata.get('version', None))
    metadata['requires_dist'] = "\n".join(packaging.parse_requirements())
    config['metadata'] = metadata

    config['global'] = config.get('global', dict())
    # Append our custom commands to whatever the project already declared.
    config['global']['commands'] = config['global'].get('commands', "") + """
pbr.packaging.LocalSDist
"""
    if packaging.have_sphinx():
        config['global']['commands'] = config['global']['commands'] + """
pbr.packaging.LocalBuildDoc
pbr.packaging.LocalBuildLatex
"""

    pbr_config = config.get('pbr', dict())
    use_egg = packaging.get_boolean_option(
        pbr_config, 'use-egg', 'PBR_USE_EGG')
    # We always want non-egg install unless explicitly requested
    if 'manpages' in pbr_config or not use_egg:
        config['global']['commands'] = config['global']['commands'] + """
pbr.packaging.DistutilsInstall
"""

    backwards_compat = config.get('backwards_compat', dict())
    backwards_compat['dependency_links'] = "\n".join(
        packaging.parse_dependency_links())
    backwards_compat['include_package_data'] = 'True'
    backwards_compat['tests_require'] = "\n".join(
        packaging.parse_requirements(
            ["test-requirements.txt", "tools/test-requires"]))
    config['backwards_compat'] = backwards_compat

    files = config.get('files', dict())
    package = files.get('packages', metadata['name']).strip()
    if os.path.isdir(package):
        files['packages'] = smart_find_packages(package)

    if 'manpages' in pbr_config:
        # Group manpages by section number (last character of the name,
        # e.g. "foo.1" -> section "1") and emit data_files entries of the
        # form "<manpath>/manN = page ...".
        man_sections = dict()
        manpages = pbr_config['manpages']
        data_files = files.get('data_files', '')
        for manpage in manpages.split():
            section_number = manpage.strip()[-1]
            section = man_sections.get(section_number, list())
            section.append(manpage.strip())
            man_sections[section_number] = section
        for (section, pages) in man_sections.items():
            manpath = os.path.join(packaging.get_manpath(), 'man%s' % section)
            data_files = "%s\n%s" % (data_files, "%s =" % manpath)
            for page in pages:
                data_files = "%s\n%s" % (data_files, page)
        files['data_files'] = data_files

    config['files'] = files
def setup_hook(config):
    """Filter config parsed from a setup.cfg to inject our defaults.

    Resolves the package version, injects parsed requirements into the
    metadata, registers pbr's custom distutils commands and normalizes the
    packages entry in the files section.
    """
    metadata = config['metadata']
    metadata['version'] = packaging.get_version(
        metadata['name'], metadata.get('version', None))
    metadata['requires_dist'] = "\n".join(packaging.parse_requirements())
    config['metadata'] = metadata

    config['global'] = config.get('global', dict())
    # Register our sdist command in addition to anything already configured.
    commands = config['global'].get('commands', "") + """
pbr.packaging.LocalSDist
"""
    if packaging.have_sphinx():
        # Sphinx is importable, so the doc-building commands can be offered.
        commands = commands + """
pbr.packaging.LocalBuildDoc
pbr.packaging.LocalBuildLatex
"""
    config['global']['commands'] = commands

    files = config.get('files', dict())
    files['packages'] = smart_find_packages(
        files.get('packages', metadata['name']))
    config['files'] = files
def run(self, *args, **kwargs):
    """Run the install command, swapping in minimal requirements if asked.

    When ``self.minimal`` is truthy, ``install_requires`` is replaced with
    the contents of minimal-requirements.txt before delegating to the
    standard install command.
    """
    # Parenthesized print of a single value behaves identically on
    # Python 2 and 3; the bare "print x" statement form used previously
    # is a SyntaxError on Python 3.
    print("MODIFIED RUN: %s" % self.__class__)
    if self.minimal:
        self.distribution.install_requires = parse_requirements(
            ["minimal-requirements.txt"])
        print("SELF.MINIMAL")
        print("install requires: %s" % (self.distribution.install_requires))
    install_command.run(self, *args, **kwargs)
def test_parse_requirements_keeps_versioned_ordereddict(self):
    """A pinned ordereddict requirement survives parsing on Python 2.6."""
    self.useFixture(fixtures.MonkeyPatch('sys.version_info', (2, 6)))
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("ordereddict==1.0.1")
    parsed = packaging.parse_requirements([self.tmp_file])
    self.assertEqual(["ordereddict==1.0.1"], parsed)
def test_python_version(self, mock_warn):
    """A requirements-pyN.txt file is honoured but triggers a deprecation."""
    versioned_file = "requirements-py%d.txt" % sys.version_info[0]
    with open(versioned_file, "w") as req_fh:
        req_fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
    self.assertEqual(['foobar', 'foobaz'], packaging.parse_requirements())
    mock_warn.assert_called_once_with(mock.ANY, DeprecationWarning)
def test_parse_requirements_right_python_version(self):
    """Only the file matching the running major version is parsed."""
    with open("requirements-py1.txt", "w") as trap_fh:
        trap_fh.write("thisisatrap")
    current = "requirements-py%d.txt" % sys.version_info[0]
    with open(current, "w") as req_fh:
        req_fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
    self.assertEqual(['foobar', 'foobaz'], packaging.parse_requirements())
def test_parse_requirements_override_with_env_multiple_files(self):
    """Nonexistent entries in PBR_REQUIREMENTS_FILES are skipped over."""
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("foo\nbar")
    env_value = "no-such-file," + self.tmp_file
    self.useFixture(
        fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', env_value))
    self.assertEqual(['foo', 'bar'], packaging.parse_requirements())
def hook(self):
    """Append parsed dependency links and test requirements to the config."""
    links = packaging.parse_dependency_links()
    packaging.append_text_list(self.config, 'dependency_links', links)
    test_reqs = packaging.parse_requirements(
        packaging.TEST_REQUIREMENTS_FILES)
    packaging.append_text_list(self.config, 'tests_require', test_reqs)
def test_override_with_env(self):
    """Ensure environment variable used if no files provided."""
    import os
    fd, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup')
    # mkstemp returns an open OS-level descriptor; close it so it is not
    # leaked (the original discarded it with "_, tmp_file = ...").
    os.close(fd)
    with open(tmp_file, 'w') as fh:
        fh.write("foo\nbar")
    self.useFixture(
        fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', tmp_file))
    self.assertEqual(['foo', 'bar'], packaging.parse_requirements())
def test_override_with_env_multiple_files(self):
    """Nonexistent entries in PBR_REQUIREMENTS_FILES are skipped."""
    import os
    fd, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup')
    # Close the descriptor mkstemp opened; the original leaked it via
    # "_, tmp_file = ...".
    os.close(fd)
    with open(tmp_file, 'w') as fh:
        fh.write("foo\nbar")
    self.useFixture(
        fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
                                     "no-such-file," + tmp_file))
    self.assertEqual(['foo', 'bar'], packaging.parse_requirements())
def test_index_present(self):
    """Index-location options must be stripped from parsed requirements."""
    tempdir = tempfile.mkdtemp()
    requirements = os.path.join(tempdir, 'requirements.txt')
    with open(requirements, 'w') as f:
        # Each option must sit on its own line to be parsed separately;
        # the original writes had no newlines, so everything collapsed
        # into a single "-i ..." line and the --index-url and
        # --extra-index-url forms were never actually exercised.
        f.write('-i https://myindex.local\n')
        f.write('--index-url https://myindex.local\n')
        f.write('--extra-index-url https://myindex.local\n')
    result = packaging.parse_requirements([requirements])
    self.assertEqual([], result)
def test_python_version_multiple_options(self, mock_warn):
    """Multiple versioned requirement files trigger exactly one warning."""
    with open("requirements-py1.txt", "w") as trap_fh:
        trap_fh.write("thisisatrap")
    current = "requirements-py%d.txt" % sys.version_info[0]
    with open(current, "w") as req_fh:
        req_fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
    self.assertEqual(['foobar', 'foobaz'], packaging.parse_requirements())
    # even though we have multiple offending files, this should only be
    # called once
    mock_warn.assert_called_once_with(mock.ANY, DeprecationWarning)
def hook(self):
    """Enable package data and append dependency/test requirement lists."""
    self.config['include_package_data'] = 'True'
    packaging.append_text_list(
        self.config, 'dependency_links',
        packaging.parse_dependency_links())
    test_files = ["test-requirements.txt", "tools/test-requires"]
    packaging.append_text_list(
        self.config, 'tests_require',
        packaging.parse_requirements(test_files))
def hook(self):
    """Enable package data and append dependency/test requirement lists."""
    self.config['include_package_data'] = 'True'
    links = packaging.parse_dependency_links()
    packaging.append_text_list(self.config, 'dependency_links', links)
    tests_require = packaging.parse_requirements(
        packaging.TEST_REQUIREMENTS_FILES)
    packaging.append_text_list(self.config, 'tests_require', tests_require)
def test_nested_requirement(self):
    """A -r include pulls requirements in from the referenced file."""
    tempdir = tempfile.mkdtemp()
    requirements = os.path.join(tempdir, "requirements.txt")
    nested = os.path.join(tempdir, "nested.txt")
    with open(requirements, "w") as f:
        f.write("-r " + nested)
    with open(nested, "w") as f:
        f.write("pbr")
    result = packaging.parse_requirements([requirements])
    # assertEqual takes (expected, observed); the other tests in this
    # suite follow that order, so keep it consistent here.
    self.assertEqual(["pbr"], result)
def test_nested_requirements(self):
    """Requirements referenced via -r are resolved recursively."""
    workdir = tempfile.mkdtemp()
    top_level = os.path.join(workdir, 'requirements.txt')
    included = os.path.join(workdir, 'nested.txt')
    with open(top_level, 'w') as top_fh:
        top_fh.write('-r ' + included)
    with open(included, 'w') as nested_fh:
        nested_fh.write('pbr')
    self.assertEqual(['pbr'], packaging.parse_requirements([top_level]))
def test_nested_requirement(self):
    """A -r include pulls requirements in from the referenced file."""
    tempdir = tempfile.mkdtemp()
    requirements = os.path.join(tempdir, 'requirements.txt')
    nested = os.path.join(tempdir, 'nested.txt')
    with open(requirements, 'w') as f:
        f.write('-r ' + nested)
    with open(nested, 'w') as f:
        f.write('pbr')
    result = packaging.parse_requirements([requirements])
    # assertEqual takes (expected, observed); keep the conventional order
    # used by the rest of this suite.
    self.assertEqual(['pbr'], result)
def test_parse_requirements(self):
    """Scenario attributes control how the requirement string is built."""
    tmp_file = tempfile.NamedTemporaryFile()
    req_string = self.url
    # Scenario flags (set by testscenarios) toggle editable/versioned forms.
    if getattr(self, 'editable', False):
        req_string = "-e %s" % req_string
    if getattr(self, 'versioned', False):
        req_string = "%s-1.2.3" % req_string
    with open(tmp_file.name, 'w') as fh:
        fh.write(req_string)
    self.assertEqual(self.expected,
                     packaging.parse_requirements([tmp_file.name]))
def test_default_requirements(self):
    """Ensure default files used if no files provided."""
    workdir = tempfile.mkdtemp()
    req_path = os.path.join(workdir, 'requirements.txt')
    with open(req_path, 'w') as req_fh:
        req_fh.write('pbr')
    # the defaults are relative to where pbr is called from so we need to
    # override them. This is OK, however, as we want to validate that
    # defaults are used - not what those defaults are
    with mock.patch.object(packaging, 'REQUIREMENTS_FILES', (req_path,)):
        self.assertEqual(['pbr'], packaging.parse_requirements())
def test_default_requirements(self):
    """Ensure default files used if no files provided."""
    tempdir = tempfile.mkdtemp()
    requirements = os.path.join(tempdir, 'requirements.txt')
    with open(requirements, 'w') as f:
        f.write('pbr')
    # Defaults are resolved relative to the caller's cwd, so patch in our
    # temporary file and just verify the default list is consulted.
    patched = mock.patch.object(
        packaging, 'REQUIREMENTS_FILES', (requirements,))
    with patched:
        result = packaging.parse_requirements()
    self.assertEqual(['pbr'], result)
def test_parse_repo_url_requirements(self):
    """Repo-style URLs in requirements resolve to plain project specs."""
    expected = [
        'oslo.messaging>=1.0.0-rc',
        'django-thumborize',
        'django-thumborize-beta',
        'django-thumborize2-beta',
        'django-thumborize2-beta>=4.0.1',
        'django-thumborize2-beta>=1.0.0-alpha.beta.1',
        'django-thumborize2-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
        'django-thumborize2-beta>=2.0.0-rc.1+build.123',
        'Proj1',
        'Proj2>=0.0.1',
        'Proj3',
        'Proj4>=0.0.2',
        'Proj5',
        'Proj>=0.0.3',
        'Proj',
        'Proj>=0.0.4',
        'Proj',
        'foo-bar>=1.2.4',
        'pypi-proj1',
        'pypi-proj2',
    ]
    result = packaging.parse_requirements([self.requirements])
    self.assertEqual(expected, result)
def _process_requirements(cls, config, field, defaults):
    """Parse the first existing requirements file for *field*.

    Returns a Requirements tuple of (parsed requirements, referenced
    files relative to the current directory, dependency links).
    """
    candidates = cls._get_requirements(config, field, defaults)
    if not candidates:
        return Requirements([], [], [])
    primary = candidates[0]
    # pbr doesn't guess cwd from file path
    with cls._cd_to_file(primary) as (dirname, filename):
        requirements = packaging.parse_requirements([filename])
        referenced_files = cls._find_linked_requirements_files(primary)
        cwd = os.getcwd()
        relative_files = [path[len(cwd) + 1:] for path in referenced_files]
        dependency_links = packaging.parse_dependency_links(referenced_files)
    return Requirements(requirements, relative_files, dependency_links)
def find_consuming_projects(lib_name, repo_root, projects):
    """Filter the list of projects to only include entries that use the
    library.
    """
    def _requires_lib(project):
        # True as soon as any requirements file of the project names lib_name.
        for base in packaging.get_requirements_files():
            req_file = os.path.join(repo_root, project, base)
            for req in packaging.parse_requirements([req_file]):
                try:
                    parsed_req = pkg_resources.Requirement.parse(req)
                except ValueError:
                    # Unparseable requirement line; ignore it.
                    continue
                if parsed_req.project_name == lib_name:
                    return True
        return False

    for project in projects:
        if _requires_lib(project):
            yield project
def _process_requirements(cls, config, field, defaults):
    """Resolve requirements and dependency links for *field*."""
    found = cls._get_requirements(config, field, defaults)
    if not found:
        return Requirements([], [], [])
    top = found[0]
    # pbr doesn't guess cwd from file path
    with cls._cd_to_file(top) as (dirname, filename):
        reqs = packaging.parse_requirements([filename])
        linked = cls._find_linked_requirements_files(top)
        base = os.getcwd()
        normalized = [p[len(base) + 1:] for p in linked]
        links = packaging.parse_dependency_links(linked)
    return Requirements(reqs, normalized, links)
def test_parse_requirements(self):
    """Build a requirement string from scenario flags and parse it."""
    req_string = self.url
    if getattr(self, 'name', None):
        req_string = "%s=%s" % (req_string, self.name)
    if getattr(self, 'editable', None):
        req_string = "-e %s" % req_string
    # Each remaining flag appends a version (or subdirectory) suffix,
    # applied in this fixed order.
    suffixes = (
        ('versioned', '-1.2.3'),
        ('versioned_1', '-1'),
        ('versioned_2', '-10'),
        ('versioned_3', '-10.2'),
        ('versioned_4', '-10.30.1'),
        ('versioned_5', '-10a1'),
        ('versioned_6', '-10.30.0.1alpha_release'),
        ('versioned_7', '-10alpha'),
        ('has_subdirectory', '&subdirectory=baz'),
    )
    for attr, suffix in suffixes:
        if getattr(self, attr, None):
            req_string = "%s%s" % (req_string, suffix)
    tmp_file = tempfile.NamedTemporaryFile()
    with open(tmp_file.name, 'w') as fh:
        fh.write(req_string)
    expected = self.expected
    if getattr(self, 'name', None):
        expected = ["%s%s" % (self.name, ex) for ex in expected]
    self.assertEqual(expected,
                     packaging.parse_requirements([tmp_file.name]))
def test_empty_requirements(self):
    """Parsing an empty file list yields an empty result."""
    self.assertEqual([], packaging.parse_requirements([]))
def test_parse_requirements_removes_argparse(self):
    """argparse is in the 2.7+ stdlib, so it is dropped from requirements."""
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("argparse")
    if sys.version_info >= (2, 7):
        self.assertEqual([], packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_comments(self):
    """Comment lines are ignored while real requirements are kept."""
    contents = "# this is a comment\nfoobar\n# and another one\nfoobaz"
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write(contents)
    self.assertEqual(['foobar', 'foobaz'],
                     packaging.parse_requirements([self.tmp_file]))
def setup_cfg_to_setup_kwargs(config, script_args=()):
    """Processes the setup.cfg options and converts them to arguments accepted
    by setuptools' setup() function.
    """

    kwargs = {}

    # Temporarily holds install_requires and extra_requires while we
    # parse env_markers.
    all_requirements = {}

    for arg in D1_D2_SETUP_ARGS:
        if len(D1_D2_SETUP_ARGS[arg]) == 2:
            # The distutils field name is different than distutils2's.
            section, option = D1_D2_SETUP_ARGS[arg]
        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
            # The distutils field name is the same thant distutils2's.
            section = D1_D2_SETUP_ARGS[arg][0]
            option = arg

        in_cfg_value = has_get_option(config, section, option)
        if not in_cfg_value:
            # There is no such option in the setup.cfg
            if arg == "long_description":
                # Fall back to description_file: read and concatenate the
                # listed files into one long-description string.
                in_cfg_value = has_get_option(config, section,
                                              "description_file")
                if in_cfg_value:
                    in_cfg_value = split_multiline(in_cfg_value)
                    value = ''
                    for filename in in_cfg_value:
                        description_file = open(filename)
                        try:
                            value += description_file.read().strip() + '\n\n'
                        finally:
                            description_file.close()
                    in_cfg_value = value
            else:
                continue

        if arg in CSV_FIELDS:
            in_cfg_value = split_csv(in_cfg_value)
        if arg in MULTI_FIELDS:
            in_cfg_value = split_multiline(in_cfg_value)
        elif arg in MAP_FIELDS:
            # "key = value" lines become a dict.
            in_cfg_map = {}
            for i in split_multiline(in_cfg_value):
                k, v = i.split('=')
                in_cfg_map[k.strip()] = v.strip()
            in_cfg_value = in_cfg_map
        elif arg in BOOL_FIELDS:
            # Provide some flexibility here...
            if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
                in_cfg_value = True
            else:
                in_cfg_value = False

        if in_cfg_value:
            if arg in ('install_requires', 'tests_require'):
                # Replaces PEP345-style version specs with the sort expected by
                # setuptools
                in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred)
                                for pred in in_cfg_value]
            if arg == 'install_requires':
                # Split install_requires into package,env_marker tuples
                # These will be re-assembled later
                install_requires = []
                requirement_pattern = '(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
                for requirement in in_cfg_value:
                    m = re.match(requirement_pattern, requirement)
                    requirement_package = m.group('package').strip()
                    env_marker = m.group('env_marker').strip()
                    install_requires.append((requirement_package, env_marker))
                all_requirements[''] = install_requires
            elif arg == 'package_dir':
                in_cfg_value = {'': in_cfg_value}
            elif arg in ('package_data', 'data_files'):
                # Lines of the form "key = v1 v2" start a group; bare lines
                # extend the most recent group.
                data_files = {}
                firstline = True
                prev = None
                for line in in_cfg_value:
                    if '=' in line:
                        key, value = line.split('=', 1)
                        key, value = (key.strip(), value.strip())
                        if key in data_files:
                            # Multiple duplicates of the same package name;
                            # this is for backwards compatibility of the old
                            # format prior to d2to1 0.2.6.
                            prev = data_files[key]
                            prev.extend(value.split())
                        else:
                            prev = data_files[key.strip()] = value.split()
                    elif firstline:
                        raise errors.DistutilsOptionError(
                            'malformed package_data first line %r (misses '
                            '"=")' % line)
                    else:
                        prev.extend(line.strip().split())
                    firstline = False
                if arg == 'data_files':
                    # the data_files value is a pointlessly different structure
                    # from the package_data value
                    data_files = data_files.items()
                in_cfg_value = data_files
            elif arg == 'cmdclass':
                # Resolve dotted command-class names into actual classes.
                cmdclass = {}
                dist = st_dist.Distribution()
                for cls_name in in_cfg_value:
                    cls = resolve_name(cls_name)
                    cmd = cls(dist)
                    cmdclass[cmd.get_command_name()] = cls
                in_cfg_value = cmdclass

        kwargs[arg] = in_cfg_value

    # Transform requirements with embedded environment markers to
    # setuptools' supported marker-per-requirement format.
    #
    # install_requires are treated as a special case of extras, before
    # being put back in the expected place
    #
    # fred =
    #     foo:marker
    #     bar
    # -> {'fred': ['bar'], 'fred:marker':['foo']}

    if 'extras' in config:
        requirement_pattern = '(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
        extras = config['extras']
        # Add contents of test-requirements, if any, into an extra named
        # 'test' if one does not already exist.
        if 'test' not in extras:
            from pbr import packaging
            extras['test'] = "\n".join(packaging.parse_requirements(
                packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')

        for extra in extras:
            extra_requirements = []
            requirements = split_multiline(extras[extra])
            for requirement in requirements:
                m = re.match(requirement_pattern, requirement)
                extras_value = m.group('package').strip()
                env_marker = m.group('env_marker')
                extra_requirements.append((extras_value, env_marker))
            all_requirements[extra] = extra_requirements

    # Transform the full list of requirements into:
    # - install_requires, for those that have no extra and no
    #   env_marker
    # - named extras, for those with an extra name (which may include
    #   an env_marker)
    # - and as a special case, install_requires with an env_marker are
    #   treated as named extras where the name is the empty string

    extras_require = {}
    for req_group in all_requirements:
        for requirement, env_marker in all_requirements[req_group]:
            if env_marker:
                extras_key = '%s:(%s)' % (req_group, env_marker)
                # We do not want to poison wheel creation with locally
                # evaluated markers. sdists always re-create the egg_info
                # and as such do not need guarded, and pip will never call
                # multiple setup.py commands at once.
                if 'bdist_wheel' not in script_args:
                    try:
                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
                            extras_key = req_group
                    except SyntaxError:
                        log.error(
                            "Marker evaluation failed, see the following "
                            "error. For more information see: "
                            "http://docs.openstack.org/"
                            "developer/pbr/compatibility.html#evaluate-marker"
                        )
                        raise
            else:
                extras_key = req_group
            extras_require.setdefault(extras_key, []).append(requirement)

    kwargs['install_requires'] = extras_require.pop('', [])
    kwargs['extras_require'] = extras_require

    return kwargs
def test_parse_repo_url_requirements(self):
    """Repository URL requirements are reduced to project name specs."""
    result = packaging.parse_requirements([self.requirements])
    # assertEqual is (expected, observed) elsewhere in this suite; use
    # the same order here for consistent failure messages.
    self.assertEqual(['Proj1', 'Proj2>=0.0.1', 'Proj3',
                      'Proj4>=0.0.2', 'Proj5', 'Proj>=0.0.3',
                      'Proj', 'Proj>=0.0.4', 'Proj',
                      'pypi-proj1', 'pypi-proj2'], result)
def test_parse_requirements_removes_index_lines(self):
    """A -f (find-links) line is stripped from the parsed result."""
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("-f foobar")
    self.assertEqual([], packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_normal(self):
    """Plain requirement lines are returned in file order."""
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("foo\nbar")
    parsed = packaging.parse_requirements([self.tmp_file])
    self.assertEqual(['foo', 'bar'], parsed)
# NOTE(review): the opening of the entry_points dict lies before this
# chunk; only its tail is visible here — confirm against the full file.
], 'oslo.config.opts': [
    'nectar_osc = nectar_osc.config:list_opts',
],
}

# Package definition for the nectar-osc OpenStack client plugin.
setup(
    name='nectar-osc',
    version='0.3.0',
    description=('OpenStack client plugin for misc Nectar tooling'),
    author='Adrian Smith',
    author_email='*****@*****.**',
    url='https://github.com/NeCTAR-RC/nectar-osc',
    packages=[
        'nectar_osc',
    ],
    include_package_data=True,
    setup_requires=['pbr>=3.0.0'],
    # Runtime dependencies come from the requirements file via pbr's parser.
    install_requires=parse_requirements(),
    license="Apache",
    zip_safe=False,
    classifiers=(
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Operating System :: OS Independent',
    ),
    entry_points=entry_points,
)
def test_parse_requirements_keeps_versioned_ordereddict(self):
    """ordereddict pins are preserved when running under Python 2.6."""
    self.useFixture(fixtures.MonkeyPatch('sys.version_info', (2, 6)))
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("ordereddict==1.0.1")
    result = packaging.parse_requirements([self.tmp_file])
    self.assertEqual(["ordereddict==1.0.1"], result)
def hook(self):
    """Resolve the package version and append parsed requirements."""
    self.config['version'] = packaging.get_version(
        self.config['name'], self.config.get('version', None))
    requirements = packaging.parse_requirements()
    packaging.append_text_list(self.config, 'requires_dist', requirements)
def test_parse_requirements_with_versioned_http_egg_url(self):
    """An #egg=name-version URL becomes a name>=version requirement."""
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("https://foo.com/zipball#egg=bar-4.2.1")
    self.assertEqual(['bar>=4.2.1'],
                     packaging.parse_requirements([self.tmp_file]))
def setup_cfg_to_setup_kwargs(config, script_args=()):
    """Processes the setup.cfg options and converts them to arguments accepted
    by setuptools' setup() function.
    """

    kwargs = {}

    # Temporarily holds install_requires and extra_requires while we
    # parse env_markers.
    all_requirements = {}

    for arg in D1_D2_SETUP_ARGS:
        if len(D1_D2_SETUP_ARGS[arg]) == 2:
            # The distutils field name is different than distutils2's.
            section, option = D1_D2_SETUP_ARGS[arg]
        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
            # The distutils field name is the same thant distutils2's.
            section = D1_D2_SETUP_ARGS[arg][0]
            option = arg

        in_cfg_value = has_get_option(config, section, option)
        if not in_cfg_value:
            # There is no such option in the setup.cfg
            if arg == "long_description":
                # Fall back to description_file: concatenate the listed
                # files into a single long-description string.
                in_cfg_value = has_get_option(config, section,
                                              "description_file")
                if in_cfg_value:
                    in_cfg_value = split_multiline(in_cfg_value)
                    value = ''
                    for filename in in_cfg_value:
                        description_file = open(filename)
                        try:
                            value += description_file.read().strip() + '\n\n'
                        finally:
                            description_file.close()
                    in_cfg_value = value
            else:
                continue

        if arg in CSV_FIELDS:
            in_cfg_value = split_csv(in_cfg_value)
        if arg in MULTI_FIELDS:
            in_cfg_value = split_multiline(in_cfg_value)
        elif arg in MAP_FIELDS:
            # "key = value" lines become a dict.
            in_cfg_map = {}
            for i in split_multiline(in_cfg_value):
                k, v = i.split('=')
                in_cfg_map[k.strip()] = v.strip()
            in_cfg_value = in_cfg_map
        elif arg in BOOL_FIELDS:
            # Provide some flexibility here...
            if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
                in_cfg_value = True
            else:
                in_cfg_value = False

        if in_cfg_value:
            if arg in ('install_requires', 'tests_require'):
                # Replaces PEP345-style version specs with the sort expected by
                # setuptools
                in_cfg_value = [
                    _VERSION_SPEC_RE.sub(r'\1\2', pred)
                    for pred in in_cfg_value
                ]
            if arg == 'install_requires':
                # Split install_requires into package,env_marker tuples
                # These will be re-assembled later
                install_requires = []
                requirement_pattern = '(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
                for requirement in in_cfg_value:
                    m = re.match(requirement_pattern, requirement)
                    requirement_package = m.group('package').strip()
                    env_marker = m.group('env_marker').strip()
                    install_requires.append((requirement_package, env_marker))
                all_requirements[''] = install_requires
            elif arg == 'package_dir':
                in_cfg_value = {'': in_cfg_value}
            elif arg in ('package_data', 'data_files'):
                # "key = v1 v2" lines start a group; bare lines extend the
                # most recently started group.
                data_files = {}
                firstline = True
                prev = None
                for line in in_cfg_value:
                    if '=' in line:
                        key, value = line.split('=', 1)
                        key, value = (key.strip(), value.strip())
                        if key in data_files:
                            # Multiple duplicates of the same package name;
                            # this is for backwards compatibility of the old
                            # format prior to d2to1 0.2.6.
                            prev = data_files[key]
                            prev.extend(value.split())
                        else:
                            prev = data_files[key.strip()] = value.split()
                    elif firstline:
                        raise errors.DistutilsOptionError(
                            'malformed package_data first line %r (misses '
                            '"=")' % line)
                    else:
                        prev.extend(line.strip().split())
                    firstline = False
                if arg == 'data_files':
                    # the data_files value is a pointlessly different structure
                    # from the package_data value
                    data_files = data_files.items()
                in_cfg_value = data_files
            elif arg == 'cmdclass':
                # Resolve dotted command-class names into actual classes.
                cmdclass = {}
                dist = st_dist.Distribution()
                for cls_name in in_cfg_value:
                    cls = resolve_name(cls_name)
                    cmd = cls(dist)
                    cmdclass[cmd.get_command_name()] = cls
                in_cfg_value = cmdclass

        kwargs[arg] = in_cfg_value

    # Transform requirements with embedded environment markers to
    # setuptools' supported marker-per-requirement format.
    #
    # install_requires are treated as a special case of extras, before
    # being put back in the expected place
    #
    # fred =
    #     foo:marker
    #     bar
    # -> {'fred': ['bar'], 'fred:marker':['foo']}

    if 'extras' in config:
        requirement_pattern = '(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
        extras = config['extras']
        # Add contents of test-requirements, if any, into an extra named
        # 'test' if one does not already exist.
        if 'test' not in extras:
            from pbr import packaging
            extras['test'] = "\n".join(
                packaging.parse_requirements(
                    packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')

        for extra in extras:
            extra_requirements = []
            requirements = split_multiline(extras[extra])
            for requirement in requirements:
                m = re.match(requirement_pattern, requirement)
                extras_value = m.group('package').strip()
                env_marker = m.group('env_marker')
                extra_requirements.append((extras_value, env_marker))
            all_requirements[extra] = extra_requirements

    # Transform the full list of requirements into:
    # - install_requires, for those that have no extra and no
    #   env_marker
    # - named extras, for those with an extra name (which may include
    #   an env_marker)
    # - and as a special case, install_requires with an env_marker are
    #   treated as named extras where the name is the empty string

    extras_require = {}
    for req_group in all_requirements:
        for requirement, env_marker in all_requirements[req_group]:
            if env_marker:
                extras_key = '%s:(%s)' % (req_group, env_marker)
                # We do not want to poison wheel creation with locally
                # evaluated markers. sdists always re-create the egg_info
                # and as such do not need guarded, and pip will never call
                # multiple setup.py commands at once.
                if 'bdist_wheel' not in script_args:
                    try:
                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
                            extras_key = req_group
                    except SyntaxError:
                        log.error(
                            "Marker evaluation failed, see the following "
                            "error. For more information see: "
                            "http://docs.openstack.org/"
                            "developer/pbr/compatibility.html#evaluate-marker")
                        raise
            else:
                extras_key = req_group
            extras_require.setdefault(extras_key, []).append(requirement)

    kwargs['install_requires'] = extras_require.pop('', [])
    kwargs['extras_require'] = extras_require

    return kwargs
def test_parse_requirements_with_versioned_git_egg_url(self):
    """An editable git #egg=name-version URL becomes name>=version."""
    with open(self.tmp_file, 'w') as req_fh:
        req_fh.write("-e git://foo.com/zipball#egg=bar-1.2.4")
    self.assertEqual(['bar>=1.2.4'],
                     packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_python_version(self):
    """A requirements-pyN.txt matching the interpreter is picked up."""
    current = "requirements-py%d.txt" % sys.version_info[0]
    with open(current, "w") as req_fh:
        req_fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
    self.assertEqual(['foobar', 'foobaz'], packaging.parse_requirements())
def test_parse_requirements_override_with_env(self):
    """PBR_REQUIREMENTS_FILES overrides the default requirements files."""
    with open(self.tmp_file, "w") as req_fh:
        req_fh.write("foo\nbar")
    self.useFixture(fixtures.EnvironmentVariable(
        "PBR_REQUIREMENTS_FILES", self.tmp_file))
    self.assertEqual(["foo", "bar"], packaging.parse_requirements())