def testSplitting(self):
    """split_sections groups lines under their [section] headers and
    rejects unterminated section brackets."""
    sample = """
        x
        [Y]
        z

        a
        [b ]
        # foo
        c
        [ d]
        [q]
        v
        """
    expected = [
        (None, ["x"]),
        ("Y", ["z", "a"]),
        ("b", ["c"]),
        ("d", []),
        ("q", ["v"]),
    ]
    assert list(pkg_resources.split_sections(sample)) == expected
    # A "[" without a closing "]" is a malformed header.
    with pytest.raises(ValueError):
        list(pkg_resources.split_sections("[foo"))
def parse(self, stream):
    """Populate this variant's fields from a sectioned configuration stream.

    Each ``[section]`` is dispatched to a ``parse_<section>`` method, one
    ``key=value`` line at a time.  Afterwards, missing ownership data is
    derived from the starting position, and any remaining unset fields are
    inherited from the base variant named by ``self.base``.
    """
    for section, lines in split_sections(stream):
        if section:
            # Dispatch to parse_<section>; unknown sections raise AttributeError.
            parse = getattr(self, "parse_"+section)
            for line in lines:
                parse(*line.split("=", 1))
    if self.position and self.homes and not self.ownership:
        # Base the default starting ownership on the starting position.
        # This is more often the case than starting with all home centers.
        homes = set(p for power in self.homes for p in self.homes[power])
        for name in self.position:
            owned = []
            for unit in self.position[name]:
                # NOTE(review): unit strings look like "<type> <province> ...";
                # index 1 is assumed to be the province — confirm format.
                province = unit.split()[1]
                if province in homes:
                    # Remove so leftover homes fall to the neutral "UNO" pool.
                    homes.remove(province)
                    owned.append(province)
            owned.sort()
            self.ownership[name] = owned
        self.ownership["UNO"] = list(sorted(homes))
    if self.base:
        # Inherit every still-unset field from the base variant.
        base = variants[self.base]
        if not self.start: self.start = base.start
        if not self.powers: self.powers = base.powers
        if not self.provinces: self.provinces = base.provinces
        if not self.homes: self.homes = base.homes
        if not self.ownership: self.ownership = base.ownership
        if not self.position: self.position = base.position
        if not self.borders: self.borders = base.borders
    self.rep = self.tokens()
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.1')

    # Those will be regenerated from `requires.txt`.
    del pkg_info['Provides-Extra']
    del pkg_info['Requires-Dist']

    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()

        # Sort sections by extra name so output ordering is deterministic
        # (the anonymous section, titled None, sorts first as '').
        def section_key(section):
            return section[0] or ''

        for extra, reqs in sorted(pkg_resources.split_sections(requires),
                                  key=section_key):
            for key, value in generate_requirements({extra: reqs}):
                # Skip headers we already emitted verbatim.
                if (key, value) not in pkg_info.items():
                    pkg_info[key] = value

    description = pkg_info['Description']
    if description:
        # The long description becomes the message payload in Metadata 2.1.
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
def get_deb_depends_from_setuptools_requires(requirements): depends = [] # This will be the return value from this function. parsed_reqs=[] for extra,reqs in pkg_resources.split_sections(requirements): if extra: continue parsed_reqs.extend(pkg_resources.parse_requirements(reqs)) if not parsed_reqs: return depends if not os.path.exists('/usr/bin/apt-file'): raise ValueError('apt-file not in /usr/bin. Please install ' 'with: sudo apt-get install apt-file') # Ask apt-file for any packages which have a .egg-info file by # these names. # Note that apt-file appears to think that some packages # e.g. setuptools itself have "foo.egg-info/BLAH" files but not a # "foo.egg-info" directory. egginfore=("(/(%s)(?:-[^/]+)?(?:-py[0-9]\.[0-9.]+)?\.egg-info)" % '|'.join(req.project_name for req in parsed_reqs)) args = ["apt-file", "search", "--ignore-case", "--regexp", egginfore] try: cmd = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True) except Exception, le: log.error('ERROR running: %s', ' '.join(args)) raise RuntimeError('exception %s from subprocess %s' % (le,args))
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
    old-draft Metadata 2.0 format.

    :param egg_info_path: path of the ``.egg-info`` directory (holds
        ``requires.txt``).
    :param pkginfo_path: path of the ``PKG-INFO`` file to convert.
    :return: the converted email.message-style metadata object.
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.0')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        # Fix: close the file deterministically instead of relying on GC
        # (the original `open(...).read()` leaked the handle).
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        for extra, reqs in pkg_resources.split_sections(requires):
            condition = ''
            if extra and ':' in extra:  # setuptools extra:condition syntax
                extra, condition = extra.split(':', 1)
            if extra:
                pkg_info['Provides-Extra'] = extra
                if condition:
                    condition += " and "
                condition += 'extra == %s' % repr(extra)
            if condition:
                condition = '; ' + condition
            for new_req in convert_requirements(reqs):
                pkg_info['Requires-Dist'] = new_req + condition

    description = pkg_info['Description']
    if description:
        # Long description becomes the message payload in Metadata 2.x.
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
def get_deb_depends_from_setuptools_requires(requirements, on_failure="warn"):
    """
    Suppose you can't confidently figure out a .deb which satisfies a given
    requirement. If on_failure == 'warn', then log a warning. If
    on_failure == 'raise' then raise CantSatisfyRequirement exception. If
    on_failure == 'guess' then guess that python-$FOO will satisfy the
    dependency and that the Python version numbers will apply to the Debian
    packages (in addition to logging a warning message).
    """
    # NOTE: Python 2 syntax (`except Exception, le`).
    assert on_failure in ("raise", "warn", "guess"), on_failure
    import pkg_resources
    depends = []  # This will be the return value from this function.
    parsed_reqs=[]
    for extra,reqs in pkg_resources.split_sections(requirements):
        # Only unconditional (non-extra) requirements are considered.
        if extra: continue
        parsed_reqs.extend(pkg_resources.parse_requirements(reqs))
    if not parsed_reqs: return depends
    if not os.path.exists('/usr/bin/apt-file'):
        raise ValueError('apt-file not in /usr/bin. Please install '
                         'with: sudo apt-get install apt-file')
    # Ask apt-file for any packages which have a .egg-info file by
    # these names.
    # Note that apt-file appears to think that some packages
    # e.g. setuptools itself have "foo.egg-info/BLAH" files but not a
    # "foo.egg-info" directory.
    # Dashes become underscores in egg-info names, hence the replace().
    egginfore=("(/(%s)(?:-[^/]+)?(?:-py[0-9]\.[0-9.]+)?\.egg-info)"
               % '|'.join(req.project_name.replace('-', '_') for req in parsed_reqs))
    args = ["apt-file", "search", "--ignore-case", "--regexp", egginfore]
    if 1:
        # do dry run on apt-file
        dry_run_args = args[:] + ['--dummy','--non-interactive']
        cmd = subprocess.Popen(dry_run_args,stderr=subprocess.PIPE)
        returncode = cmd.wait()
        if returncode:
            err_output = cmd.stderr.read()
            raise RuntimeError('Error running "apt-file search": ' +
                               err_output.strip())
    try:
        cmd = subprocess.Popen(args,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               universal_newlines=True)
    except Exception, le:
        # TODO: catch rc=1 and "E: The cache directory is empty. You need to
        # run 'apt-file update' first.", and tell the user to follow those
        # instructions.
        log.error('ERROR running: %s', ' '.join(args))
        raise RuntimeError('exception %s from subprocess %s' % (le,args))
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
    old-draft Metadata 2.0 format.

    Requirements are emitted in sorted order (by extra name, then by
    requirement) so the output is deterministic.

    :param egg_info_path: path of the ``.egg-info`` directory.
    :param pkginfo_path: path of the ``PKG-INFO`` file to convert.
    :return: the converted email.message-style metadata object.
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.0')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        # Fix: close the file deterministically instead of relying on GC
        # (the original `open(...).read()` leaked the handle).
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        for extra, reqs in sorted(pkg_resources.split_sections(requires),
                                  key=lambda x: x[0] or ''):
            condition = ''
            if extra and ':' in extra:  # setuptools extra:condition syntax
                extra, condition = extra.split(':', 1)
            if extra:
                pkg_info['Provides-Extra'] = extra
                if condition:
                    condition += " and "
                condition += 'extra == %s' % repr(extra)
            if condition:
                condition = '; ' + condition
            for new_req in sorted(convert_requirements(reqs)):
                pkg_info['Requires-Dist'] = new_req + condition

    description = pkg_info['Description']
    if description:
        # Long description becomes the message payload in Metadata 2.x.
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
def _collect(self):
    """Harvest metadata and requirements from the first PKG-INFO found
    under ``self.path``."""
    matches = list(self.path.glob("**/PKG-INFO"))
    if not matches:
        log.warning("%r has no PKG-INFO", self)
        return
    pkg_info = matches[0]

    # Parse the RFC-822-style PKG-INFO headers with an email parser.
    parser = FeedParser()
    parser.feed(pkg_info.open().read())
    message = parser.close()
    for raw_key, raw_value in message.items():
        text = raw_value.strip()
        if not text or text == "UNKNOWN":
            continue
        field = raw_key.lower().replace("-", "_")
        if field in self.MULTI_KEYS:
            # Multi-valued fields accumulate into a set.
            self.metadata.setdefault(field, set()).add(text)
        else:
            self.metadata[field] = text

    # Collect requirements grouped by extra from a sibling requires.txt.
    requires_path = pkg_info.parent / "requires.txt"
    if requires_path.exists():
        requires = requires_path.open().read()
        sections = sorted(pkg_resources.split_sections(requires),
                          key=lambda pair: pair[0] or "")
        for extra, reqs in sections:
            # The anonymous (None) section holds the run-time requirements.
            group = "run" if extra is None else extra
            for req in reqs:
                self.add_requirement(req, group)
def parse(self, stream):
    """Populate this variant from a sectioned configuration stream.

    ``[section]`` lines are dispatched to the matching ``parse_<section>``
    method as ``key=value`` pairs; ownership defaults are then derived from
    the starting position, and unset fields fall back to the base variant.
    """
    for section, lines in split_sections(stream):
        if section:
            # Unknown section names raise AttributeError here by design.
            parse = getattr(self, "parse_" + section)
            for line in lines:
                parse(*line.split("=", 1))
    if self.position and self.homes and not self.ownership:
        # Base the default starting ownership on the starting position.
        # This is more often the case than starting with all home centers.
        homes = set(p for power in self.homes for p in self.homes[power])
        for name in self.position:
            owned = []
            for unit in self.position[name]:
                # NOTE(review): assumes unit strings are "<type> <province> ...";
                # confirm the format against the position parser.
                province = unit.split()[1]
                if province in homes:
                    # Remove so unclaimed homes end up in the neutral pool.
                    homes.remove(province)
                    owned.append(province)
            owned.sort()
            self.ownership[name] = owned
        self.ownership["UNO"] = list(sorted(homes))
    if self.base:
        # Inherit every still-unset field from the named base variant.
        base = variants[self.base]
        if not self.start: self.start = base.start
        if not self.powers: self.powers = base.powers
        if not self.provinces: self.provinces = base.provinces
        if not self.homes: self.homes = base.homes
        if not self.ownership: self.ownership = base.ownership
        if not self.position: self.position = base.position
        if not self.borders: self.borders = base.borders
    self.rep = self.tokens()
def dep_map(requires_txt):
    """Parse dependency map. From setuptools.

    Maps extra name (or None for unconditional deps) to a list of parsed
    requirements; the None entry is always present.
    """
    mapping = collections.OrderedDict({None: []})
    for section, lines in split_sections(requires_txt):
        key = safe_extra(section) if section else section
        bucket = mapping.setdefault(key, [])
        bucket.extend(parse_requirements(lines))
    return mapping
def is_product_distribution(distribution):
    """Return True when the distribution's entry_points.txt declares a
    section matching ``entrypoint_group``; otherwise return None."""
    meta_name = 'entry_points.txt'
    if not distribution.has_metadata(meta_name):
        return None
    ini_text = distribution.get_metadata(meta_name)
    for title, _lines in pkg_resources.split_sections(ini_text):
        if title == entrypoint_group:
            return True
    return None
def testSplitting(self):
    """split_sections yields (title, lines) pairs — None for the anonymous
    leading section — and rejects an unterminated '['."""
    sample = """
        x
        [Y]
        z

        a
        [b ]
        # foo
        c
        [ d]
        [q]
        v
        """
    expected = [
        (None, ["x"]),
        ("Y", ["z", "a"]),
        ("b", ["c"]),
        ("d", []),
        ("q", ["v"]),
    ]
    self.assertEqual(list(pkg_resources.split_sections(sample)), expected)
    self.assertRaises(ValueError, list, pkg_resources.split_sections("[foo"))
def test_requirement_parsing(self):
    """pbr must translate environment markers from requirements.txt and
    setup.cfg [extras] into sectioned entries in requires.txt."""
    tempdir = self.useFixture(fixtures.TempDir()).path
    requirements = os.path.join(tempdir, 'requirements.txt')
    with open(requirements, 'wt') as f:
        f.write(textwrap.dedent(six.u("""\
            bar
            quux<1.0; python_version=='2.6'
            requests-aws>=0.1.4    # BSD License (3 clause)
            Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
            requests-kerberos>=0.6;python_version=='2.7' # MIT
            """)))
    setup_cfg = os.path.join(tempdir, 'setup.cfg')
    with open(setup_cfg, 'wt') as f:
        f.write(textwrap.dedent(six.u("""\
            [metadata]
            name = test_reqparse

            [extras]
            test =
                foo
                baz>3.2 :python_version=='2.7' # MIT
                bar>3.3 :python_version=='2.7' # MIT # Apache
            """)))
    # pkg_resources.split_sections uses None as the title of an
    # anonymous section instead of the empty string. Weird.
    expected_requirements = {
        None: ['bar', 'requests-aws>=0.1.4'],
        ":(python_version=='2.6')": ['quux<1.0'],
        ":(python_version=='2.7')": ['Routes>=1.12.3,!=2.0,!=2.1',
                                     'requests-kerberos>=0.6'],
        'test': ['foo'],
        "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3']
    }
    # Minimal pbr-enabled setup.py so egg_info can run.
    setup_py = os.path.join(tempdir, 'setup.py')
    with open(setup_py, 'wt') as f:
        f.write(textwrap.dedent(six.u("""\
            #!/usr/bin/env python
            import setuptools
            setuptools.setup(
                setup_requires=['pbr'],
                pbr=True,
            )
            """)))
    self._run_cmd(sys.executable, (setup_py, 'egg_info'),
                  allow_fail=False, cwd=tempdir)
    egg_info = os.path.join(tempdir, 'test_reqparse.egg-info')
    requires_txt = os.path.join(egg_info, 'requires.txt')
    with open(requires_txt, 'rt') as requires:
        generated_requirements = dict(
            pkg_resources.split_sections(requires))
    self.assertEqual(expected_requirements, generated_requirements)
def requirements_from_requires_file(requires_path):
    """Create a sanitized copy of `requires.txt`.

    Returns the unconditional requirement lines (the anonymous leading
    section), or None when the first section is an extras section — or the
    file is empty — since pip doesn't understand those.

    :param requires_path: path to a setuptools ``requires.txt``.
    """
    # requires.txt can contain [extras_require] sections which pip doesn't understand
    with open(requires_path, "r") as requires:
        # Fix: an empty requires.txt made the bare next() raise
        # StopIteration; default to an empty anonymous section instead.
        section_name, extracted_requirements = next(
            pkg_resources.split_sections(requires), (None, []))
    if section_name is None:
        return extracted_requirements
    # First section is an extra → no unconditional dependencies to install.
    return None
def test_requirement_parsing(self):
    """pbr must carry environment markers into requires.txt sections, and
    bdist_wheel must not evaluate the markers away."""
    pkgs = {
        'test_reqparse': {
            'requirements.txt': textwrap.dedent("""\
                bar
                quux<1.0; python_version=='2.6'
                requests-aws>=0.1.4    # BSD License (3 clause)
                Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
                requests-kerberos>=0.6;python_version=='2.7' # MIT
                """),
            'setup.cfg': textwrap.dedent("""\
                [metadata]
                name = test_reqparse

                [extras]
                test =
                    foo
                    baz>3.2 :python_version=='2.7' # MIT
                    bar>3.3 :python_version=='2.7' # MIT # Apache
                """)
        },
    }
    pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs
    pkg_dir = pkg_dirs['test_reqparse']
    # pkg_resources.split_sections uses None as the title of an
    # anonymous section instead of the empty string. Weird.
    expected_requirements = {
        None: ['bar', 'requests-aws>=0.1.4'],
        ":(python_version=='2.6')": ['quux<1.0'],
        ":(python_version=='2.7')": ['Routes>=1.12.3,!=2.0,!=2.1',
                                     'requests-kerberos>=0.6'],
        'test': ['foo'],
        "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3']
    }
    venv = self.useFixture(Venv('reqParse'))
    bin_python = venv.python
    # Two things are tested by this
    # 1) pbr properly parses markers from requiremnts.txt and setup.cfg
    # 2) bdist_wheel causes pbr to not evaluate markers
    self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'),
                  allow_fail=False, cwd=pkg_dir)
    egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info')
    requires_txt = os.path.join(egg_info, 'requires.txt')
    with open(requires_txt, 'rt') as requires:
        generated_requirements = dict(
            pkg_resources.split_sections(requires))
    self.assertEqual(expected_requirements, generated_requirements)
def entry_points_from_dist(dist):
    # NOTE: Python 2 code (uses `basestring`).
    # Normalize a distribution's entry_points into a {group: lines} dict.
    # A string value is parsed as INI-style sections; a dict-like value is
    # passed through unchanged; a missing attribute yields an empty dict.
    if hasattr(dist, "entry_points"):
        from pkg_resources import split_sections
        if isinstance(dist.entry_points, basestring):
            entry_points = {}
            sections = split_sections(dist.entry_points)
            for group, lines in sections:
                # NOTE(review): split_sections titles the anonymous leading
                # section None — group.strip() would raise there; presumably
                # entry_points strings always start with a [group] header.
                group = group.strip()
                entry_points[group] = lines
        else:
            entry_points = dist.entry_points
    else:
        entry_points = {}
    return entry_points
def test_requirement_parsing(self):
    """pbr must carry environment markers into requires.txt sections, and
    bdist_wheel must not evaluate the markers away."""
    pkgs = {
        "test_reqparse": {
            "requirements.txt": textwrap.dedent(
                """\
                bar
                quux<1.0; python_version=='2.6'
                requests-aws>=0.1.4    # BSD License (3 clause)
                Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
                requests-kerberos>=0.6;python_version=='2.7' # MIT
                """
            ),
            "setup.cfg": textwrap.dedent(
                """\
                [metadata]
                name = test_reqparse

                [extras]
                test =
                    foo
                    baz>3.2 :python_version=='2.7' # MIT
                    bar>3.3 :python_version=='2.7' # MIT # Apache
                """
            ),
        }
    }
    pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs
    pkg_dir = pkg_dirs["test_reqparse"]
    # pkg_resources.split_sections uses None as the title of an
    # anonymous section instead of the empty string. Weird.
    expected_requirements = {
        None: ["bar", "requests-aws>=0.1.4"],
        ":(python_version=='2.6')": ["quux<1.0"],
        ":(python_version=='2.7')": ["Routes>=1.12.3,!=2.0,!=2.1",
                                     "requests-kerberos>=0.6"],
        "test": ["foo"],
        "test:(python_version=='2.7')": ["baz>3.2", "bar>3.3"],
    }
    venv = self.useFixture(Venv("reqParse"))
    bin_python = venv.python
    # Two things are tested by this
    # 1) pbr properly parses markers from requiremnts.txt and setup.cfg
    # 2) bdist_wheel causes pbr to not evaluate markers
    self._run_cmd(bin_python, ("setup.py", "bdist_wheel"),
                  allow_fail=False, cwd=pkg_dir)
    egg_info = os.path.join(pkg_dir, "test_reqparse.egg-info")
    requires_txt = os.path.join(egg_info, "requires.txt")
    with open(requires_txt, "rt") as requires:
        generated_requirements = dict(pkg_resources.split_sections(requires))
    self.assertEqual(expected_requirements, generated_requirements)
def entry_points_from_dist(dist):
    """Normalize a distribution's entry_points to a {group: lines} dict.

    A string value is parsed as INI-style sections; any non-string value is
    returned unchanged; a missing attribute yields an empty dict.
    """
    if not hasattr(dist, "entry_points"):
        return {}
    from pkg_resources import split_sections
    raw = dist.entry_points
    if not is_string(raw):
        return raw
    entry_points = {}
    for group, lines in split_sections(raw):
        entry_points[group.strip()] = lines
    return entry_points
def _convert_metadata(distr):
    """Extract ``key: value`` pairs from a distribution's PKG-INFO into a
    plain dict; returns an empty dict when PKG-INFO is absent."""
    import pkg_resources as pkg_r
    meta = {}
    if not distr.has_metadata("PKG-INFO"):
        return meta
    raw = _repair_pkg_info(distr.get_metadata("PKG-INFO"))
    for _title, lines in pkg_r.split_sections(raw):
        for line in lines:
            # Only lines that look like metadata headers are kept.
            if _g_pkginfo_key_re.match(line):
                parts = line.split(':', 1)
                meta[parts[0].strip()] = parts[1].strip()
    return meta
def parse_requirements(egg_info_directory):
    '''
    Get required and optional requirements from egg-info directory

    Returns
    -------
    {extra :: str or None : [pkg_resources.Requirement]}
        `extra` is the name of the group of optional dependencies or
        ``None`` if they are required. Like, ``setup(extras_require=...)``
        with ``extras_require[None]=required_dependencies`.
    '''
    collected = defaultdict(list)
    for candidate in ('requires.txt', 'depends.txt'):
        deps_file = egg_info_directory / candidate
        if not deps_file.exists():
            continue
        lines = deps_file.read_text().splitlines()
        for extra, section_lines in pkg_resources.split_sections(lines):
            parsed = pkg_resources.parse_requirements(section_lines)
            collected[extra].extend(parsed)
    return collected
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.1')

    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        # Sort by extra name (anonymous section first as '') so the emitted
        # headers are deterministic.
        sections = sorted(pkg_resources.split_sections(requires),
                          key=lambda pair: pair[0] or '')
        for extra, reqs in sections:
            for header, value in generate_requirements({extra: reqs}):
                pkg_info[header] = value

    description = pkg_info['Description']
    if description:
        # Long description becomes the message payload in Metadata 2.1.
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
    """
    with open(pkginfo_path, encoding="utf-8") as headers:
        pkg_info = Parser().parse(headers)

    pkg_info.replace_header("Metadata-Version", "2.1")
    # Those will be regenerated from `requires.txt`.
    del pkg_info["Provides-Extra"]
    del pkg_info["Requires-Dist"]
    requires_path = os.path.join(egg_info_path, "requires.txt")
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()

        # Sort by extra name so output ordering is deterministic; the
        # anonymous section (title None) sorts first as "".
        parsed_requirements = sorted(
            split_sections(requires), key=lambda x: x[0] or ""
        )
        for extra, reqs in parsed_requirements:
            for key, value in generate_requirements({extra: reqs}):
                # Avoid emitting duplicate headers.
                if (key, value) not in pkg_info.items():
                    pkg_info[key] = value

    description = pkg_info["Description"]
    if description:
        description_lines = pkg_info["Description"].splitlines()
        dedented_description = "\n".join(
            # if the first line of long_description is blank,
            # the first line here will be indented.
            (
                description_lines[0].lstrip(),
                textwrap.dedent("\n".join(description_lines[1:])),
                "\n",
            )
        )
        # Long description becomes the message payload in Metadata 2.1.
        pkg_info.set_payload(dedented_description)
        del pkg_info["Description"]

    return pkg_info
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
    old-draft Metadata 2.0 format.

    :param egg_info_path: path of the ``.egg-info`` directory.
    :param pkginfo_path: path of the ``PKG-INFO`` file to convert.
    :return: the converted email.message-style metadata object.
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.0')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        # Fix: close the file deterministically instead of relying on GC
        # (the original `open(...).read()` leaked the handle).
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        for extra, reqs in pkg_resources.split_sections(requires):
            condition = ''
            if extra:
                # Extras become Provides-Extra plus an environment marker.
                pkg_info['Provides-Extra'] = extra
                condition = '; extra == %s' % repr(extra)
            for new_req in convert_requirements(reqs):
                pkg_info['Requires-Dist'] = new_req + condition

    description = pkg_info['Description']
    if description:
        # Long description becomes the message payload in Metadata 2.x.
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
def _run_setup_file(filename):
    """ Transform and Run AST of the setup file """
    # NOTE: Python 2-era code (references `unicode`).
    try:
        import pkg_resources
    except ImportError:
        # pkg_resources install may be in progress
        pkg_resources = None
    # Namespace for executing the (stubbed) setup.py.
    namespace = {
        '_setup_stub_': _setup_stub_,
        '__file__': filename,
        '__name__': '__main__',
        'setup_args': None,
        'setup_kwargs': None,
    }
    source_folder = os.path.dirname(filename)
    tree = ArchiveFileInstaller._get_cooked_ast(filename)
    codeobj = compile(tree, filename, 'exec')
    # Some setup files import the package to be installed and sometimes opens a file
    # in the source folder. So we modify sys.path and change directory into source folder.
    saved_cwd = os.getcwd()
    saved_sys_path = sys.path[:]
    os.chdir(source_folder)
    sys.path.insert(0, source_folder)
    try:
        exec(codeobj, namespace, namespace)
    finally:
        os.chdir(saved_cwd)
        sys.path = saved_sys_path
    # The stubbed setuptools recorded the setup() call's arguments.
    args, kwargs = sys.modules['setuptools']._setup_params_
    # for k in sorted(kwargs.keys()): print('{}: {!r}'.format(k, kwargs[k]))
    if 'ext_modules' in kwargs:
        print('WARNING: Extension modules and skipped: {}'.format(
            kwargs['ext_modules']))
    packages = kwargs['packages'] if 'packages' in kwargs else []
    py_modules = kwargs['py_modules'] if 'py_modules' in kwargs else []
    if not packages and not py_modules:
        raise PipError(
            'failed to find packages or py_modules arguments in setup call'
        )
    package_dirs = kwargs.get('package_dir', {})
    use_2to3 = kwargs.get('use_2to3', False) and six.PY3
    files_installed = []
    # handle scripts
    # we handle them before the packages because they may be moved
    # while handling the packages
    scripts = kwargs.get("scripts", [])
    for script in scripts:
        print("Handling commandline script: {s}".format(s=script))
        cmdname = script.replace(os.path.dirname(script), "").replace("/", "")
        if not "." in cmdname:
            cmdname += ".py"
        scriptpath = os.path.join(source_folder, script)
        with open(scriptpath, "r") as fin:
            content = fin.read()
        cmdpath = create_command(cmdname, content)
        files_installed.append(cmdpath)
    packages = ArchiveFileInstaller._consolidated_packages(packages)
    for p in sorted(packages):  # folders or files under source root
        if p == '':  # no packages just files
            from_folder = os.path.join(source_folder, package_dirs.get(p, ''))
            for f in ArchiveFileInstaller._find_package_files(from_folder):
                target_file = os.path.join(SITE_PACKAGES_FOLDER, f)
                ArchiveFileInstaller._safe_move(
                    os.path.join(from_folder, f), target_file)
                files_installed.append(target_file)
                if use_2to3:
                    _stash('2to3 -w {} > /dev/null'.format(target_file))
        else:  # packages
            target_dir = os.path.join(SITE_PACKAGES_FOLDER, p)
            # Resolve the package's source location via package_dir mapping.
            if p in package_dirs:
                ArchiveFileInstaller._safe_move(
                    os.path.join(source_folder, package_dirs[p]), target_dir)
            elif '' in package_dirs:
                ArchiveFileInstaller._safe_move(
                    os.path.join(source_folder, package_dirs[''], p),
                    target_dir)
            else:
                ArchiveFileInstaller._safe_move(
                    os.path.join(source_folder, p), target_dir)
            files_installed.append(target_dir)
            if use_2to3:
                _stash(
                    """find {} --name '.py' | xargs -n 1 -I %% 2to3 -w %% > /dev/null"""
                    .format(target_dir))
    py_modules = ArchiveFileInstaller._consolidated_packages(py_modules)
    for p in sorted(
            py_modules
    ):  # files or folders where the file resides, e.g. ['file', 'folder.file']
        if '' in package_dirs:
            p = os.path.join(package_dirs[''], p)
        if os.path.isdir(os.path.join(source_folder, p)):  # folder
            target_dir = os.path.join(SITE_PACKAGES_FOLDER, p)
            ArchiveFileInstaller._safe_move(os.path.join(source_folder, p),
                                            target_dir)
            files_installed.append(target_dir)
            if use_2to3:
                _stash(
                    """find {} --name '.py' | xargs -n 1 -I %% 2to3 -w %% > /dev/null"""
                    .format(target_dir))
        else:  # file
            target_file = os.path.join(SITE_PACKAGES_FOLDER, p + '.py')
            ArchiveFileInstaller._safe_move(
                os.path.join(source_folder, p + '.py'), target_file)
            files_installed.append(target_file)
            if use_2to3:
                _stash('2to3 -w {} > /dev/null'.format(target_file))
    # handle entry points
    entry_points = kwargs.get("entry_points", {})
    if isinstance(entry_points, (str, unicode)):
        if pkg_resources is not None:
            # Parse the INI-style entry_points string into {group: lines}.
            entry_points = {
                s: c
                for s, c in pkg_resources.split_sections(entry_points)
            }
        else:
            print(
                "Warning: pkg_resources not available, skipping entry_points definitions."
            )
            entry_points = {}
    for epn in entry_points:
        ep = entry_points[epn]
        if isinstance(ep, (str, unicode)):
            ep = [ep]
        if epn == "console_scripts":
            # Generate a small wrapper script for each console entry point.
            for dec in ep:
                name, loc = dec.replace(" ", "").split("=")
                modname, funcname = loc.split(":")
                if not name.endswith(".py"):
                    name += ".py"
                desc = kwargs.get("description", "")
                path = create_command(
                    name,
                    """'''{d}'''
from {m} import {n}

if __name__ == "__main__":
    {n}()
""".format(
                        m=modname,
                        n=funcname,
                        d=desc,
                    ),
                )
                files_installed.append(path)
        else:
            print("Warning: passing entry points for '{n}'.".format(n=epn))
    # Recursively Handle dependencies
    dependencies = kwargs.get('install_requires', [])
    return files_installed, dependencies
def setuptools_requires(vers, filename, file_data):
    """Replace `vers`'s approximate requirements with those parsed out of
    the sdist archive's ``*.egg-info/requires.txt``, if any.

    Hard (non-approximate) requirements take precedence and short-circuit
    the whole extraction.
    """
    hard_requirements = [x for x in vers.requirements if not x.approximate]
    if hard_requirements:
        # We have hard requirements, we assume they take precedence over
        # approximate requirements
        return
    # Determine the type of compression to use
    compression = os.path.splitext(filename)[1][1:]
    # Normalize tgz to just gz
    if compression == "tgz":
        compression = "gz"
    # short circuit on some invalid sdist types that PyPI somehow has
    if compression in set(["rpm", "egg", "deb"]):
        return
    if compression not in set(["gz", "bz2", "zip"]):
        raise ValueError(
            "Invalid compression type %s for %s" % (compression, filename)
        )
    # Shove our file_data into a BytesIO so we can treat it like a file
    archive = io.BytesIO(file_data)
    # Normalize requirements, provides, and obsoletes back to empty
    vers.requirements = []
    vers.provides = []
    vers.obsoletes = []
    # Extract the requires.txt from the file_data
    if compression == "zip":
        try:
            zipf = zipfile.ZipFile(archive)
        except zipfile.BadZipfile:
            # invalid archive
            return
        try:
            files = fnmatch.filter(zipf.namelist(), "*.egg-info/requires.txt")
        except IOError:
            return
        if not files:
            # requires.txt doesn't exist
            return
        # Figure out which requires.txt is closest to the root
        files.sort(key=lambda x: len(x.split("/")))
        # Grab the first requires.txt
        rfilename = files.pop(0)
        # Extract the requires.txt from the zip archive
        # NOTE(review): file handle is never closed; zipf.open yields bytes —
        # presumably split_sections copes here (Python 2?) — confirm.
        requires = zipf.open(rfilename)
    elif compression in set(["gz", "bz2"]):
        try:
            mode = "r:%s" % compression
            tar = tarfile.open(filename, mode=mode, fileobj=archive)
        except tarfile.ReadError:
            # Invalid archive
            return
        try:
            files = fnmatch.filter(tar.getnames(), "*.egg-info/requires.txt")
        except IOError:
            return
        if not files:
            # requires.txt doesn't exist
            return
        # Figure out which requires.txt is closest to the root
        files.sort(key=lambda x: len(x.split("/")))
        # Grab the first requires.txt
        rfilename = files.pop(0)
        # Extract the requires.txt from the tar archive
        requires = tar.extractfile(rfilename)
    for section, reqs in pkg_resources.split_sections(requires):
        for req in pkg_resources.parse_requirements(reqs):
            requirement = Requirement(name=req.project_name, approximate=True)
            # If we have any version modifiers, add them
            if req.specs:
                requirement.versions = ["".join(x) for x in req.specs]
            # If we have any section add is as an extras
            if section is not None:
                requirement.environment = "extra = '%s'" % section
            # Add this Requirement to the version
            vers.requirements.append(requirement)
def test_requirement_parsing(self):
    """pbr must carry environment markers into requires.txt sections, and
    bdist_wheel must not evaluate the markers away."""
    pkgs = {
        'test_reqparse': {
            'requirements.txt': textwrap.dedent("""\
                bar
                quux<1.0; python_version=='2.6'
                requests-aws>=0.1.4    # BSD License (3 clause)
                Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
                requests-kerberos>=0.6;python_version=='2.7' # MIT
                """),
            'setup.cfg': textwrap.dedent("""\
                [metadata]
                name = test_reqparse

                [extras]
                test =
                    foo
                    baz>3.2 :python_version=='2.7' # MIT
                    bar>3.3 :python_version=='2.7' # MIT # Apache
                """)},
    }
    pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs
    pkg_dir = pkg_dirs['test_reqparse']
    # pkg_resources.split_sections uses None as the title of an
    # anonymous section instead of the empty string. Weird.
    expected_requirements = {
        None: ['bar', 'requests-aws>=0.1.4'],
        ":(python_version=='2.6')": ['quux<1.0'],
        ":(python_version=='2.7')": ['Routes!=2.0,!=2.1,>=1.12.3',
                                     'requests-kerberos>=0.6'],
        'test': ['foo'],
        "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3']
    }
    venv = self.useFixture(Venv('reqParse'))
    bin_python = venv.python
    # Two things are tested by this
    # 1) pbr properly parses markers from requiremnts.txt and setup.cfg
    # 2) bdist_wheel causes pbr to not evaluate markers
    self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'),
                  allow_fail=False, cwd=pkg_dir)
    egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info')
    requires_txt = os.path.join(egg_info, 'requires.txt')
    with open(requires_txt, 'rt') as requires:
        generated_requirements = dict(
            pkg_resources.split_sections(requires))
    # NOTE(dhellmann): We have to spell out the comparison because
    # the rendering for version specifiers in a range is not
    # consistent across versions of setuptools.
    for section, expected in expected_requirements.items():
        exp_parsed = [
            pkg_resources.Requirement.parse(s)
            for s in expected
        ]
        gen_parsed = [
            pkg_resources.Requirement.parse(s)
            for s in generated_requirements[section]
        ]
        self.assertEqual(exp_parsed, gen_parsed)
def parse_requirestxt(requires):
    """Turn requires.txt sections into [{'requires': ..., 'environment': ...}]
    dicts, dropping the leading ':' from conditional section titles."""
    parsed = []
    for env, reqset in split_sections(requires):
        marker = env[1:] if env else env
        parsed.append({'requires': reqset, 'environment': marker})
    return parsed
def _run_setup_file(self, filename):
    """ Transform and Run AST of the setup file """
    # NOTE: Python 2-era code (references `unicode`, bytes .format()).
    try:
        import pkg_resources
    except ImportError:
        # pkg_resources install may be in progress
        pkg_resources = None
    # Namespace for executing the (stubbed) setup.py.
    namespace = {
        '_setup_stub_': _setup_stub_,
        '__file__': filename,
        '__name__': '__main__',
        'setup_args': None,
        'setup_kwargs': None,
    }
    source_folder = os.path.dirname(filename)
    tree = ArchiveFileInstaller._get_cooked_ast(filename)
    codeobj = compile(tree, filename, 'exec')
    # Some setup files import the package to be installed and sometimes opens a file
    # in the source folder. So we modify sys.path and change directory into source folder.
    saved_cwd = os.getcwd()
    saved_sys_path = sys.path[:]
    os.chdir(source_folder)
    sys.path.insert(0, source_folder)
    try:
        exec(codeobj, namespace, namespace)
    finally:
        os.chdir(saved_cwd)
        sys.path = saved_sys_path
    # The stubbed setuptools recorded the setup() call's arguments.
    args, kwargs = sys.modules['setuptools']._setup_params_
    # for k in sorted(kwargs.keys()): print('{}: {!r}'.format(k, kwargs[k]))
    if 'ext_modules' in kwargs:
        print('WARNING: Extension modules and skipped: {}'.format(kwargs['ext_modules']))
    packages = kwargs['packages'] if 'packages' in kwargs else []
    py_modules = kwargs['py_modules'] if 'py_modules' in kwargs else []
    if not packages and not py_modules:
        raise PipError('failed to find packages or py_modules arguments in setup call')
    package_dirs = kwargs.get('package_dir', {})
    use_2to3 = kwargs.get('use_2to3', False) and six.PY3
    files_installed = []
    # handle scripts
    # we handle them before the packages because they may be moved
    # while handling the packages
    scripts = kwargs.get("scripts", [])
    for script in scripts:
        if self.verbose:
            print("Handling commandline script: {s}".format(s=script))
        cmdname = script.replace(os.path.dirname(script), "").replace("/", "")
        if not "." in cmdname:
            cmdname += ".py"
        scriptpath = os.path.join(source_folder, script)
        with open(scriptpath, "r") as fin:
            content = fin.read()
        cmdpath = create_command(cmdname, content)
        files_installed.append(cmdpath)
    packages = ArchiveFileInstaller._consolidated_packages(packages)
    for p in sorted(packages):  # folders or files under source root
        if p == '':  # no packages just files
            from_folder = os.path.join(source_folder, package_dirs.get(p, ''))
            for f in ArchiveFileInstaller._find_package_files(from_folder):
                target_file = os.path.join(SITE_PACKAGES_FOLDER, f)
                ArchiveFileInstaller._safe_move(
                    os.path.join(from_folder, f),
                    target_file
                )
                files_installed.append(target_file)
                if use_2to3:
                    _stash('2to3 -w {} > /dev/null'.format(target_file))
        else:  # packages
            target_dir = os.path.join(SITE_PACKAGES_FOLDER, p)
            # Resolve the package's source location via package_dir mapping.
            if p in package_dirs:
                ArchiveFileInstaller._safe_move(
                    os.path.join(source_folder, package_dirs[p]),
                    target_dir
                )
            elif '' in package_dirs:
                ArchiveFileInstaller._safe_move(
                    os.path.join(source_folder, package_dirs[''], p),
                    target_dir
                )
            else:
                ArchiveFileInstaller._safe_move(
                    os.path.join(source_folder, p),
                    target_dir
                )
            files_installed.append(target_dir)
            if use_2to3:
                _stash("""find {} --name '.py' | xargs -n 1 -I %% 2to3 -w %% > /dev/null""".format(target_dir))
    py_modules = ArchiveFileInstaller._consolidated_packages(py_modules)
    for p in sorted(py_modules):  # files or folders where the file resides, e.g. ['file', 'folder.file']
        if '' in package_dirs:
            p = os.path.join(package_dirs[''], p)
        if os.path.isdir(os.path.join(source_folder, p)):  # folder
            target_dir = os.path.join(SITE_PACKAGES_FOLDER, p)
            ArchiveFileInstaller._safe_move(
                os.path.join(source_folder, p),
                target_dir
            )
            files_installed.append(target_dir)
            if use_2to3:
                _stash("""find {} --name '.py' | xargs -n 1 -I %% 2to3 -w %% > /dev/null""".format(target_dir))
        else:  # file
            target_file = os.path.join(SITE_PACKAGES_FOLDER, p + '.py')
            ArchiveFileInstaller._safe_move(
                os.path.join(source_folder, p + '.py'),
                target_file
            )
            files_installed.append(target_file)
            if use_2to3:
                _stash('2to3 -w {} > /dev/null'.format(target_file))
    # handle entry points
    entry_points = kwargs.get("entry_points", {})
    if isinstance(entry_points, (str, unicode)):
        if pkg_resources is not None:
            # Parse the INI-style entry_points string into {group: lines}.
            entry_points = {s: c for s, c in pkg_resources.split_sections(entry_points)}
        else:
            print("Warning: pkg_resources not available, skipping entry_points definitions.")
            entry_points = {}
    for epn in entry_points:
        if self.verbose:
            print("Handling entrypoints for: " + epn)
        ep = entry_points[epn]
        if isinstance(ep, (str, unicode)):
            ep = [ep]
        if epn == "console_scripts":
            # Generate a small wrapper script for each console entry point.
            for dec in ep:
                name, loc = dec.replace(" ", "").split("=")
                modname, funcname = loc.split(":")
                if not name.endswith(".py"):
                    name += ".py"
                desc = kwargs.get("description", "")
                path = create_command(
                    name,
                    b"""'''{d}'''
from {m} import {n}

if __name__ == "__main__":
    {n}()
""".format(
                        m=modname,
                        n=funcname,
                        d=desc,
                    ),
                )
                files_installed.append(path)
        else:
            print("Warning: passing entry points for '{n}'.".format(n=epn))
    # Recursively Handle dependencies
    dependencies = kwargs.get('install_requires', [])
    return files_installed, dependencies
def entry_points(self):
    """Parse this distribution's ``entry_points.txt`` metadata.

    Returns a list of ``(section, lines)`` pairs as produced by
    ``pkg_resources.split_sections``, keeping only sections that
    actually contain at least one entry line.
    """
    raw = self._read_metadata('entry_points.txt')
    pairs = []
    for section, lines in pkg_resources.split_sections(raw):
        if lines:
            pairs.append((section, lines))
    return pairs
def setuptools_requires(vers, filename, file_data):
    """Populate ``vers.requirements`` from an sdist's ``requires.txt``.

    Extracts the ``*.egg-info/requires.txt`` member closest to the root of
    the given sdist archive (``file_data``, the raw bytes of ``filename``)
    and converts each parsed requirement into a ``Requirement`` marked
    ``approximate=True``.  Does nothing if the version already has any
    non-approximate ("hard") requirements.  Silently returns on invalid or
    requires.txt-less archives; raises ``ValueError`` for an unrecognized
    archive extension.

    NOTE(review): side effect — on the extraction path this resets
    ``vers.requirements``, ``vers.provides`` and ``vers.obsoletes`` to
    empty lists before repopulating requirements.
    """
    hard_requirements = [x for x in vers.requirements if not x.approximate]
    if hard_requirements:
        # We have hard requirements, we assume they take precedence over
        # approximate requirements
        return
    # Determine the type of compression to use
    compression = os.path.splitext(filename)[1][1:]
    # Normalize tgz to just gz
    if compression == "tgz":
        compression = "gz"
    # short circuit on some invalid sdist types that PyPI somehow has
    if compression in set(["rpm", "egg", "deb"]):
        return
    if compression not in set(["gz", "bz2", "zip"]):
        raise ValueError("Invalid compression type %s for %s" % (compression, filename))
    # Shove our file_data into a BytesIO so we can treat it like a file
    archive = io.BytesIO(file_data)
    # Normalize requirements, provides, and obsoletes back to empty
    vers.requirements = []
    vers.provides = []
    vers.obsoletes = []
    # Extract the requires.txt from the file_data
    if compression == "zip":
        try:
            zipf = zipfile.ZipFile(archive)
        except zipfile.BadZipfile:
            # invalid archive
            return
        try:
            files = fnmatch.filter(zipf.namelist(), "*.egg-info/requires.txt")
        except IOError:
            return
        if not files:
            # requires.txt doesn't exist
            return
        # Figure out which requires.txt is closest to the root
        files.sort(key=lambda x: len(x.split("/")))
        # Grab the first requires.txt
        rfilename = files.pop(0)
        # Extract the requires.txt from the zip archive
        requires = zipf.open(rfilename)
    elif compression in set(["gz", "bz2"]):
        try:
            mode = "r:%s" % compression
            # fileobj takes precedence over filename for the actual data;
            # filename is still passed for tarfile's bookkeeping
            tar = tarfile.open(filename, mode=mode, fileobj=archive)
        except tarfile.ReadError:
            # Invalid archive
            return
        try:
            files = fnmatch.filter(tar.getnames(), "*.egg-info/requires.txt")
        except IOError:
            return
        if not files:
            # requires.txt doesn't exist
            return
        # Figure out which requires.txt is closest to the root
        files.sort(key=lambda x: len(x.split("/")))
        # Grab the first requires.txt
        rfilename = files.pop(0)
        # Extract the requires.txt from the tar archive
        # NOTE(review): extractfile can return None for non-regular
        # members — presumably requires.txt is always a plain file; confirm
        requires = tar.extractfile(rfilename)
    # `requires` is a file-like handle open over the archive member;
    # split_sections iterates it line by line
    for section, reqs in pkg_resources.split_sections(requires):
        for req in pkg_resources.parse_requirements(reqs):
            requirement = Requirement(name=req.project_name, approximate=True)
            # If we have any version modifiers, add them
            if req.specs:
                requirement.versions = ["".join(x) for x in req.specs]
            # If we have any section add is as an extras
            if section is not None:
                requirement.environment = "extra = '%s'" % section
            # Add this Requirement to the version
            vers.requirements.append(requirement)
def test_requirement_parsing(self):
    """End-to-end check that requirements.txt plus [extras] in setup.cfg
    are rendered into the expected sectioned egg-info requires.txt.

    Builds a throwaway project in a temp dir, runs ``setup.py egg_info``
    in a subprocess, then parses the generated requires.txt back with
    ``pkg_resources.split_sections`` and compares against the expected
    section -> requirement-lines mapping.
    """
    tempdir = self.useFixture(fixtures.TempDir()).path
    requirements = os.path.join(tempdir, "requirements.txt")
    with open(requirements, "wt") as f:
        f.write(
            textwrap.dedent(
                six.u(
                    """\
                    bar
                    quux<1.0; python_version=='2.6'
                    """
                )
            )
        )
    setup_cfg = os.path.join(tempdir, "setup.cfg")
    with open(setup_cfg, "wt") as f:
        f.write(
            textwrap.dedent(
                six.u(
                    """\
                    [metadata]
                    name = test_reqparse

                    [extras]
                    test =
                        foo
                        baz>3.2 :python_version=='2.7'
                    """
                )
            )
        )
    # pkg_resources.split_sections uses None as the title of an
    # anonymous section instead of the empty string. Weird.
    expected_requirements = {
        None: ["bar"],
        ":(python_version=='2.6')": ["quux<1.0"],
        "test:(python_version=='2.7')": ["baz>3.2"],
        "test": ["foo"],
    }
    setup_py = os.path.join(tempdir, "setup.py")
    with open(setup_py, "wt") as f:
        f.write(
            textwrap.dedent(
                six.u(
                    """\
                    #!/usr/bin/env python
                    import setuptools
                    setuptools.setup(
                        setup_requires=['pbr'],
                        pbr=True,
                    )
                    """
                )
            )
        )
    # Run egg_info in a subprocess so pbr's setup hooks actually execute
    self._run_cmd(sys.executable, (setup_py, "egg_info"),
                  allow_fail=False, cwd=tempdir)
    egg_info = os.path.join(tempdir, "test_reqparse.egg-info")
    requires_txt = os.path.join(egg_info, "requires.txt")
    with open(requires_txt, "rt") as requires:
        generated_requirements = dict(
            pkg_resources.split_sections(requires))
    self.assertEqual(expected_requirements, generated_requirements)
def get_deb_depends_from_setuptools_requires(requirements, on_failure="warn"): """ Suppose you can't confidently figure out a .deb which satisfies a given requirement. If on_failure == 'warn', then log a warning. If on_failure == 'raise' then raise CantSatisfyRequirement exception. If on_failure == 'guess' then guess that python-$FOO will satisfy the dependency and that the Python version numbers will apply to the Debian packages (in addition to logging a warning message). """ assert on_failure in ("raise", "warn", "guess"), on_failure import pkg_resources depends = [] # This will be the return value from this function. parsed_reqs = [] for extra, reqs in pkg_resources.split_sections(requirements): if extra: continue parsed_reqs.extend(pkg_resources.parse_requirements(reqs)) if not parsed_reqs: return depends if not os.path.exists('/usr/bin/apt-file'): raise ValueError('apt-file not in /usr/bin. Please install ' 'with: sudo apt-get install apt-file') # Ask apt-file for any packages which have a .egg-info file by # these names. # Note that apt-file appears to think that some packages # e.g. setuptools itself have "foo.egg-info/BLAH" files but not a # "foo.egg-info" directory. egginfore = ( "(/(%s)(?:-[^/]+)?(?:-py[0-9]\.[0-9.]+)?\.egg-info)" % '|'.join(req.project_name.replace('-', '_') for req in parsed_reqs)) args = ["apt-file", "search", "--ignore-case", "--regexp", egginfore] if 1: # do dry run on apt-file dry_run_args = args[:] + ['--dummy', '--non-interactive'] cmd = subprocess.Popen(dry_run_args, stderr=subprocess.PIPE) returncode = cmd.wait() if returncode: err_output = cmd.stderr.read() raise RuntimeError('Error running "apt-file search": ' + err_output.strip()) try: cmd = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True) except Exception, le: # TODO: catch rc=1 and "E: The cache directory is empty. You need to # run 'apt-file update' first.", and tell the user to follow those # instructions. 
log.error('ERROR running: %s', ' '.join(args)) raise RuntimeError('exception %s from subprocess %s' % (le, args))