def discover_host_3rd_party_modules(cls):
    """Scan host site-packages for 3rd party modules.

    Populates ``cls.known_host_3rd_parties``, mapping each discovered
    egg's project name (converted via ``project2module``) to the list of
    top-level modules declared in its ``.egg-info/top_level.txt``.
    """
    try:
        # Bug fix: was ``site.getsitepackagess()`` (typo), which raised
        # AttributeError on every run and was masked by the handler
        # below.  Copy the list so we don't mutate site's return value.
        site_packages_dirs = list(site.getsitepackages())
        site_packages_dirs.append(site.getusersitepackages())
    except AttributeError as e:
        # Some interpreters (e.g. certain virtualenvs) lack these helpers.
        LOG.error("Couldn't get site packages: %s", e)
        return
    for site_dir in site_packages_dirs:
        try:
            dir_entries = os.listdir(site_dir)
        except IOError:
            # Directory may not exist (e.g. user site dir); skip it.
            continue
        for egg in (x for x in dir_entries if x.endswith(".egg-info")):
            pkg_info_path = os.path.join(site_dir, egg, "PKG-INFO")
            modules_path = os.path.join(site_dir, egg, "top_level.txt")
            # Skip eggs missing either metadata file; previously a
            # missing top_level.txt raised an uncaught IOError below.
            if not os.path.isfile(pkg_info_path):
                continue
            if not os.path.isfile(modules_path):
                continue
            with open(pkg_info_path) as f:
                # First "Name:" header of PKG-INFO, or "" if absent.
                name = next(
                    iter(
                        line.split(":")[1].strip()
                        for line in yield_lines(f.readlines())
                        if line.lower().startswith("name:")),
                    "")
            with open(modules_path) as f:
                modules = list(yield_lines(f.readlines()))
            cls.known_host_3rd_parties[project2module(name)] = modules
def verify(self, requirements_path=None):
    """Check that installed packages do not differ from the contents of
    requirements.txt.  (Docstring translated from Japanese.)

    :type requirements_path: string
    :rtype: bool
    """
    # requirements_exist() is defined elsewhere in this class.
    self.requirements_exist(requirements_path)
    with open(requirements_path) as requirements:
        # yield_lines drops blank lines and '#' comment lines.
        packages = yield_lines(requirements)
        for package in packages:
            # Lines handled by _check_import_file() are skipped here.
            if self._check_import_file(package, requirements_path):
                continue
            package_data = self._data_split(package)
            try:
                # NOTE(review): looked up under 'package' here but under
                # 'name' in the handlers below — confirm _data_split
                # provides both keys.
                get_distribution(package_data['package'])
            except VersionConflict:
                # Installed version differs from the pinned one.
                self.terminate = True
                current_package = get_distribution(package_data['name'])
                print("{comment}: {name}({before_ver}) => ({after_ver})".format(
                    comment=WARNING_VERSION_CONFLICT,
                    name=colored(package_data['name'], COLOR_INFO),
                    before_ver=colored(current_package.version, COLOR_INFO),
                    after_ver=colored(package_data['version'], COLOR_INFO)))
            except DistributionNotFound:
                # The requirement is not installed at all.
                self.terminate = True
                print("{comment}: {name}({ver})".format(
                    comment=WARNING_NOT_FOUND,
                    name=colored(package_data['name'], COLOR_INFO),
                    ver=colored(package_data['version'], COLOR_INFO)))
    # NOTE(review): returns the terminate flag — True when a problem was
    # found, not "verification passed".
    return self.terminate
def pre_build():
    # Closure: relies on `self`, `build`, `list_src_rpms`, `utils`,
    # `sh`, `pkg_resources`, `errno` and `LOG` from the enclosing scope.
    # Installs declared build requirements, then moves any source RPMs
    # that are themselves build requirements into a prebuild directory
    # and builds them first.  Returns the names that were required but
    # for which no SRPM was found.
    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    # Re-use the name for the file-based requirement list.
    build_requirements = ''
    try:
        build_requirements = sh.load_file(self.rpm_build_requires_filename)
    except IOError as e:
        # A missing file means "no requirements"; anything else is fatal.
        if e.errno != errno.ENOENT:
            raise
    build_requirements = set(pkg_resources.yield_lines(build_requirements))
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        matched_paths = []
        paths = list_src_rpms(repo_dir)
        # envra = epoch/name/version/release/arch decomposition.
        envra_details = self.envra_helper.explode(*paths)
        for (path, envra_detail) in zip(paths, envra_details):
            package_name = envra_detail.get('name')
            if package_name in build_requirements:
                matched_paths.append(path)
                # Discard so leftovers can be reported by the caller.
                build_requirements.discard(package_name)
        if matched_paths:
            with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                if not sh.isdir(prebuild_dir):
                    sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                for path in matched_paths:
                    sh.move(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                build(prebuild_dir, repo_name,
                      'Prebuilding %s RPM packages from their SRPMs'
                      ' for repo %s using %s jobs')
    return build_requirements
def _write_requirements(stream, reqs): lines = yield_lines(reqs or ()) def append_cr(line): return line + '\n' lines = map(append_cr, lines) stream.writelines(lines)
def postinstall(self, dist):
    """Call postinstall scripts.

    Reads dotted module names from the distribution's
    ``postinstall_scripts.txt`` metadata and calls each module's
    ``install()`` function.

    :param dist: the installed distribution (may be falsy)
    """
    # NOTE(review): nesting reconstructed from collapsed source.
    print("Post installation")
    if (dist):
        pkg_resources.require(dist.project_name)
        sys.path.append(dist.location)
    try:
        lstr = dist.get_metadata("postinstall_scripts.txt")
    except:
        # NOTE(review): bare except treats any failure (missing metadata
        # file, dist being None, ...) as "no scripts to run".
        lstr = []
    # Add pywin32 path
    if ('win32' in sys.platform):
        try:
            win32dir = pj(get_base_dir('pywin32'), 'pywin32_system32')
            if (win32dir not in os.environ['PATH']):
                os.environ['PATH'] += ";" + win32dir
        except:
            print("!!Error : pywin32 package not found. Please install it before.")
    # process postinstall
    for s in pkg_resources.yield_lines(lstr):
        print("Executing %s" % (s))
        try:
            # Import the dotted module name and run its install() hook.
            module = __import__(s, globals(), locals(), s.split('.'))
            module.install()
        except Exception as e:
            print("Warning : Cannot execute %s" % (s,))
            print(e)
def get_egg_info(pkg_name, info_key):
    """Return as a generator the egg-infos contained in info_key.

    :param pkg_name: name of an installed distribution
    :param info_key: metadata file name (e.g. ``top_level.txt``)
    :return: generator of the file's non-blank, non-comment lines;
        empty when the metadata file is absent
    """
    dist = pkg_resources.get_distribution(pkg_name)
    try:
        lstr = dist.get_metadata(info_key)
    except Exception:
        # Missing metadata behaves as an empty file.  (Was a bare
        # ``except:``, which also swallowed SystemExit/KeyboardInterrupt.)
        lstr = ""
    return pkg_resources.yield_lines(lstr)
def testYielding(self): for inp, out in [ ([], []), ('x', ['x']), ([[]], []), (' x\n y', ['x', 'y']), (['x\n\n', 'y'], ['x', 'y']), ]: self.assertEqual(list(pkg_resources.yield_lines(inp)), out)
def get_package_metadata(self):
    """Return selected PKG-INFO fields for ``self.package`` as a dict.

    Only fields whose key appears in the module-level ``METADATA``
    collection are kept; result keys are lower-cased.
    """
    dist = pkg_resources.get_distribution(self.package)
    raw_info = dist.get_metadata('PKG-INFO')
    result = {}
    for entry in pkg_resources.yield_lines(raw_info):
        field, value = entry.split(':', 1)
        field = field.strip()
        if field in METADATA:
            result[field.lower()] = value.strip()
    return result
def write_test_info(cmd, basename, filename):
    """Write (or delete) the ``test_info`` metadata file built from the
    distribution's test attributes and ``tests_require`` list."""
    dist = cmd.distribution
    attrs = [getattr(dist, key, None)
             for key in ('test_module', 'test_suite', 'test_loader')]
    attrs.append('\n '.join(yield_lines(dist.tests_require or ())))
    cmd.write_or_delete_file("test_info", filename, _TEMPLATE % tuple(attrs))
def parse_reqs(strs):
    """Yield a ``Requirement`` for each requirement line in *strs*.

    Handles trailing `` #`` comments and backslash line continuations.
    """
    # Steppable iterator so continuation lines can be consumed eagerly.
    lines = iter(pkg_resources.yield_lines(strs))
    for line in lines:
        # Drop trailing comments; a '#' without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        if line.endswith('\\'):
            # NOTE(review): [:-2] drops the backslash AND the character
            # before it (mirrors pkg_resources' own parser) — confirm a
            # space always precedes the continuation backslash.
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                # Dangling continuation at EOF: stop silently.
                return
        yield Requirement(*parse_reqs_line(line))
def write_test_info(cmd, basename, filename):
    """Emit the ``test_info`` metadata file for *cmd*'s distribution.

    Fills ``_TEMPLATE`` with the test module/suite/loader attributes and
    the cleaned ``tests_require`` list.
    """
    dist = cmd.distribution
    fields = []
    for attr_name in ('test_module', 'test_suite', 'test_loader'):
        fields.append(getattr(dist, attr_name, None))
    fields.append('\n '.join(yield_lines(dist.tests_require or ())))
    cmd.write_or_delete_file("test_info", filename, _TEMPLATE % tuple(fields))
def split_requirements(strs):
    """Yield one requirement specification string per logical line.

    *strs* must be a string, or a (possibly-nested) iterable thereof.
    Trailing `` #`` comments are removed, and a line ending in a
    backslash is joined with the line that follows it.
    """
    # Keep an explicit iterator so continuations can advance it.
    source = iter(yield_lines(strs))
    for raw in source:
        # A hash without a leading space may belong to a URL; only
        # strip from ' #' onward.
        comment_idx = raw.find(' #')
        spec = raw if comment_idx < 0 else raw[:comment_idx]
        if spec.endswith('\\'):
            # Merge the continuation with the next physical line.
            spec = spec[:-2].strip()
            spec += next(source)
        yield spec.strip()
def _safe_parse_requirements( requirements: Union[str, Iterable[str]] ) -> Set[pkg_resources.Requirement]: """Safely parse a requirement or set of requirements. This effectively replaces pkg_resources.parse_requirements, which blows up with a ValueError as soon as it encounters a requirement it cannot parse (e.g. `-r requirements.txt`). This way we can still extract all the parseable requirements out of a set containing some unparseable requirements. """ parseable_requirements = set() for requirement in pkg_resources.yield_lines(requirements): try: parseable_requirements.add(pkg_resources.Requirement.parse(requirement)) except ValueError: continue return parseable_requirements
def parse_requirements(filepath):
    """Extract requirement information from a requirements-style file.

    Returns a dict with:
      * ``__python_requires__`` — spec from a ``# python <spec>``
        comment line, if one exists (else ``None``);
      * ``__requires__`` — the file's non-blank, non-comment lines, or
        ``None`` when the file does not exist.
    """
    result = {
        "__python_requires__": None,
        "__requires__": None,
    }
    python_marker = re.compile(
        r'\s*#\s*python\s*((?:[=<>!~]=|[<>]|===)\s*\S(?:.*\S)?)\s*',
        flags=re.I,
    )
    try:
        with open(filepath, encoding='utf-8') as fp:
            for raw_line in fp:
                match = python_marker.fullmatch(raw_line)
                if match:
                    result["__python_requires__"] = match.group(1)
                    break
            # Rewind and collect the actual requirement lines.
            fp.seek(0)
            result["__requires__"] = list(yield_lines(fp))
    except FileNotFoundError:
        pass
    return result
def postinstall(self, dist):
    """Call postinstall scripts.

    Module names are read from the distribution's
    ``postinstall_scripts.txt`` metadata; each module's ``install()``
    function is invoked in turn.

    :param dist: the installed distribution (may be falsy)
    """
    # NOTE(review): nesting reconstructed from collapsed source.
    print("Post installation")
    if (dist):
        pkg_resources.require(dist.project_name)
        sys.path.append(dist.location)
    try:
        lstr = dist.get_metadata("postinstall_scripts.txt")
    except:
        # NOTE(review): bare except — any failure (missing metadata,
        # falsy dist, ...) is treated as "no scripts".
        lstr = []
    # Add pywin32 path
    if ('win32' in sys.platform):
        try:
            win32dir = pj(get_base_dir('pywin32'), 'pywin32_system32')
            if (win32dir not in os.environ['PATH']):
                os.environ['PATH'] += ";" + win32dir
        except:
            print(
                "!!Error : pywin32 package not found. Please install it before."
            )
    # process postinstall
    for s in pkg_resources.yield_lines(lstr):
        print("Executing %s" % (s))
        try:
            # Import the dotted module path and run its install() hook.
            module = __import__(s, globals(), locals(), s.split('.'))
            module.install()
        except Exception as e:
            print("Warning : Cannot execute %s" % (s, ))
            print(e)
def get_metadata_lines(self, name):
    """Return the non-blank, non-comment lines of metadata file *name*."""
    raw = self.get_metadata(name)
    return yield_lines(raw)
def _write_requirements(stream, reqs): lines = yield_lines(reqs or ()) append_cr = lambda line: line + "\n" lines = map(append_cr, sorted(lines)) stream.writelines(lines)
def get_install_requires():
    """Return the requirement specs listed in requirements-minimal.txt."""
    with open("requirements-minimal.txt", "r") as req_file:
        contents = req_file.read()
    return list(yield_lines(contents))
def write_requirements(cmd, basename, filename):
    """Write (or delete) the distribution's ``requirements`` metadata
    file, with one ``[extra]`` section per extras_require entry."""
    dist = cmd.distribution
    sections = ['\n'.join(yield_lines(dist.install_requires or ()))]
    for extra, reqs in list((dist.extras_require or {}).items()):
        section_body = '\n'.join(yield_lines(reqs))
        sections.append('\n\n[%s]\n%s' % (extra, section_body))
    cmd.write_or_delete_file("requirements", filename, ''.join(sections))
def _write_requirements(stream, reqs): lines = yield_lines(reqs or ()) append_cr = lambda line: line + '\n' lines = map(append_cr, lines) stream.writelines(lines)
# NOTE(review): this chunk begins mid-method — the enclosing bdist_rpm
# ``run()`` override's ``def`` is above this view.  It invokes rpmbuild
# quietly, then (unless dry-run) moves built (S)RPMs into dist_dir.
        rpm_cmd.append('--quiet')
        rpm_cmd.append(self.spec_file)
        self.spawn(rpm_cmd)
        if not self.dry_run:
            if not self.binary_only:
                for srpm in iglob(path.join(rpm_dir['SRPMS'], '*.rpm')):
                    self.move_file(srpm, self.dist_dir)
            if not self.source_only:
                for rpm in iglob(path.join(rpm_dir['RPMS'], '*/*.rpm')):
                    self.move_file(rpm, self.dist_dir)
    # run()


# Module level: merge the two requirements files, deduplicated via a
# set, and hand the result to setup().
with open(path.join(here, 'requirements.txt')) as f1, \
        open(path.join(here, 'openHPC_web_project/requirements.txt')) as f2:
    install_requires = list(
        set(chain(
            yield_lines(f1.read()),
            yield_lines(f2.read()),
        )))

setup(install_requires=install_requires,
      zip_safe=False,
      python_requires='~=2.7.0',
      cmdclass={'bdist_rpm': bdist_rpm})
def _readlines(self):
    """Yield the migration data file's non-blank, non-comment lines
    as decoded text."""
    raw = resource_string('ttgn.pokedex',
                          'migrations/data/{}'.format(self.filename))
    for line in yield_lines(raw.decode('utf-8')):
        yield line
def get_metadata_lines(self, name):
    """Return metadata file *name* as cleaned lines (blanks and
    '#' comments removed)."""
    content = self.get_metadata(name)
    return pkg_resources.yield_lines(content)
def readlines(fp):
    """Return the non-blank, non-comment lines of *fp* as a list."""
    return [*yield_lines(fp)]
def _read_metadata(self, name): """ Read a .txt format metadata file from .whl file. """ content = self.metadata_files.get(name, b'') content = content.decode('utf-8') return pkg_resources.yield_lines(content)
def testYielding(self): for inp, out in [([], []), ("x", ["x"]), ([[]], []), (" x\n y", ["x", "y"]), (["x\n\n", "y"], ["x", "y"])]: assert list(pkg_resources.yield_lines(inp)) == out
def _write_requirements(stream, reqs): lines = yield_lines(reqs or ())
def find_word(w):
    """Return the first dictionary line matching ``lookup_ptn``.

    :param w: the looked-up word — NOTE(review): unused here; the
        module-level ``lookup_ptn`` is presumably built from it
        elsewhere — confirm with the caller.
    :raises WordNotFoundException: when no line matches.
    """
    # Decode the resource once before splitting: on Python 3,
    # pkg_resources.yield_lines over a ``bytes`` object iterates it as
    # integers, so the original per-line ``.decode`` could never run.
    text = resource_string(data.__name__, 'de-en.txt').decode('utf-8')
    for line_u in yield_lines(text):
        if re.match(lookup_ptn, line_u):
            return line_u
    raise WordNotFoundException
def write_requirements(cmd, basename, filename):
    """Write (or delete) the ``requirements`` metadata file: the base
    install_requires block followed by one ``[extra]`` section each."""
    dist = cmd.distribution
    base_block = '\n'.join(yield_lines(dist.install_requires or ()))
    chunks = [base_block]
    extras = dist.extras_require or {}
    for extra, reqs in extras.items():
        section_body = '\n'.join(yield_lines(reqs))
        chunks.append('\n\n[{0!s}]\n{1!s}'.format(extra, section_body))
    cmd.write_or_delete_file("requirements", filename, ''.join(chunks))
def write_requirements(cmd, basename, filename):
    """Write (or delete) the requirements metadata file built from
    install_requires plus one bracketed section per extra."""
    dist = cmd.distribution
    parts = ["\n".join(yield_lines(dist.install_requires or ()))]
    extras_map = dist.extras_require or {}
    for extra_name, extra_reqs in extras_map.items():
        parts.append("\n\n[%s]\n%s"
                     % (extra_name, "\n".join(yield_lines(extra_reqs))))
    cmd.write_or_delete_file("requirements", filename, "".join(parts))
def get_install_requires():
    """Return the requirement specs listed in requirements.txt."""
    with open('requirements.txt', 'r') as req_file:
        return list(yield_lines(req_file.read()))
def get_metadata_lines(self, name):
    """Return the cleaned (non-blank, non-comment) lines of metadata
    file *name*."""
    contents = self.get_metadata(name)
    return pkg_resources.yield_lines(contents)
# NOTE(review): this chunk begins mid-method (the enclosing bdist_rpm
# ``run()`` override's ``def`` is outside this view) and the trailing
# setup() call is truncated mid-``Extension(...)`` — the remainder
# continues beyond this view.
        rpm_cmd.append(self.spec_file)
        self.spawn(rpm_cmd)
        if not self.dry_run:
            if not self.binary_only:
                for srpm in iglob(path.join(rpm_dir['SRPMS'], '*.rpm')):
                    self.move_file(srpm, self.dist_dir)
            if not self.source_only:
                for rpm in iglob(path.join(rpm_dir['RPMS'], '*/*.rpm')):
                    self.move_file(rpm, self.dist_dir)

# Module level: load runtime and test requirements, then call setup().
with open(path.join(here, 'requirements.txt')) as f:
    install_requires = list(yield_lines(f.read()))
with open(path.join(here, 'test-requirements.txt')) as f:
    tests_require = list(yield_lines(f.read()))

setup(
    packages=find_packages(include=[
        'antilles*',
    ], ),
    namespace_packages=['antilles', 'antilles.agent'],
    include_package_data=True,
    install_requires=install_requires,
    zip_safe=False,
    ext_modules=[
        Extension(name='antilles.agent.sms._pysms',
                  sources=['src/pysms.pyx', 'src/at.c', 'src/serialport.c'],
def testYielding(self): for inp,out in [ ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), (['x\n\n','y'], ['x','y']), ]: self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
def from_file(cls, path):
    """Yield one parsed requirement per usable line of the file at *path*.

    Lines that fail to parse are logged (via ``log.warning``) and
    skipped rather than aborting the whole file.
    """
    # Bug fix: the original passed ``open(path)`` straight into
    # yield_lines and never closed the handle; the context manager
    # closes it deterministically when iteration finishes.
    with open(path) as handle:
        for line in pkg_resources.yield_lines(handle):
            try:
                yield cls.from_line(line)
            except pkg_resources.RequirementParseError as exc:
                log.warning("line %r of %r raised %r", line, path, exc)