def _get_meta(self, path):
    data = pkginfo.get_metadata(path)
    if data:
        return data
    else:
        print("Couldn't get metadata from %s. Not added to chishop"
              % os.path.basename(path))
        return None
def _run(self, **kwargs):
    replier = self.message.reply_text
    me = pkginfo.get_metadata(self.what)
    if not me:
        replier("I am not really sure what I am.",
                threaded=True, prefixed=False)
    else:
        lines = _format_pkg(me, self.pkg_info_attrs)
        if lines:
            replier = self.message.reply_attachments
            attachment = {
                'pretext': "I am the following:",
                'text': "\n".join(lines),
                'mrkdwn_in': ['text'],
            }
            replier(text=' ', log=LOG, attachments=[attachment],
                    link_names=True, as_user=True,
                    channel=self.message.body.channel,
                    thread_ts=self.message.body.ts)
        else:
            replier("I am not really sure what I am.",
                    threaded=True, prefixed=False)
def dependencies(self):
    """
    Read the dependencies in the local file

    :return: same format as in the "packages" config file parameter
    """
    if self._dependencies is None:
        self._dependencies = {}
        metadata = pkginfo.get_metadata(self._local_file)
        if metadata is not None:
            for require in metadata.requires_dist:
                version, env_marker = PypiPackage._parse_requirement(require)
                version = packaging.requirements.Requirement(version)
                if env_marker is not None:
                    if not PypiPackage.evaluate_env_marker(
                            env_marker, self._environment):
                        continue
                if version.name not in self._dependencies:
                    self._dependencies[version.name] = set()
                specifier = str(version.specifier).strip()
                if specifier == "":
                    specifier = "latest"
                self._dependencies[version.name].add(specifier)
    return self._dependencies
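# A minimal sketch of the requirement/marker handling above, using the
# `packaging` library directly. The requirement string is hypothetical, and
# `_parse_requirement` / `evaluate_env_marker` are project helpers not shown
# here; `packaging` itself can do both steps:
from packaging.requirements import Requirement

req = Requirement('requests>=2.20; python_version >= "3.6"')
print(req.name)            # 'requests'
print(str(req.specifier))  # '>=2.20'
# Requirement.marker is None when no environment marker is present.
if req.marker is None or req.marker.evaluate():
    print("dependency applies to the current environment")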
def mine_wheel_metadata_full_download(job: Job) -> Union[Result, Exception]:
    print(f"Bucket {job.bucket} - Job {job.nr} - {job.name}:{job.ver}")
    for _ in range(5):
        try:
            with NamedTemporaryFile(suffix='.whl') as f:
                resp = requests.get(job.url, headers=headers)
                if resp.status_code == 404:
                    return requests.HTTPError()
                if resp.status_code in [503, 502]:
                    try:
                        resp.raise_for_status()
                    except Exception:
                        traceback.print_exc()
                    raise Retry
                resp.raise_for_status()
                with open(f.name, 'wb') as f_write:
                    f_write.write(resp.content)
                metadata = pkginfo.get_metadata(f.name)
                return Result(
                    job=job,
                    requires_dist=metadata.requires_dist,
                    provides_extras=metadata.provides_extras,
                    requires_external=metadata.requires_external,
                    requires_python=metadata.requires_python,
                )
        except Retry:
            sleep(10)
        except zipfile.BadZipFile as e:
            return e
        except Exception:
            print(f"Problem with {job.name}:{job.ver}")
            traceback.print_exc()
            raise
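# The snippet above assumes a module-level sentinel exception and request
# headers defined elsewhere in the project; a plausible minimal version of
# both (names match the snippet, values are hypothetical):
class Retry(Exception):
    """Raised to signal that the download should be attempted again."""

headers = {"User-Agent": "wheel-metadata-miner"}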
def get_pkginfo(archivepath):
    if str(archivepath).endswith(".doc.zip"):
        return BasenameMeta(archivepath)
    import pkginfo
    info = pkginfo.get_metadata(str(archivepath))
    return info
def metadata(self):
    self.log.debug("searching metadata")
    info = pkginfo.get_metadata(self.dist_path)
    if info is None:
        raise Exception("failed to get metadata")
    data = vars(info)
    data.pop("filename", None)
    return data
def _extract_metadata(whl_file):
    logger.debug("searching metadata", whl_file=whl_file)
    info = pkginfo.get_metadata(whl_file)
    if info is None:
        raise Exception("failed to get metadata")
    data = vars(info)
    data.pop("filename", None)
    return data
def version(request):
    from django.conf import settings
    import glob
    import pkginfo

    whisper_version = pkginfo.get_metadata('whisper').version
    api_version = pkginfo.get_metadata('graphiteapi').version
    carbon_pkginfo = glob.glob('/opt/graphite/lib/carbon*egg-info')[-1]
    try:
        carbon_version = pkginfo.get_metadata(carbon_pkginfo).version
    except Exception:
        carbon_version = False
    if not api_version:
        from graphiteapi import VERSION
        api_version = '.'.join(map(str, VERSION))
    return {
        'graphite': settings.WEBAPP_VERSION,
        'carbon': carbon_version,
        'whisper': whisper_version,
        'api': api_version,
    }
def _extract_metadata(wheel_fname):
    wheel_fname = os.path.abspath(wheel_fname)
    logger.debug("Searching metadata in %s", wheel_fname)
    if not os.path.exists(wheel_fname):
        raise RuntimeError("File not found: {}".format(wheel_fname))
    info = get_metadata(wheel_fname)
    if info is None:
        raise RuntimeError(
            "Failed to extract metadata: {}".format(wheel_fname))
    data = vars(info)
    data.pop("filename", None)
    return data
def _get_metadata(self, release):
    try:
        metadata = pkginfo.get_metadata(release).__dict__
    except Exception:  # bad archive
        metadata = {}
    md5_hash = md5()
    with open(release, 'rb') as fp:
        for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b''):
            md5_hash.update(content)
    metadata.update({'md5_digest': md5_hash.hexdigest()})
    return metadata
def get_pkginfo(archivepath):
    if str(archivepath).endswith(".doc.zip"):
        return BasenameMeta(archivepath)
    if archivepath.ext == ".whl":
        # workaround for https://bugs.launchpad.net/pkginfo/+bug/1227788
        import twine.wheel
        wheel = twine.wheel.Wheel(str(archivepath))
        wheel.parse(wheel.read())
        return wheel
    import pkginfo
    info = pkginfo.get_metadata(str(archivepath))
    return info
def get_package_info(pkgpath):
    """
    Provide a subset of the package metadata to merge into the
    Concourse resource metadata.
    """
    pkgmeta = pkginfo.get_metadata(pkgpath)
    result = {
        'version': pkgmeta.version,
        'metadata': {
            'package_name': pkgmeta.name,
            'summary': pkgmeta.summary,
            'home_page': pkgmeta.home_page,
            'platforms': ', '.join(pkgmeta.platforms),
            'requires_python': pkgmeta.requires_python,
        }
    }
    return result
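# A usage sketch for get_package_info; the wheel path is hypothetical:
info = get_package_info("dist/example-1.0.0-py3-none-any.whl")
print(info['version'])
print(info['metadata']['package_name'])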
def _introspect(self):
    """
    Get the pkginfo metadata and monkeypatch where required
    """
    metadata = pkginfo.get_metadata(self.dist.path)
    if not metadata:
        raise InvalidDistribution(self.dist.path)
    metadata_full = metadata.read()
    if metadata.classifiers == () and 'Classifier' in metadata_full:
        metadata.classifiers = self._parse_classifiers(metadata_full)
    if metadata.keywords and not isinstance(metadata.keywords, (tuple, list)):
        keywords = metadata.keywords.split(',')
        if len(keywords) == 1:
            keywords = metadata.keywords.split(' ')
        metadata.keywords = keywords
    return metadata
def find_packages_missing_on_pypi(path):
    import pkginfo

    requires = []
    if path.endswith(".whl"):
        requires = list(filter(lambda_filter_azure_pkg,
                               pkginfo.get_metadata(path).requires_dist))
    else:
        _, _, _, requires = parse_setup(path)

    # parse pkg name and spec
    pkg_spec_dict = dict(parse_require(req) for req in requires)
    logging.info("Package requirement: {}".format(pkg_spec_dict))
    # find if version is available on pypi
    missing_packages = [
        "{0}{1}".format(pkg, pkg_spec_dict[pkg])
        for pkg in pkg_spec_dict.keys()
        if not is_required_version_on_pypi(pkg, pkg_spec_dict[pkg])
    ]
    if missing_packages:
        logging.error("Packages not found on PyPI: {}".format(missing_packages))
    return missing_packages
def _get_metadata(self, release):
    path = os.path.join(str(pathlib.Path().absolute()), release.split('/')[1])
    self.s3.download_file(Filename=path, Bucket=self.bucket, Key=release)
    try:
        metadata = pkginfo.get_metadata(path).__dict__
    except Exception:  # bad archive
        metadata = {}
    md5_hash = md5()
    with open(path, 'rb') as fp:
        for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b''):
            md5_hash.update(content)  # pragma: no cover
    metadata.update({'md5_digest': md5_hash.hexdigest()})
    os.remove(path)
    return metadata
def main(args=None):
    """Entry point for pkginfo tool
    """
    options, paths = _parse_options(args)
    format = getattr(options, 'output', 'simple')
    formatter = _FORMATTERS[format](options)
    for path in paths:
        meta = get_metadata(path, options.metadata_version)
        if meta is None:
            continue
        if options.download_url_prefix:
            if meta.download_url is None:
                filename = os.path.basename(path)
                meta.download_url = '%s/%s' % (options.download_url_prefix,
                                               filename)
        formatter(meta)
    formatter.finish()
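# The entry point takes an argv-style list (None presumably falls back to
# sys.argv inside _parse_options); a minimal invocation sketch, assuming
# positional distribution paths and a hypothetical sdist file:
main(["dist/example-1.0.0.tar.gz"])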
def test_download():
    # type: () -> None
    project1_sdist = create_sdist(
        name="project1", version="1.0.0", extras_require={"foo": ["project2"]}
    )
    project2_wheel = build_wheel(
        name="project2",
        version="2.0.0",
        # This is the last version of setuptools compatible with Python 2.7.
        install_reqs=["setuptools==44.1.0"],
    )

    downloaded_by_target = defaultdict(list)
    for local_distribution in download(
        requirements=["{}[foo]".format(project1_sdist)],
        find_links=[os.path.dirname(project2_wheel)],
    ):
        distribution = pkginfo.get_metadata(local_distribution.path)
        downloaded_by_target[local_distribution.target].append(distribution)

    assert 1 == len(downloaded_by_target)
    target, distributions = downloaded_by_target.popitem()
    assert DistributionTarget.current() == target

    distributions_by_name = {
        distribution.name: distribution for distribution in distributions
    }
    assert 3 == len(distributions_by_name)

    def assert_dist(project_name, dist_type, version):
        dist = distributions_by_name[project_name]
        assert dist_type is type(dist)
        assert version == dist.version

    assert_dist("project1", pkginfo.SDist, "1.0.0")
    assert_dist("project2", pkginfo.Wheel, "2.0.0")
    assert_dist("setuptools", pkginfo.Wheel, "44.1.0")
def get_version():
    pkg = pkginfo.get_metadata("powny")
    return (pkg.version if pkg is not None else "<unknown>")
def get_package_version(pkgpath):
    metadata = pkginfo.get_metadata(pkgpath)
    return LooseVersion(metadata.version)
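# LooseVersion comes from distutils.version, which is deprecated and removed
# along with distutils in Python 3.12; on modern Python the equivalent uses
# packaging.version. A sketch with that substitution (the function name here
# is hypothetical, not from the original project):
import pkginfo
from packaging.version import Version

def get_package_version_modern(pkgpath):
    metadata = pkginfo.get_metadata(pkgpath)
    return Version(metadata.version)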
def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> None:
    source_url = attrs.get("url") or attrs.get("source", {}).get("url")
    version = attrs.get("new_version", "") or attrs.get("version", "")
    if not version:
        return None
    wheel_url, wheel_file = self.determine_wheel(source_url, version)
    if wheel_url is None:
        return None

    # parse the versions from the wheel
    wheel_packages = {}
    with tempfile.TemporaryDirectory() as tmpdir, indir(tmpdir):
        resp = requests.get(wheel_url)
        with open(wheel_file, "wb") as fp:
            for chunk in resp.iter_content(chunk_size=2**16):
                fp.write(chunk)

        import pkginfo
        import pkg_resources

        wheel_metadata = pkginfo.get_metadata(wheel_file)
        wheel_metadata.extractMetadata()
        for dep in wheel_metadata.requires_dist:
            parsed_req = pkg_resources.Requirement.parse(dep)
            # ignore extras, and markers
            # map pypi name to the conda name, with fallback to pypi name
            conda_name = pypi_conda_mapping().get(parsed_req.name, parsed_req.name)
            wheel_packages[conda_name] = str(parsed_req.specifier)

    if not wheel_packages:
        return None

    handled_packages = set()
    with indir(recipe_dir):
        with open("meta.yaml") as f:
            lines = f.readlines()
        in_reqs = False
        for i, line in enumerate(lines):
            if line.strip().startswith("requirements:"):
                in_reqs = True
                continue
            if in_reqs and len(line) > 0 and line[0] != " ":
                in_reqs = False
            if not in_reqs:
                continue
            if line.strip().startswith("run:"):
                # This doesn't really account for comments in blocks
                j = i + 1
                # Find this block in the source file
                while j < len(lines):
                    if lines[j].strip().startswith("-"):
                        spaces = len(lines[j]) - len(lines[j].lstrip())
                    elif lines[j].strip().startswith("#"):
                        pass
                    elif lines[j].strip().startswith("{%"):
                        pass
                    else:
                        break
                    j = j + 1
                new_line = " " * spaces
                for line_index in range(i + 1, j):
                    line = lines[line_index]
                    if not line.strip().startswith("-"):
                        continue
                    line = lines[line_index].strip().strip("-").strip()
                    pkg_name, *_ = line.split()
                    if pkg_name in wheel_packages:
                        lines[line_index] = (
                            " " * spaces + "- " + pkg_name + " "
                            + wheel_packages[pkg_name] + "\n"
                        )
                        handled_packages.add(pkg_name)
                # There are unhandled packages. Since these might not be
                # on conda-forge add them, but leave them commented out
                for pkg_name in sorted(set(wheel_packages) - handled_packages):
                    # TODO: add to pr text saying that we discovered new deps
                    new_line = (
                        " " * spaces + "# - " + pkg_name + " "
                        + wheel_packages[pkg_name] + "\n"
                    )
                    handled_packages.add(pkg_name)
                    lines.insert(j, new_line)
                break
        with open("meta.yaml", "w") as f:
            f.write("".join(lines))
def metadata(self) -> pkginfo.Wheel:
    return pkginfo.get_metadata(self.path)
def get_directory_details(path, pbr_version=None):
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))

    # Check if we already got the details of this dir previously
    with EGGS_DETAILED_LOCK:
        path = sh.abspth(path)
        cache_key = "d:%s" % (sh.abspth(path))
        if cache_key in EGGS_DETAILED:
            return EGGS_DETAILED[cache_key]

        details = None
        skip_paths = [
            sh.joinpths(path, "PKG-INFO"),
            sh.joinpths(path, "EGG-INFO"),
        ]
        skip_paths.extend(glob.glob(sh.joinpths(path, "*.egg-info")))
        if any(sh.exists(a_path) for a_path in skip_paths):
            # Some packages seem to not support the 'egg_info' call and
            # provide their own path/file that contains this information
            # already, so just use it if we can get at it...
            #
            # Ie for pyyaml3.x:
            #
            #   error: invalid command 'egg_info'
            details = pkginfo.Develop(path)
        if not details or not details.name:
            cmd = [sys.executable, 'setup.py', 'egg_info']
            if pbr_version:
                env_overrides = {
                    "PBR_VERSION": str(pbr_version),
                }
            else:
                env_overrides = {}
            sh.execute(cmd, cwd=path, env_overrides=env_overrides)
            details = pkginfo.get_metadata(path)
        if not details or not details.name:
            raise RuntimeError("No egg detail information discovered"
                               " at '%s'" % path)

        egg_details = {
            'req': create_requirement(details.name, version=details.version),
        }
        for attr_name in ['description', 'author', 'version', 'name', 'summary']:
            egg_details[attr_name] = getattr(details, attr_name)
        for attr_name in ['description', 'author', 'summary']:
            attr_value = egg_details[attr_name]
            if isinstance(attr_value, six.text_type):
                # Fix any unicode which will cause unicode decode failures...
                # versions or names shouldn't be unicode, and the rest
                # we don't really care about being unicode (since it's
                # just used for logging right now anyway...).
                #
                # The reason this is done is that 'elasticsearch' seems to
                # have a unicode author name, and that causes the log_object
                # to blow up, so just avoid that by replacing this information
                # in the first place.
                egg_details[attr_name] = attr_value.encode("ascii", errors='replace')

        LOG.debug("Extracted '%s' egg detail information:", path)
        utils.log_object(egg_details, logger=LOG, level=logging.DEBUG)
        EGGS_DETAILED[cache_key] = egg_details
        return egg_details
def generate(ctx, distribution, entrypoint, cache, clear_cache):
    """Generates yaml config file for shipment."""
    if distribution is None:
        click.echo('\n'.join(getobedients()))
        ctx.exit()

    dist = pkg_resources.get_distribution(distribution)

    if entrypoint is None:
        # Show all "obedient" entrypoints for package
        for entrypoint in dist.get_entry_map('obedient').keys():
            click.echo(entrypoint)
        ctx.exit()

    getlogger().info("generating config", distribution=distribution,
                     entrypoint=entrypoint)

    assert dist is not None, \
        "Could not load distribution for {}".format(distribution)

    if entrypoint is None:
        entrypoint = list(dist.get_entry_map('obedient').keys())[0]
        getlogger().debug("autodetected entrypoint is %s", entrypoint)

    func = dist.load_entry_point('obedient', entrypoint)
    assert func is not None, \
        "Could not load entrypoint {} from distribution {}".format(
            entrypoint, distribution)

    import pkginfo
    meta = pkginfo.get_metadata(distribution)

    if cache:
        import requests_cache
        with requests_cache.enabled():
            if clear_cache:
                requests_cache.clear()
            shipment = func()
    else:
        getlogger().info('loading containers without cache')
        shipment = func()

    shipment.version = meta.version
    shipment.author = meta.author
    shipment.author_email = meta.author_email
    shipment.home_page = meta.home_page
    shipment.dominator_version = getversion()

    import tzlocal
    shipment.timestamp = datetime.datetime.now(tz=tzlocal.get_localzone())

    getlogger().debug("retrieving image ids")
    for image in shipment.images:
        if not isinstance(image, SourceImage):
            with utils.addcontext(logger=logging.getLogger('dominator.image'),
                                  image=image):
                if image.getid() is None:
                    image.pull()
                if image.getid() is None:
                    raise RuntimeError(
                        "Could not find id for image {}".format(image))

    click.echo_via_pager(yaml.dump(shipment))
if __name__ == "__main__":
    with open("ignore", "r") as f:
        ignored_names = f.read().splitlines()

    whl_folder = "wheels"
    whl_files = [
        file for file in listdir(whl_folder)
        if isfile(join(whl_folder, file)) and file.endswith(".whl")
    ]

    whl_metadatas = []
    name_to_version = dict()
    name_to_file = dict()
    for file in whl_files:
        metadata = get_metadata(join(whl_folder, file))
        version = get_version.parse(metadata.version)
        if metadata.name not in ignored_names and (
            ((current_version := name_to_version.get(metadata.name, None)) is None)
            or version > current_version
        ):
            whl_metadatas.append(metadata)
            name_to_file[metadata.name] = file
            name_to_version[metadata.name] = version

    requirements = dict(
        (clean_name(md.name), [clean_name(req.split()[0]) for req in md.requires_dist])
        for md in whl_metadatas
    )
    whl_order = []
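    # The example ends just before the ordering logic. A plausible
    # continuation (purely illustrative, not the original code) is a
    # topological sort over the requirements map, so each wheel appears
    # after its dependencies; external deps not in `requirements` are
    # treated as already satisfied:
    resolved = set()
    pending = dict(requirements)  # name -> list of dependency names
    while pending:
        ready = [name for name, deps in pending.items()
                 if all(dep in resolved or dep not in pending for dep in deps)]
        if not ready:  # dependency cycle; append the remainder as-is
            whl_order.extend(pending)
            break
        for name in ready:
            whl_order.append(name)
            resolved.add(name)
            del pending[name]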
def metadata(self):
    return pkginfo.get_metadata(self.path())
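# Across all of the examples above, pkginfo.get_metadata() dispatches on its
# argument: a .whl path yields a pkginfo.Wheel, an sdist archive a
# pkginfo.SDist, a source checkout directory a pkginfo.Develop, and an
# installed module name a pkginfo.Installed; it returns None when nothing
# can be parsed. A minimal sketch (the path is hypothetical):
import pkginfo

meta = pkginfo.get_metadata("dist/example-1.0.0-py3-none-any.whl")
if meta is not None:
    print(type(meta).__name__, meta.name, meta.version)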