def vulnerable_version_range(self) -> List[specifiers.SpecifierSet]:
    # Try using ranges first to avoid clutter.
    affected_ecosystem_ranges = list(
        filter(
            _is_supported_range_type,
            self._json.get("affects", {}).get("ranges", []),
        )
    )
    if len(affected_ecosystem_ranges):
        ranges = []
        for affected_range in affected_ecosystem_ranges:
            _constraints = []
            if "introduced" in affected_range:
                v = affected_range.get("introduced")
                _constraints.append(f">={v}")
            if "fixed" in affected_range:
                v = affected_range.get("fixed")
                _constraints.append(f"<{v}")
            ranges.append(
                specifiers.SpecifierSet(",".join(_constraints), prereleases=True)
            )
        return ranges

    # Try using versions (default).
    affected_versions = self._json.get("affects", {}).get("versions", [])
    if len(affected_versions):
        return [
            specifiers.SpecifierSet(f"=={x}", prereleases=True)
            for x in affected_versions
        ]

    return [specifiers.SpecifierSet(">=0", prereleases=True)]
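
# A minimal sketch (not from the source) of the mapping above: an
# "introduced"/"fixed" pair becomes a half-open SpecifierSet. The sample
# range data here is hypothetical.
from packaging import specifiers, version

affected_range = {"introduced": "1.0.0", "fixed": "1.4.2"}
spec = specifiers.SpecifierSet(
    f">={affected_range['introduced']},<{affected_range['fixed']}",
    prereleases=True,
)
print(version.Version("1.3.0") in spec)  # True: inside the vulnerable window
print(version.Version("1.4.2") in spec)  # False: the fixed version is excluded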
def vulnerable_version_range(self) -> List[specifiers.SpecifierSet]:
    affected_range = self._json["affected_range"]
    if not affected_range:
        return [specifiers.SpecifierSet(">=0.0.0", prereleases=True)]
    return [
        specifiers.SpecifierSet(x, prereleases=True)
        for x in affected_range.split("||")
    ]
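
# Sketch of the "||"-delimited form handled above; the range string here is
# hypothetical sample data, not taken from the source.
from packaging import specifiers

affected = ">=1.0,<1.5||>=2.0,<2.3"
ranges = [specifiers.SpecifierSet(x, prereleases=True) for x in affected.split("||")]
print(any(r.contains("1.2") for r in ranges))  # True: matched by the first range
print(any(r.contains("1.7") for r in ranges))  # False: falls between the ranges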
def reqVersion(valu, reqver, exc=s_exc.BadVersion,
               mesg='Provided version does not match required version.'):
    '''
    Require a given version tuple is valid for a given requirements string.

    Args:
        valu (Optional[Tuple[int, int, int]]): Major, minor, and patch values to check.
        reqver (str): A requirements version string.
        exc (s_exc.SynErr): The SynErr class to raise.
        mesg (str): The message to pass in the exception.

    Returns:
        None: If the value satisfies the requirements string.

    Raises:
        s_exc.BadVersion: If a precondition is incorrect or a version value is out of bounds.
    '''
    if valu is None:
        mesg = 'Version value is missing. ' + mesg
        raise exc(mesg=mesg, valu=valu, reqver=reqver)

    spec = p_specifiers.SpecifierSet(reqver)
    verstr = fmtVersion(*valu)
    vers = p_version.Version(verstr)
    if vers not in spec:
        raise exc(mesg=mesg, valu=valu, verstr=verstr, reqver=reqver)
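
# Sketch of the check reqVersion() performs, using packaging directly
# (s_exc and fmtVersion are project internals not reproduced here):
from packaging import specifiers, version

valu = (2, 10, 1)                       # hypothetical version tuple
verstr = '%d.%d.%d' % valu              # stand-in for fmtVersion(*valu)
spec = specifiers.SpecifierSet('>=2.9.0,<3.0.0')
print(version.Version(verstr) in spec)  # True -> reqVersion() returns None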
def do_list(args: argparse.Namespace) -> None:
    """
    List installed models.

    :param args: the arguments passed on the command line
    :type args: :class:`argparse.Namespace` object
    :rtype: None
    """
    registry = _find_all_installed_packages()
    for model_loc in registry.models():
        try:
            pack = Package(model_loc.path)
            pack_vers = pack.metadata['vers']
            if args.outdated or args.uptodate:
                remote = RemoteModelIndex(org=args.org)
                all_versions = remote.list_package_vers(pack.name)
                specifier = specifiers.SpecifierSet(f">{pack_vers}")
                update_vers = list(specifier.filter(all_versions))
                if args.outdated and update_vers:
                    print(f"{model_loc.name}-{update_vers[0]} [{pack_vers}]")
                if args.uptodate and not update_vers:
                    print(f"{model_loc.name}-{pack_vers}")
            else:
                print(f"{model_loc.name}-{pack_vers}")
        except Exception as err:
            if args.verbose > 1:
                _log.warning(str(err))
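
# A minimal sketch of the filter() call above: SpecifierSet.filter() yields
# only the candidate versions matching the set, which is how do_list()
# decides whether updates exist for an installed version.
from packaging import specifiers

newer = specifiers.SpecifierSet(">1.2.0")
print(list(newer.filter(["1.0.0", "1.2.0", "1.3.0", "2.0.0"])))  # ['1.3.0', '2.0.0']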
def satisfied(reqs, name, version):
    if name not in reqs:
        return True
    # Check the version against each constraint recorded for this package.
    for constraint, _ in reqs[name]:
        spec = specifiers.SpecifierSet(constraint.specifiers)
        if spec.contains(version):
            return True
    return False
def validate_version_specifier(self, value):
    """
    Check that the Version Specifier is valid.
    """
    try:
        specifiers.SpecifierSet(value)
    except specifiers.InvalidSpecifier as err:
        raise serializers.ValidationError(err)
    return value
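
# Sketch of the failure path above: invalid specifier strings raise
# InvalidSpecifier, which the validator converts into a serializer error.
from packaging import specifiers

try:
    specifiers.SpecifierSet("~=1.0, not-a-specifier")
except specifiers.InvalidSpecifier as err:
    print(f"rejected: {err}")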
def _fetch_specified_metadata(remote, project_specifiers):
    """
    Fetch metadata for content units matching project specifiers from the remote.

    Args:
        remote: The remote to fetch metadata from.
        project_specifiers (dict): Information about a project and which versions
            of a project to filter

    Returns:
        list: of content unit metadata.
    """
    remote_units = []
    for project in project_specifiers:
        digests = python_models.DistributionDigest.objects.filter(
            project_specifier=project)
        metadata_url = urljoin(remote.url, 'pypi/%s/json' % project.name)
        downloader = remote.get_downloader(metadata_url)
        downloader.fetch()
        with open(downloader.path) as fp:
            metadata = json.load(fp)

        for version, packages in metadata['releases'].items():
            for package in packages:
                # If neither specifiers nor digests have been set, add the unit.
                if not project.version_specifier and not digests.exists():
                    remote_units.append(
                        parse_metadata(metadata['info'], version, package))
                    continue

                specifier = specifiers.SpecifierSet(project.version_specifier)
                # Note: SpecifierSet("").contains(version) returns True only for
                # released versions:
                #   SpecifierSet("").contains('3.0.0') returns True
                #   SpecifierSet("").contains('3.0.0b1') returns False
                if specifier.contains(version):
                    # Add the package if the project specifier does not have an
                    # associated digest.
                    if not digests.exists():
                        remote_units.append(
                            parse_metadata(metadata['info'], version, package))
                    # Otherwise check each digest to see if it matches the specifier.
                    else:
                        for digest_type, digest in package['digests'].items():
                            if digests.filter(type=digest_type, digest=digest).exists():
                                remote_units.append(
                                    parse_metadata(metadata['info'], version, package))
                                break
    return remote_units
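
# The prerelease behavior the comment above relies on: an empty SpecifierSet
# matches final releases but excludes prereleases unless explicitly opted in.
from packaging import specifiers

empty = specifiers.SpecifierSet("")
print(empty.contains("3.0.0"))                      # True
print(empty.contains("3.0.0b1"))                    # False
print(empty.contains("3.0.0b1", prereleases=True))  # True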
def satisfied(reqs, name, version, failures):
    if name not in reqs:
        return True
    tested = []
    for constraint, _ in reqs[name]:
        spec = specifiers.SpecifierSet(constraint.specifiers)
        if spec.contains(version):
            return True
        tested.append(constraint.specifiers)
    failures.append('Constraint for %s==%s does not match requirement %s' %
                    (name, version, tested))
    return False
def version_supported(self, pyvers):
    m = re.search(r"(?:py|cp)%s" % pyvers.major, self.python_tag)
    if not m:
        return False
    requires_python = self.metadata.requires_python
    if requires_python is None:
        # The package provides no information.
        return True
    requires_python_specifier = specifiers.SpecifierSet(requires_python)
    python_version = version.parse(str(pyvers))
    return python_version in requires_python_specifier
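
# Sketch of the Requires-Python membership test used above: parse the
# interpreter version and test it against the package's declared specifiers.
from packaging import specifiers, version

requires_python = specifiers.SpecifierSet(">=3.7,<4")
print(version.parse("3.10") in requires_python)  # True
print(version.parse("3.6") in requires_python)   # False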
def satisfied(reqs, name, version, failures):
    if name not in reqs:
        return True
    tested = []
    for constraint, _ in reqs[name]:
        spec = specifiers.SpecifierSet(constraint.specifiers)
        # Pre-releases are allowed by policy but discouraged.
        if spec.contains(version, prereleases=True):
            return True
        tested.append(constraint.specifiers)
    failures.append('Constraint for %s==%s does not match requirement %s' %
                    (name, version, tested))
    return False
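
# What the prereleases=True flag above changes: contains() rejects
# prereleases by default, and the flag opts a single check in.
from packaging import specifiers

spec = specifiers.SpecifierSet(">=1.0")
print(spec.contains("2.0rc1"))                    # False
print(spec.contains("2.0rc1", prereleases=True))  # True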
def vulnerable_version_range(self) -> specifiers.SpecifierSet:
    items = self._json["node"]["vulnerableVersionRange"].split(",")
    if len(items) > 2:
        raise ValueError("Found more than 2 version specifiers!")
    vulnerable_ranges = []
    for value in items:
        value = value.strip()
        if value.startswith("= "):
            # Rewrite the single "=" form to the "==" operator packaging expects.
            vulnerable_ranges.append(value.replace("= ", "=="))
        else:
            vulnerable_ranges.append(value)
    return specifiers.SpecifierSet(",".join(vulnerable_ranges), prereleases=True)
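
# Sketch of the normalization above: "= 1.4.2" is not a valid packaging
# operator and must become "==1.4.2" before SpecifierSet will accept it.
from packaging import specifiers

raw = "= 1.4.2"  # hypothetical advisory range fragment
normalized = raw.replace("= ", "==")
print(specifiers.SpecifierSet(normalized, prereleases=True).contains("1.4.2"))  # True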
def is_installable_file(path):
    """Determine if a path can potentially be installed"""
    from ._compat import is_installable_dir, is_archive_file
    from packaging import specifiers

    if (
        hasattr(path, "keys")
        and any(key for key in path.keys() if key in ["file", "path"])
    ):
        path = urlparse(path["file"]).path if "file" in path else path["path"]
    if not isinstance(path, six.string_types) or path == "*":
        return False

    # If the string starts with a valid specifier operator, test if it is a valid
    # specifier set before making a path object (to avoid breaking windows)
    if any(path.startswith(spec) for spec in "!=<>~"):
        try:
            specifiers.SpecifierSet(path)
        # If this is not a valid specifier, just move on and try it as a path
        except specifiers.InvalidSpecifier:
            pass
        else:
            return False

    parsed = urlparse(path)
    if parsed.scheme == 'file':
        path = parsed.path

    if not os.path.exists(os.path.abspath(path)):
        return False

    lookup_path = Path(path)
    absolute_path = "{0}".format(lookup_path.absolute())
    if lookup_path.is_dir() and is_installable_dir(absolute_path):
        return True
    elif lookup_path.is_file() and is_archive_file(absolute_path):
        return True
    return False
def is_installable_file(path):
    # type: (PipfileType) -> bool
    """Determine if a path can potentially be installed"""
    from packaging import specifiers

    if isinstance(path, Mapping):
        path = convert_entry_to_path(path)

    # If the string starts with a valid specifier operator, test if it is a valid
    # specifier set before making a path object (to avoid breaking windows)
    if any(path.startswith(spec) for spec in "!=<>~"):
        try:
            specifiers.SpecifierSet(path)
        # If this is not a valid specifier, just move on and try it as a path
        except specifiers.InvalidSpecifier:
            pass
        else:
            return False

    parsed = urlparse(path)
    is_local = (
        not parsed.scheme
        or parsed.scheme == "file"
        or (len(parsed.scheme) == 1 and os.name == "nt")
    )
    if parsed.scheme and parsed.scheme == "file":
        path = vistir.compat.fs_decode(vistir.path.url_to_path(path))
    normalized_path = vistir.path.normalize_path(path)
    if is_local and not os.path.exists(normalized_path):
        return False

    is_archive = pip_shims.shims.is_archive_file(normalized_path)
    is_local_project = os.path.isdir(normalized_path) and is_installable_dir(
        normalized_path)
    if is_local and is_local_project or is_archive:
        return True
    if not is_local and pip_shims.shims.is_archive_file(parsed.path):
        return True
    return False
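
# The disambiguation trick both is_installable_file() variants use, as a
# standalone sketch: strings that parse as a SpecifierSet are requirements,
# not filesystem paths. The candidate strings here are hypothetical.
from packaging import specifiers

for candidate in (">=1.0,<2.0", "~/projects/my-package"):
    try:
        specifiers.SpecifierSet(candidate)
        print(f"{candidate!r}: specifier")
    except specifiers.InvalidSpecifier:
        print(f"{candidate!r}: treat as a path")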
def validate_lower_constraints(req_list, constraints, blacklist):
    """Return True if there is an error.

    :param req_list: RequirementsList for the head of the branch
    :param constraints: Parsed lower-constraints.txt or None
    :param blacklist: Names of packages to skip during validation
    """
    if constraints is None:
        return False

    parsed_constraints = requirement.parse(constraints)
    failed = False

    for fname, freqs in req_list.reqs_by_file.items():
        if fname == 'doc/requirements.txt':
            # Skip things that are not needed for unit or functional tests.
            continue
        print("Validating lower constraints of {}".format(fname))

        for name, reqs in freqs.items():
            if name in blacklist:
                continue
            if name not in parsed_constraints:
                print('Package {!r} is used in {} '
                      'but not in lower-constraints.txt'.format(name, fname))
                failed = True
                continue

            for req in reqs:
                spec = specifiers.SpecifierSet(req.specifiers)
                # FIXME(dhellmann): This will only find constraints
                # where the markers match the requirements list
                # exactly, so we can't do things like use different
                # constrained versions for different versions of
                # python 3 if the requirement range is expressed as
                # python_version>3.0. We can support different
                # versions if there is a different requirement
                # specification for each version of python. I don't
                # really know how smart we want this to be, because
                # I'm not sure we want to support extremely
                # complicated dependency sets.
                constraint_setting = _find_constraint(
                    req,
                    parsed_constraints[name],
                )
                if not constraint_setting:
                    print('Unable to find constraint for {} '
                          'matching {!r} or without any markers.'.format(
                              name, req.markers))
                    failed = True
                    continue

                version = constraint_setting.specifiers.lstrip('=')
                if not spec.contains(version):
                    print('Package {!r} is constrained to {} '
                          'which is incompatible with the settings {} '
                          'from {}.'.format(name, version, req, fname))
                    failed = True

                min_specs = [s for s in req.specifiers.split(',') if '>' in s]
                if not min_specs:
                    # No minimum specified. Ignore this and let some
                    # other validation trap the error.
                    continue

                expected = min_specs[0].lstrip('>=')
                if version != expected:
                    print('Package {!r} is constrained to {} '
                          'which does not match '
                          'the minimum version specifier {} in {}'.format(
                              name, version, expected, fname))
                    failed = True
    return failed
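
# The core compatibility check above, as a minimal sketch: the pinned
# lower-constraints version must fall inside the declared requirement range.
from packaging import specifiers

spec = specifiers.SpecifierSet(">=1.2.0,<2.0")  # hypothetical requirement range
print(spec.contains("1.2.0"))  # True: the lower-constraints pin is compatible
print(spec.contains("1.1.0"))  # False: the pin is below the declared minimum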
def matches(vers, cmprvers):
    '''
    Check if a version string matches a version comparison string.
    '''
    spec = p_specifiers.SpecifierSet(cmprvers)
    return p_version.Version(vers) in spec
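
# Usage sketch, assuming p_specifiers / p_version alias packaging.specifiers
# and packaging.version; comparison strings may combine several clauses:
from packaging import specifiers as p_specifiers
from packaging import version as p_version

spec = p_specifiers.SpecifierSet('>=2.0.0,!=2.5.0,<3.0.0')
print(p_version.Version('2.4.0') in spec)  # True
print(p_version.Version('2.5.0') in spec)  # False: explicitly excluded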
def vulnerable_version_range(self) -> List[specifiers.SpecifierSet]:
    return [
        specifiers.SpecifierSet(v, prereleases=True)
        for v in self._json["specs"]
    ]
def do_install(args: argparse.Namespace) -> None:
    """
    Install new models in macsyfinder local models repository.

    :param args: the arguments passed on the command line
    :type args: :class:`argparse.Namespace` object
    :rtype: None
    """
    if os.path.exists(args.package):
        remote = False
        pack_name, inst_vers = parse_arch_path(args.package)
        user_req = requirements.Requirement(f"{pack_name}=={inst_vers}")
    else:
        remote = True
        user_req = requirements.Requirement(args.package)

    pack_name = user_req.name
    inst_pack_loc = _find_installed_package(pack_name)
    if inst_pack_loc:
        pack = Package(inst_pack_loc.path)
        try:
            local_vers = version.Version(pack.metadata['vers'])
        except FileNotFoundError:
            _log.error(f"{pack_name} locally installed is corrupted.")
            _log.warning(f"You can fix it by removing '{inst_pack_loc.path}'.")
            sys.tracebacklimit = 0
            raise RuntimeError() from None
    else:
        local_vers = None

    user_specifier = user_req.specifier
    if not user_specifier and inst_pack_loc:
        # The user did not request a specific version
        # and there is already a version installed locally.
        user_specifier = specifiers.SpecifierSet(f">{local_vers}")

    if remote:
        try:
            all_available_versions = _get_remote_available_versions(pack_name, args.org)
        except (ValueError, MacsyDataLimitError) as err:
            _log.error(str(err))
            sys.tracebacklimit = 0
            raise ValueError from None
    else:
        all_available_versions = [inst_vers]

    compatible_version = list(user_specifier.filter(all_available_versions))
    if not compatible_version and local_vers:
        target_vers = version.Version(all_available_versions[0])
        if target_vers == local_vers and not args.force:
            _log.warning(f"Requirement already satisfied: {pack_name}{user_specifier} in {pack.path}.\n"
                         f"To force installation use option -f --force-reinstall.")
            return None
        elif target_vers < local_vers and not args.force:
            _log.warning(f"{pack_name} ({local_vers}) is already installed.\n"
                         f"To downgrade to {target_vers} use option -f --force-reinstall.")
            return None
        else:
            # target_vers == local_vers and args.force:
            # target_vers < local_vers and args.force:
            pass
    elif not compatible_version:
        # No compatible version and no local version.
        _log.warning(f"Could not find version that satisfied '{pack_name}{user_specifier}'")
        return None
    else:
        # There exists at least one compatible version.
        target_vers = version.Version(compatible_version[0])
        if inst_pack_loc:
            if target_vers > local_vers and not args.upgrade:
                _log.warning(f"{pack_name} ({local_vers}) is already installed but {target_vers} version is available.\n"
                             f"To install it please run 'macsydata install --upgrade {pack_name}'")
                return None
            elif target_vers == local_vers and not args.force:
                _log.warning(f"Requirement already satisfied: {pack_name}{user_specifier} in {pack.path}.\n"
                             f"To force installation use option -f --force-reinstall.")
                return None
            else:
                # target_vers > local_vers and args.upgrade:
                # a new package has to be installed
                pass

    # If we reach this point, a new package has to be installed.
    if remote:
        _log.info(f"Downloading {pack_name} ({target_vers}).")
        model_index = RemoteModelIndex(org=args.org, cache=args.cache)
        _log.debug(f"call download with pack_name={pack_name}, vers={target_vers}")
        arch_path = model_index.download(pack_name, str(target_vers))
    else:
        model_index = LocalModelIndex(cache=args.cache)
        arch_path = args.package

    _log.info(f"Extracting {pack_name} ({target_vers}).")
    cached_pack = model_index.unarchive_package(arch_path)

    if args.user:
        dest = os.path.realpath(os.path.join(os.path.expanduser('~'), '.macsyfinder', 'data'))
        if os.path.exists(dest) and not os.path.isdir(dest):
            raise RuntimeError(f"'{dest}' already exists and is not a directory.")
        elif not os.path.exists(dest):
            os.makedirs(dest)
    else:
        defaults = MacsyDefaults()
        config = Config(defaults, argparse.Namespace())
        dest = config.models_dir()

    if inst_pack_loc:
        old_pack_path = f"{inst_pack_loc.path}.old"
        shutil.move(inst_pack_loc.path, old_pack_path)

    _log.info(f"Installing {pack_name} ({target_vers}) in {dest}")
    try:
        shutil.move(cached_pack, dest)
    except PermissionError as err:
        _log.error(f"{dest} is not writable: {err}")
        _log.warning("Maybe you can use --user option to install in your HOME.")
        sys.tracebacklimit = 0
        raise ValueError() from None

    _log.info("Cleaning.")
    shutil.rmtree(pathlib.Path(cached_pack).parent)
    if inst_pack_loc:
        shutil.rmtree(old_pack_path)

    _log.info(f"The models {pack_name} ({target_vers}) have been installed successfully.")
async def get_relevant_packages(self, metadata, includes, excludes, prereleases):
    """
    Provided project metadata and specifiers, return the matching packages.

    Compare the defined specifiers against the project metadata and create a
    deduplicated list of metadata for the packages matching the criteria.

    Args:
        metadata (dict): Metadata about the project from PyPI.
        includes (iterable): An iterable of project_specifiers for package
            versions to include.
        excludes (iterable): An iterable of project_specifiers for package
            versions to exclude.
        prereleases (bool): Whether or not to include pre-release package
            versions in the sync.

    Returns:
        list: List of dictionaries containing Python package metadata
    """
    # The set of project release metadata, in the format
    # {"version": [package1, package2, ...]}
    releases = metadata['releases']
    # The packages we want to return.
    remote_packages = []

    # Delete versions/packages matching the exclude specifiers.
    for exclude_specifier in excludes:
        # Fast path: If one of the specifiers matches all versions and we don't
        # have any digests to reference, clear the whole dict, we're done.
        if not exclude_specifier.version_specifier:
            releases.clear()
            break
        # Slow path: We have to check all the metadata.
        for version, packages in list(releases.items()):  # Prevent iterator invalidation.
            specifier = specifiers.SpecifierSet(
                exclude_specifier.version_specifier,
                prereleases=prereleases)
            # First check the version specifier; if it matches, check the digests
            # and delete matching packages. If there are no digests, delete them all.
            if specifier.contains(version):
                del releases[version]

    for version, packages in releases.items():
        for include_specifier in includes:
            # Fast path: If one of the specifiers matches all versions and we don't
            # have any digests to reference, return all of the packages for the version.
            if prereleases and not include_specifier.version_specifier:
                for package in packages:
                    remote_packages.append(
                        parse_metadata(metadata['info'], version, package))
                # This breaks the inner loop, e.g. don't check any other
                # include_specifiers. We want to continue the outer loop.
                break
            specifier = specifiers.SpecifierSet(
                include_specifier.version_specifier,
                prereleases=prereleases)
            # First check the version specifier; if it matches, check the digests
            # and include matching packages. If there are no digests, include them all.
            if specifier.contains(version):
                for package in packages:
                    remote_packages.append(
                        parse_metadata(metadata['info'], version, package))
    return remote_packages
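
# What passing prereleases at construction time does above: it changes what
# contains() matches for the whole set, which is how the include/exclude
# checks honor the sync's prereleases flag.
from packaging import specifiers

spec = specifiers.SpecifierSet(">=3.0", prereleases=True)
print(spec.contains("3.1b1"))                               # True: opted in
print(specifiers.SpecifierSet(">=3.0").contains("3.1b1"))   # False by default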