def test_tilda(specv, version, ok):
    """A tilde range must accept/reject `version` exactly as `ok` says."""
    # Ruby's pessimistic operator (~>) has the same behavior as ~
    for prefix in ('~', '~>'):
        assert (version in RangeSpecifier(prefix + specv)) is ok
def categorize_versions(
    all_versions: Set[str],
    aff_version_range: str,
    fixed_version_range: str,
) -> Tuple[Set[str], Set[str]]:
    """
    Separate affected versions from unaffected versions using the
    given ranges.

    :return: impacted, resolved versions
    """
    # NPM registry has no data regarding this package, we skip these
    if not all_versions:
        return set(), set()

    affected_spec = RangeSpecifier(aff_version_range)
    fixed_spec = RangeSpecifier(fixed_version_range)

    impacted: Set[str] = set()
    resolved: Set[str] = set()
    for version in all_versions:
        # A version is unaffected when it falls inside the fixed range
        # or lies entirely outside the affected range.
        if version in fixed_spec or version not in affected_spec:
            resolved.add(version)
        else:
            impacted.add(version)
    return impacted, resolved
def _load_advisory(self, path: str) -> Optional[Advisory]:
    """
    Parse one RustSec advisory TOML file at `path` into an Advisory.

    Versions of the crate are fetched from the crates API and split
    into impacted/resolved sets using the unaffected, affected and
    patched ranges declared in the advisory.
    """
    record = load_toml(path)
    advisory = record.get("advisory", {})
    crate_name = advisory["package"]

    references = []
    if advisory.get("url"):
        references.append(Reference(url=advisory["url"]))

    all_versions = self.crates_api.get(crate_name)

    # affected function ranges live under [affected.functions]; flatten
    # the per-function lists into one set of specifiers
    affected_ranges = {
        RangeSpecifier(r)
        for r in chain.from_iterable(
            record.get("affected", {}).get("functions", {}).values())
    }
    unaffected_ranges = {
        RangeSpecifier(r)
        for r in record.get("versions", {}).get("unaffected", [])
    }
    resolved_ranges = {
        RangeSpecifier(r)
        for r in record.get("versions", {}).get("patched", [])
    }

    unaffected, affected = categorize_versions(all_versions, unaffected_ranges,
                                               affected_ranges, resolved_ranges)

    impacted_purls = {
        PackageURL(type="cargo", name=crate_name, version=v)
        for v in affected
    }
    resolved_purls = {
        PackageURL(type="cargo", name=crate_name, version=v)
        for v in unaffected
    }

    # pick the first CVE alias, if any, as the canonical CVE id
    cve_id = None
    if "aliases" in advisory:
        for alias in advisory["aliases"]:
            if alias.startswith("CVE-"):
                cve_id = alias
                break

    references.append(
        Reference(
            reference_id=advisory["id"],
            url="https://rustsec.org/advisories/{}.html".format(
                advisory["id"]),
        ))

    return Advisory(
        summary=advisory.get("description", ""),
        impacted_package_urls=impacted_purls,
        resolved_package_urls=resolved_purls,
        cve_id=cve_id,
        vuln_references=references,
    )
def extract_vuln_pkgs(self, vuln_info):
    """
    Parse an nginx advisory vulnerability line (e.g.
    "Vulnerable: 1.5.9, 0.8.40-1.5.8") into a set of generic nginx
    PackageURLs for every published version matched by the ranges.

    Returns an empty dict-literal set when the advisory says "none".
    """
    vuln_status, version_infos = vuln_info.split(": ")
    if "none" in version_infos:
        return {}

    version_ranges = []
    windows_only = False
    for version_info in version_infos.split(", "):
        if "-" not in version_info:
            # Discrete version such as "1.5.9": match it exactly.
            # Bug fix: previously only the first character
            # (version_info[0]) was used, producing a bogus range like "1".
            version_ranges.append(RangeSpecifier(version_info))
            continue

        windows_only = "nginx/Windows" in version_info
        version_info = version_info.replace("nginx/Windows", "")
        lower_bound, upper_bound = version_info.split("-")
        version_ranges.append(
            RangeSpecifier(f">={lower_bound},<={upper_bound}"))

    valid_versions = find_valid_versions(
        self.version_api.get("nginx/nginx"), version_ranges)

    qualifiers = {}
    if windows_only:
        qualifiers["os"] = "windows"

    return {
        PackageURL(type="generic",
                   name="nginx",
                   version=version,
                   qualifiers=qualifiers)
        for version in valid_versions
    }
def test_get_version_ranges_from_state(self):
    """Each OVAL state in the fixture must map to its expected range."""
    states = self.parsed_oval.oval_document.getStates()
    assert len(states) == 2
    expected = [
        RangeSpecifier("<1.14-2"),
        RangeSpecifier("<0.2.8.9-1ubuntu1"),
    ]
    for state, expected_range in zip(states, expected):
        assert self.parsed_oval.get_version_ranges_from_state(state) == expected_range
def test_get_data(self):
    """get_data() must return fully-parsed records for the two CVEs in
    the SUSE OVAL fixture, in document order, with package lists,
    version ranges, descriptions and reference URLs intact."""
    expected_data = [
        {
            "test_data": [
                {
                    "package_list": ["cacti"],
                    "version_ranges": RangeSpecifier("<1.2.11-lp151.3.6"),
                },
                {
                    "package_list": ["cacti-spine"],
                    "version_ranges": RangeSpecifier("<1.2.11-lp151.3.6"),
                },
            ],
            "description": '\n Cacti 0.8.7e and earlier allows remote authenticated administrators to gain privileges by modifying the "Data Input Method" for the "Linux - Get Memory Usage" setting to contain arbitrary commands.\n ',
            "vuln_id": "CVE-2009-4112",
            "reference_urls": {
                "https://bugzilla.suse.com/1122535",
                "https://bugzilla.suse.com/558664",
                "http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2009-4112",
                "https://www.suse.com/security/cve/CVE-2009-4112.html",
            },
        },
        {
            "test_data": [
                {
                    "package_list": ["apache2-mod_perl"],
                    "version_ranges": RangeSpecifier("<2.0.11-lp151.3.3"),
                },
                {
                    "package_list": ["apache2-mod_perl-devel"],
                    "version_ranges": RangeSpecifier("<2.0.11-lp151.3.3"),
                },
            ],
            "description": "\n mod_perl 2.0 through 2.0.10 allows attackers to execute arbitrary Perl code by placing it in a user-owned .htaccess file, because (contrary to the documentation) there is no configuration option that permits Perl code for the administrator's control of HTTP request processing without also permitting unprivileged users to run Perl code in the context of the user account that runs Apache HTTP Server processes.\n ",
            "vuln_id": "CVE-2011-2767",
            "reference_urls": {
                "https://bugzilla.suse.com/1156944",
                "https://www.suse.com/security/cve/CVE-2011-2767.html",
                "http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2011-2767",
            },
        },
    ]
    assert expected_data == self.parsed_oval.get_data()
def test_python_compat(pdep: str, prel: str, ok: bool):
    """A dependency whose python_version marker is `pdep` must report
    python_compat == `ok` against a release that supports `prel`."""
    dep = Dependency(
        raw_name='pathlib2',
        constraint=None,
        repo=None,
        # translate the python range into an environment marker
        marker=Markers(RangeSpecifier(pdep).to_marker('python_version')),
    )
    release = Release(
        raw_name='pathlib2',
        version='2.3.3',
        time=None,
        python=RangeSpecifier(prel),
    )
    dep.groups = [Group(number=1, releases=[release])]
    assert dep.python_compat is ok
def categorize_versions(
    package_name: str,
    all_versions: Set[str],
    version_specs: Iterable[str],
) -> Tuple[Set[PackageURL], Set[PackageURL]]:
    """
    Split `all_versions` of `package_name` into impacted and resolved
    PyPI package URLs using `version_specs`.

    :return: impacted, resolved purls
    """
    specs = [RangeSpecifier(spec) for spec in version_specs]

    def as_purl(version):
        # every purl here is a pypi package for the same name
        return PackageURL(name=package_name, type="pypi", version=version)

    impacted_versions = {
        version for version in all_versions
        if any(version in spec for spec in specs)
    }
    impacted_purls = {as_purl(version) for version in impacted_versions}
    resolved_purls = {as_purl(version)
                      for version in all_versions - impacted_versions}
    return impacted_purls, resolved_purls
def get_version_ranges_from_state(
        self, state: OvalState) -> Optional[RangeSpecifier]:
    """
    Return the version range encoded by the first usable field of
    `state`, or None when no field has a translatable operation,
    an operand and a version text.
    """
    # a handful of states use an exclusive "x" form; map them to the
    # equivalent wildcard spec
    x_version_ranges = {
        "<2.0.x": "2.0.x",
        "<3.x": "3.x",
        "<4.6.x": "4.6.x",
        "<8.0.x": "8.0.x",
        "<8.x": "8.x",
    }
    for var in state.element:
        operation = var.get("operation")
        if not operation:
            continue
        operand = self.translations.get(operation) or ""
        version = var.text or ""
        if not (operand and version):
            continue
        spec = (operand + version).replace("only", "").strip()
        # 0: is the default epoch marker, drop it
        spec = spec.replace("0:", "").strip()
        spec = x_version_ranges.get(spec, spec)
        return RangeSpecifier(spec)
def __init__(self, source, spec):
    """
    Start tracking constraints, seeded with `spec` from `source`.

    source (Dependency)
    spec (str, LegacySpecifier, Specifier)
    """
    # dependency name -> accumulated specifier for that dependency
    self._specs = {source.name: RangeSpecifier(spec)}
    # dependency name -> number of the group the spec came from
    self._groups = {source.name: source.group.number}
def loads(self, content: str) -> RootDependency:
    """
    Parse Pipfile (TOML) content into a RootDependency tree.

    Reads [[source]] entries into a repositories registry, the
    required python version, and the packages/dev-packages sections.
    """
    doc = tomlkit.parse(content)
    deps = []
    root = RootDependency()

    # collect configured package indexes
    repo = RepositoriesRegistry()
    if 'source' in doc:
        for repo_info in doc['source']:
            repo.add_repo(name=repo_info['name'], url=repo_info['url'])
    repo.attach_config()

    python = doc.get('requires', {}).get('python_version', '')
    if python not in {'', '*'}:
        root.python = RangeSpecifier('==' + python)

    for section, is_dev in [('packages', False), ('dev-packages', True)]:
        for name, content in doc.get(section, {}).items():
            subdeps = self._make_deps(root, name, content)

            # when the entry pins an index, point warehouse-backed
            # dependencies at that index
            if isinstance(content, dict) and 'index' in content:
                dep_repo = repo.make(name=content['index'])
                for dep in subdeps:
                    if isinstance(dep.repo, WarehouseBaseRepo):
                        dep.repo = dep_repo

            for dep in subdeps:
                # Pipfile doesn't support any other envs
                dep.envs = {'dev'} if is_dev else {'main'}
            deps.extend(subdeps)
    root.attach_dependencies(deps)
    return root
def parse_version_ranges(string):
    """
    Yield RangeSpecifier objects obtained by parsing `string`.

    >> list(parse_version_ranges("Affects: 9.0.0.M1 to 9.0.0.M9"))
    [RangeSpecifier(<=9.0.0.M9,>=9.0.0.M1)]
    >> list(parse_version_ranges("Affects: 9.0.0.M1"))
    [RangeSpecifier(<=9.0.0.M1,>=9.0.0.M1)]
    >> list(parse_version_ranges("Affects: 9.0.0.M1 to 9.0.0.M9, 1.2.3 to 3.4.5"))
    [RangeSpecifier(<=9.0.0.M9,>=9.0.0.M1), RangeSpecifier(<=3.4.5,>=1.2.3)]
    """
    version_rng_txt = string.split("Affects:")[-1].strip()
    version_ranges = version_rng_txt.split(",")
    for version_range in version_ranges:
        if "to" in version_range:
            lower_bound, upper_bound = version_range.split("to")
        elif "-" in version_range and not any(
                i.isalpha() for i in version_range):
            # dashed range like "1.2.3-3.4.5"; skip dashes inside
            # alphanumeric version labels such as "9.0.0-M1"
            lower_bound, upper_bound = version_range.split("-")
        else:
            # a single version is both the lower and upper bound
            lower_bound = upper_bound = version_range
        # Bug fix: the two clauses were previously concatenated without
        # a separating comma (">=X<=Y") and with surrounding whitespace,
        # which does not parse as two constraints. Join with a comma,
        # consistent with to_version_ranges().
        yield RangeSpecifier(
            ">=" + lower_bound.strip() + ",<=" + upper_bound.strip())
def loads(self, content) -> RootDependency:
    """
    Parse Pipfile.lock (JSON) content into a RootDependency tree.

    Reads sources and required python version from the _meta section,
    then the default/develop dependency sections.
    """
    # preserve key order so dependency ordering stays deterministic
    doc = json.loads(content, object_pairs_hook=OrderedDict)
    deps = []
    root = RootDependency()

    repo = RepositoriesRegistry()
    for repo_info in doc.get('_meta', {}).get('sources', []):
        repo.add_repo(name=repo_info['name'], url=repo_info['url'])
    repo.attach_config()

    python = doc.get('_meta', {}).get('requires', {}).get('python_version', '')
    if python not in {'', '*'}:
        root.python = RangeSpecifier('==' + python)

    for section, is_dev in [('default', False), ('develop', True)]:
        for name, content in doc.get(section, {}).items():
            subdeps = self._make_deps(root, name, content)

            # set repo
            if 'index' in content:
                dep_repo = repo.make(name=content['index'])
            else:
                dep_repo = repo
            for dep in subdeps:
                if isinstance(dep.repo, WarehouseBaseRepo):
                    dep.repo = dep_repo

            # set envs
            for dep in subdeps:
                dep.envs = {'dev'} if is_dev else {'main'}
            deps.extend(subdeps)
    root.attach_dependencies(deps)
    return root
def loads(self, content) -> RootDependency:
    """
    Parse poetry.lock (TOML) content into a RootDependency tree.

    Collects repositories, the supported python range, and maps each
    locked package to its environments (extras plus category).
    """
    doc = tomlkit.parse(content)
    root = RootDependency(
        package=PackageRoot(path=self.project_path or Path()),
    )
    root.python = RangeSpecifier(
        doc.get('metadata', {}).get('python-versions', '*'))

    # get repositories
    root.repo = RepositoriesRegistry()
    if doc.get('source'):
        for source in doc['source']:
            root.repo.add_repo(url=source['url'], name=source['name'])
    root.repo.attach_config()

    # package name -> set of env names it belongs to
    envs = defaultdict(set)
    for extra, deps in doc.get('extras', {}).items():
        for dep in deps:
            envs[dep].add(extra)
    for content in doc.get('package', []):
        # category can be "dev" or "main"
        envs[content['name']].add(content['category'])

    deps = []
    for content in doc.get('package', []):
        deps.extend(
            self._make_deps(
                root=root,
                content=content,
                envs=envs[content['name']],
                repo=root.repo,
            ))
    root.attach_dependencies(deps)
    return root
def loads(self, content: str) -> RootDependency: doc = yaml_load(content) # make root root = RootDependency( package=PackageRoot(path=self.project_path or Path()), ) if 'name' in doc: root.raw_name = doc['name'] root.package.name = doc['name'] root.repo = CondaRepo(channels=doc.get('channels', [])) # make dependencies for req in doc.get('dependencies', []): parsed = root.repo.parse_req(req) if parsed['name'] == 'python': if parsed.get('version', '*') not in ('*', ''): spec = '.'.join((parsed['version'].split('.') + ['*', '*'])[:3]) root.python = RangeSpecifier(spec) continue root.attach_dependencies(DependencyMaker.from_params( raw_name=parsed['name'], constraint=parsed.get('version', '*'), source=root, repo=root.repo, )) return root
def vulns(self) -> Dict[str, Tuple[SafetyVulnInfo, ...]]:
    """
    Return the safety DB as a mapping of package name to its
    vulnerability records, fetched from self.url and cached on disk
    for 24 hours.
    """
    cache = JSONCache('pyup.io', ttl=3600 * 24)
    records = cache.load()
    if records is None:
        with requests_session() as session:
            response = session.get(self.url)
            response.raise_for_status()
            records = response.json()
            cache.dump(records)

    vulns = dict()
    for name, subrecords in records.items():
        package_vulns = []
        for record in subrecords:
            # pull URLs out of the advisory text, keep the prose as
            # the description
            links = tuple(REX_LINK.findall(record['advisory']))
            description = REX_LINK.sub('', record['advisory'])
            if record['cve']:
                link = 'https://nvd.nist.gov/vuln/detail/' + record['cve']
                links += (link, )
            package_vulns.append(
                SafetyVulnInfo(
                    name=name,
                    description=description,
                    links=links,
                    # each record may list several specs; OR them together
                    specifier=RangeSpecifier(' || '.join(record['specs'])),
                ))
        vulns[name] = tuple(package_vulns)
    return vulns
def loads(self, content: str) -> RootDependency:
    """
    Parse Pipfile (TOML) content into a RootDependency tree.
    """
    doc = tomlkit.parse(content)
    deps = []
    root = RootDependency(self._get_name(content=content))

    # index name -> index url, from [[source]] entries
    repos = dict()
    if 'source' in doc:
        for repo in doc['source']:
            repos[repo['name']] = repo['url']

    python = doc.get('requires', {}).get('python_version', '')
    if python not in {'', '*'}:
        root.python = RangeSpecifier('==' + python)

    for section, is_dev in [('packages', False), ('dev-packages', True)]:
        for name, content in doc.get(section, {}).items():
            subdeps = self._make_deps(root, name, content)
            # NOTE(review): `content` may be a plain version string here,
            # in which case `'index' in content` is a substring test, and
            # `repos[repo_name]` raises KeyError for an undeclared index
            # name — confirm whether both are intended.
            if 'index' in content:
                repo_name = content.get('index')
                for dep in subdeps:
                    dep.repo = WareHouseRepo(
                        name=repo_name,
                        url=repos[repo_name],
                    )
            for dep in subdeps:
                # Pipfile doesn't support any other envs
                dep.envs = {'dev'} if is_dev else {'main'}
            deps.extend(subdeps)
    root.attach_dependencies(deps)
    return root
def __init__(self, source: Union['Dependency', 'ExtraDependency', 'RootDependency'],
             spec: Any) -> None:
    """
    Start tracking constraints, seeded with `spec` from `source`.

    source (Dependency)
    spec (str, LegacySpecifier, Specifier)
    """
    # dependency name -> accumulated specifier for that dependency
    self._specs = {source.name: RangeSpecifier(spec)}
    # dependency name -> number of the group the spec came from
    self._groups = {source.name: source.group.number}
def get_releases(self, dep) -> tuple:
    """
    Return all Release objects for `dep` found via the index links,
    sorted newest-first. Files whose name or version cannot be matched
    to the dependency are skipped with a warning.
    """
    links = self._get_links(name=dep.base_name)
    releases_info = dict()
    for link in links:
        name, version = self._parse_name(link['name'])
        if canonicalize_name(name) != canonicalize_name(dep.base_name):
            logger.warning('bad dist name', extra=dict(
                dist_name=link['name'],
                package_name=dep.base_name,
                reason='package name does not match',
            ))
            continue
        if not version:
            logger.warning('bad dist name', extra=dict(
                dist_name=link['name'],
                package_name=dep.base_name,
                reason='no version specified',
            ))
            continue

        # accumulate per-version file digests and python requirements
        if version not in releases_info:
            releases_info[version] = dict(hashes=[], pythons=[])
        if link['digest']:
            releases_info[version]['hashes'].append(link['digest'])
        if link['python']:
            releases_info[version]['pythons'].append(link['python'])

    # init releases
    releases = []
    prereleases = []
    for version, info in releases_info.items():
        # ignore version if no files for release
        release = Release(
            raw_name=dep.raw_name,
            version=version,
            time=datetime(1970, 1, 1, 0, 0),
            python=RangeSpecifier(' || '.join(info['pythons'])),
            hashes=tuple(info['hashes']),
            extra=dep.extra,
        )

        # filter prereleases if needed
        if release.version.is_prerelease:
            prereleases.append(release)
            if not self.prereleases and not dep.prereleases:
                continue
        releases.append(release)

    # special case for black: if there is no releases, but found some
    # prereleases, implicitly allow prereleases for this package
    if not releases and prereleases:
        releases = prereleases

    releases.sort(reverse=True)
    return tuple(releases)
def apply(self, dep, spec) -> None:
    """Record (or replace) the constraint `spec` contributed by `dep`."""
    name = dep.name
    if name in self._groups:
        if self._groups[name] == dep.group.number:
            # don't apply same group twice
            return
        # a different group of this package was applied before: drop it
        self.unapply(name)
    # save params
    self._specs[name] = RangeSpecifier(spec)
    self._groups[name] = dep.group.number
def get_releases(self, dep) -> tuple:
    """
    Return all Release objects for `dep` from the simple index,
    sorted newest-first. The link listing is cached on disk.
    """
    # retrieve data
    cache = JSONCache(
        'warehouse-simple',
        urlparse(self.url).hostname,
        'links',
        dep.base_name,
        ttl=config['cache']['ttl'],
    )
    links = cache.load()
    if links is None:
        links = list(self._get_links(name=dep.base_name))
        cache.dump(links)

    releases_info = dict()
    for link in links:
        name, version = self._parse_name(link['name'])
        # skip files of other packages or without a parsable version
        if canonicalize_name(name) != dep.name:
            continue
        if not version:
            continue

        # accumulate per-version file digests and python requirements
        if version not in releases_info:
            releases_info[version] = dict(hashes=[], pythons=[])
        if link['digest']:
            releases_info[version]['hashes'].append(link['digest'])
        if link['python']:
            releases_info[version]['pythons'].append(link['python'])

    # init releases
    releases = []
    prereleases = []
    for version, info in releases_info.items():
        # ignore version if no files for release
        release = Release(
            raw_name=dep.raw_name,
            version=version,
            time=datetime(1970, 1, 1, 0, 0),
            python=RangeSpecifier(' || '.join(info['pythons'])),
            hashes=tuple(info['hashes']),
            extra=dep.extra,
        )

        # filter prereleases if needed
        if release.version.is_prerelease:
            prereleases.append(release)
            if not self.prereleases and not dep.prereleases:
                continue
        releases.append(release)

    # special case for black: if there is no releases, but found some
    # prereleases, implicitly allow prereleases for this package
    if not releases and prereleases:
        releases = prereleases

    releases.sort(reverse=True)
    return tuple(releases)
def test_python_compat(pdep: str, prel: str, ok: bool):
    """A dependency whose python_version marker is `pdep` must report
    python_compat == `ok` against a release that supports `prel`."""
    dep = Dependency(
        raw_name='pathlib2',
        constraint=None,
        repo=None,
        # track the python range as an environment marker
        marker=MarkerTracker().apply(
            source='root',
            markers=RangeSpecifier(pdep).to_marker('python_version'),
        ),
    )
    # sanity: the marker was actually recorded and is non-empty
    assert 'python_version' in str(dep.marker)
    assert bool(dep.marker) is True
    release = Release(
        raw_name='pathlib2',
        version='2.3.3',
        time=None,
        python=RangeSpecifier(prel),
    )
    dep.groups = [Group(number=1, releases=[release])]
    assert dep.python_compat is ok
def test_categorize_versions():
    """categorize_versions must bucket flatbuffers versions correctly."""
    versions = set(MOCKED_CRATES_API_VERSIONS["flatbuffers"])
    safe, vulnerable = categorize_versions(
        versions,
        {RangeSpecifier("< 0.4.0")},                               # unaffected
        {RangeSpecifier(">= 0.4.0"), RangeSpecifier("<= 0.6.0")},  # affected
        {RangeSpecifier(">= 0.6.1")},                              # resolved
    )
    # len + membership in the original is equivalent to exact equality
    assert safe == {"0.3.0", "0.6.5"}
    assert vulnerable == {"0.5.0"}
def test_categorize_versions():
    """categorize_versions must bucket flatbuffers versions correctly."""
    versions = set(MOCKED_CRATES_API_VERSIONS['flatbuffers'])
    safe, vulnerable = categorize_versions(
        versions,
        {RangeSpecifier('< 0.4.0')},                               # unaffected
        {RangeSpecifier('>= 0.4.0'), RangeSpecifier('<= 0.6.0')},  # affected
        {RangeSpecifier('>= 0.6.1')},                              # resolved
    )
    # len + membership in the original is equivalent to exact equality
    assert safe == {'0.3.0', '0.6.5'}
    assert vulnerable == {'0.5.0'}
def to_version_ranges(version_range_text):
    """Translate a comma-separated range description into a list of
    RangeSpecifier objects."""
    specs = []
    for expression in version_range_text.split(","):
        if "to" in expression:
            # eg expression == "3.2.0 to 3.2.1": bounded on both sides
            low, high = expression.split("to")
            specs.append(RangeSpecifier(f">={low},<={high}"))
        elif "and later" in expression:
            # eg expression == "2.1.1 and later": lower bound only
            bound = expression.replace("and later", "")
            specs.append(RangeSpecifier(f">={bound}"))
        else:
            # eg expression == "3.0.0": a single discrete version
            specs.append(RangeSpecifier(expression))
    return specs
def __ior__(self, other):
    """Merge another Constraint into this one in place; names present
    in both get their specifier sets OR-ed together."""
    if not isinstance(other, Constraint):
        return NotImplemented
    for name, group in other._groups.items():
        self._groups[name] = group
        incoming = other._specs[name]
        if name in self._specs:
            # combine both specifier sets so either may match
            merged = str(self._specs[name]) + '||' + str(incoming)
            self._specs[name] = RangeSpecifier(merged)
        else:
            self._specs[name] = incoming
    return self
def test_get_data(self):
    """get_data() must return fully-parsed records for the two CVEs in
    the Ubuntu OVAL fixture, in document order, with package lists,
    version ranges, descriptions and reference URLs intact."""
    expected_data = [
        {
            "test_data": [{
                "package_list": ["potrace"],
                "version_ranges": RangeSpecifier("<1.14-2"),
            }],
            "description": "Heap-based buffer overflow in the bm_readbody_bmp function in bitmap_io.c in potrace before 1.13 allows remote attackers to have unspecified impact via a crafted BMP image, a different vulnerability than CVE-2016-8698, CVE-2016-8699, CVE-2016-8700, CVE-2016-8701, and CVE-2016-8702.",
            "vuln_id": "CVE-2016-8703",
            "reference_urls": {
                'http://people.canonical.com/~ubuntu-security/cve/2016/CVE-2016-8703.html',
                'https://blogs.gentoo.org/ago/2016/08/08/potrace-multiplesix-heap-based-buffer-overflow-in-bm_readbody_bmp-bitmap_io-c/',
                'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-8703'
            }
        },
        {
            "test_data": [{
                "package_list": ["tor"],
                "version_ranges": RangeSpecifier("<0.2.8.9-1ubuntu1"),
            }],
            "description": "Tor before 0.2.8.9 and 0.2.9.x before 0.2.9.4-alpha had internal functions that were entitled to expect that buf_t data had NUL termination, but the implementation of or/buffers.c did not ensure that NUL termination was present, which allows remote attackers to cause a denial of service (client, hidden service, relay, or authority crash) via crafted data.",
            "vuln_id": "CVE-2016-8860",
            "reference_urls": {
                'http://people.canonical.com/~ubuntu-security/cve/2016/CVE-2016-8860.html',
                'https://trac.torproject.org/projects/tor/ticket/20384',
                'https://blog.torproject.org/blog/tor-0289-released-important-fixes',
                'https://github.com/torproject/tor/commit/3cea86eb2fbb65949673eb4ba8ebb695c87a57ce',
                'http://www.openwall.com/lists/oss-security/2016/10/18/11',
                'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-8860',
            }
        },
    ]
    assert expected_data == self.parsed_oval.get_data()
def get_versionsrngs_from_state(
        self, state: OvalState) -> Optional[RangeSpecifier]:
    """
    Return the version range encoded by the first field of `state`
    that carries a translatable operation, or None when there is none.
    """
    for var in state.element:
        operation = var.get('operation')
        if not operation or operation not in self.translations:
            continue
        operand = self.translations[operation]
        # Robustness fix: `var.text` is None for empty elements, which
        # previously raised TypeError on concatenation. Skip such
        # fields instead (same guard as get_version_ranges_from_state).
        version = var.text or ''
        if not version:
            continue
        return RangeSpecifier(operand + version)
def _make_deps(self, root, name: str, content, envs: set) -> List[Dependency]:
    """
    Build Dependency objects for one poetry dependency entry.

    `content` is either a plain version string or a table with
    optional file/path/git source, platform/python markers, extras
    and prerelease flags.
    """
    if isinstance(content, str):
        # plain "name = version" entry
        deps = [
            Dependency(
                raw_name=name,
                constraint=Constraint(root, content),
                repo=root.repo,
                envs=envs,
            )
        ]
        return deps

    # get link
    url = content.get('file') or content.get('path')
    if url and not url.startswith('http'):
        url = str(self._make_dependency_path_absolute(Path(url)))
    if not url and 'git' in content:
        url = 'git+' + content['git']
        # pin the git source to a revision, branch or tag when given
        rev = content.get('rev') or content.get('branch') or content.get('tag')
        if rev:
            url += '@' + rev

    # make marker
    markers = []
    # https://www.python.org/dev/peps/pep-0496/
    if 'platform' in content:
        markers.append('sys_platform == "{}" '.format(content['platform']))
    if 'python' in content:
        markers.append(
            RangeSpecifier(content['python']).to_marker('python_version'))
    markers = ' and '.join(markers)

    # poetry plans to remove the allows-prereleases key,
    # and will only use the allow-prereleases key instead.
    is_prereleases = content.get('allows-prereleases', False) or content.get(
        'allow-prereleases', False)

    deps = DependencyMaker.from_params(
        raw_name=name,
        constraint=Constraint(root, content.get('version', '')),
        source=root,
        extras=set(content.get('extras', [])),
        marker=markers or None,
        url=url,
        editable=content.get('develop', False),
        envs=envs,
        prereleases=is_prereleases,
    )
    return deps
def get_pkg_versions_from_ranges(self, version_range_list):
    """Take a list of affected version ranges of a package and return a
    tuple of (safe package versions, vulnerable package versions)."""
    all_versions = self.version_api.get("istio/istio")
    specs = [RangeSpecifier(rng) for rng in version_range_list]
    vuln_pkg_versions = [
        version for version in all_versions
        if any(version in spec for spec in specs)
    ]
    safe_pkg_versions = set(all_versions) - set(vuln_pkg_versions)
    return safe_pkg_versions, vuln_pkg_versions