def build_dep(name, version, scope, is_runtime=True, is_optional=False):
    """
    Return DependentPackage from the provided data.
    """
    # TODO: these can be more complex for SDKs
    # https://dart.dev/tools/pub/dependencies#dependency-sources
    if isinstance(version, dict) and 'sdk' in version:
        # e.g. {'sdk': 'flutter'}: a wart we keep flattened as the requirement
        version = ', '.join(': '.join([k, str(v)]) for k, v in version.items())

    # A version made only of digits and dots is an exact pin.
    if version.replace('.', '').isdigit():
        purl = PackageURL(type='pubspec', name=name, version=version)
        is_resolved = True
    else:
        purl = PackageURL(type='pubspec', name=name)
        is_resolved = False

    return models.DependentPackage(
        purl=purl.to_string(),
        requirement=version,
        scope=scope,
        is_runtime=is_runtime,
        is_optional=is_optional,
        is_resolved=is_resolved,
    )
def categorize_versions(
    package_name: str,
    all_versions: Set[str],
    version_specs: Iterable[str],
) -> Tuple[Set[PackageURL], Set[PackageURL]]:
    """
    Partition ``all_versions`` of ``package_name`` into impacted and resolved
    PyPI purls: a version is impacted when it matches any of ``version_specs``;
    every remaining version is resolved.

    :return: impacted, resolved purls
    """
    impacted_versions, impacted_purls = set(), []
    vurl_specs = [
        VersionSpecifier.from_scheme_version_spec_string("pypi", version_spec)
        for version_spec in version_specs
    ]
    for version in all_versions:
        try:
            version_object = PYPIVersion(version)
        except Exception:
            # skip versions that are not valid PyPI versions; a bare `except:`
            # would also swallow KeyboardInterrupt/SystemExit
            continue
        # generator short-circuits on the first matching spec
        if any(version_object in vurl_spec for vurl_spec in vurl_specs):
            impacted_versions.add(version)
            impacted_purls.append(
                PackageURL(
                    name=package_name,
                    type="pypi",
                    version=version,
                )
            )
    resolved_purls = [
        PackageURL(name=package_name, type="pypi", version=version)
        for version in all_versions - impacted_versions
    ]
    return impacted_purls, resolved_purls
def process_file(self, path) -> List[Advisory]:
    """
    Return an Advisory built from the JSON advisory file at ``path``, or None
    when no vulnerability id can be extracted from its description.
    """
    with open(path) as f:
        json_doc = json.load(f)
        # call vuln_id_from_desc once and reuse the result
        vuln_id = self.vuln_id_from_desc(json_doc["description"])
        if not vuln_id:
            return

        affected_purls = set()
        fixed_purls = set()
        for pkg in json_doc["packages"]:
            affected_purls.add(
                PackageURL(name=pkg["id"], version=pkg["affected"], type="nuget")
            )
            fixed_purls.add(PackageURL(name=pkg["id"], version=pkg["fix"], type="nuget"))

        vuln_reference = [
            Reference(
                url=json_doc["link"],
            )
        ]

        return Advisory(
            summary=json_doc["description"],
            impacted_package_urls=affected_purls,
            resolved_package_urls=fixed_purls,
            vulnerability_id=vuln_id,
            references=vuln_reference,
        )
def test_load_advisory(self):
    # Parse the RUSTSEC markdown fixture and compare against the known advisory.
    advisory_path = os.path.join(TEST_DATA, "RUSTSEC-2021-0032.md")
    loaded = self.data_src._load_advisory(advisory_path)

    vulnerable = PackageURL(type="cargo", name="byte_struct", version="0.6.0")
    patched = PackageURL(type="cargo", name="byte_struct", version="0.6.1")
    expected = Advisory(
        summary="",
        vulnerability_id="CVE-2021-28033",
        affected_packages=[
            AffectedPackage(
                vulnerable_package=vulnerable,
                patched_package=patched,
            )
        ],
        references=[
            Reference(
                reference_id="",
                url="https://github.com/wwylele/byte-struct-rs/issues/1",
                severities=[],
            ),
            Reference(
                reference_id="RUSTSEC-2021-0032",
                url="https://rustsec.org/advisories/RUSTSEC-2021-0032.html",
                severities=[],
            ),
        ],
    )
    assert loaded == expected
def test_process_file(self):
    # Expected: a single Gentoo GLSA advisory with one vulnerable and one
    # fixed subversion ebuild version.
    exp_data = [
        Advisory(
            summary=(
                'A command injection vulnerability in '
                'Subversion may allow remote\n '
                'attackers to execute arbitrary code.\n '),
            impacted_package_urls={
                PackageURL(
                    type='ebuild',
                    namespace='dev-vcs',
                    name='subversion',
                    version='0.1.1',
                    qualifiers=OrderedDict(),
                    subpath=None)},
            resolved_package_urls={
                PackageURL(
                    type='ebuild',
                    namespace='dev-vcs',
                    name='subversion',
                    version='1.9.7',
                    qualifiers=OrderedDict(),
                    subpath=None)},
            vuln_references=[
                Reference(
                    url='https://security.gentoo.org/glsa/201709-09',
                    reference_id='GLSA-201709-09')],
            cve_id='CVE-2017-9800')]
    found_data = self.data_src.process_file(TEST_DATA)
    assert exp_data == found_data
def test_create_purl(self):
    # minimal purl: only name, type and version are provided
    expected_simple = PackageURL(name="ffmpeg", type="test", version="1.2.0")
    created_simple = self.oval_data_src.create_purl(
        pkg_name="ffmpeg", pkg_version="1.2.0", pkg_data={"type": "test"}
    )
    assert created_simple == expected_simple

    # full purl: namespace, qualifiers and subpath are carried through
    expected_full = PackageURL(
        name="notepad",
        type="example",
        version="7.9.6",
        namespace="ns",
        qualifiers={"distro": "sample"},
        subpath="root",
    )
    created_full = self.oval_data_src.create_purl(
        pkg_name="notepad",
        pkg_version="7.9.6",
        pkg_data={
            "namespace": "ns",
            "qualifiers": {"distro": "sample"},
            "subpath": "root",
            "type": "example",
        },
    )
    assert created_full == expected_full
def parse(cls, location):
    """
    Yield a PackageData built from the Godeps file at ``location``.
    """
    godeps = Godep(location)

    # A purl built from the import path parses namespace/name nicely.
    namespace = name = None
    if godeps.import_path:
        base_purl = PackageURL.from_string(f'pkg:golang/{godeps.import_path}')
        namespace = base_purl.namespace
        name = base_purl.name

    dependencies = [
        models.DependentPackage(
            purl=str(PackageURL.from_string(f'pkg:golang/{dep.import_path}')),
            extracted_requirement=dep.revision,
            scope='Deps',
            is_runtime=True,
            is_optional=False,
            is_resolved=False,
        )
        for dep in (godeps.dependencies or [])
    ]

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        namespace=namespace,
        name=name,
        primary_language=cls.default_primary_language,
        dependencies=dependencies,
    )
def test_affected_and_safe_purls(self):
    def subversion_purl(version):
        # every expected purl differs only by its version
        return PackageURL(
            type="ebuild",
            namespace="dev-vcs",
            name="subversion",
            version=version,
            qualifiers=OrderedDict(),
            subpath=None,
        )

    affected, safe = GentooDataSource.affected_and_safe_purls(self.affected)
    assert affected == {subversion_purl("0.1.1")}
    assert safe == {subversion_purl("1.9.7")}
def process_file(self, path) -> List[Advisory]:
    """
    Return an Advisory built from the JSON advisory file at ``path``, or None
    when no vulnerability id can be extracted from its description.
    """
    with open(path) as f:
        json_doc = json.load(f)
        # call vuln_id_from_desc once and reuse the result
        vuln_id = self.vuln_id_from_desc(json_doc["description"])
        if not vuln_id:
            return

        affected_purls = set()
        fixed_purls = set()
        for pkg in json_doc['packages']:
            affected_purls.add(
                PackageURL(name=pkg['id'], version=pkg['affected'], type='nuget'))
            fixed_purls.add(
                PackageURL(name=pkg['id'], version=pkg['fix'], type='nuget'))

        vuln_reference = [Reference(url=json_doc['link'], )]

        return Advisory(summary=json_doc['description'],
                        impacted_package_urls=affected_purls,
                        resolved_package_urls=fixed_purls,
                        cve_id=vuln_id,
                        vuln_references=vuln_reference)
def get_package_dict(os_guess, package):
    '''
    Given a package format, namespace and package object return a CycloneDX
    JSON dictionary representation of the package.
    '''
    # Base component entry; purl and license fields are added below when known.
    package_dict = {
        'name': package.name,
        'version': package.version,
        'type': 'application',
    }
    purl_type = package.pkg_format
    purl_namespace = cyclonedx_common.get_purl_namespace(
        os_guess, package.pkg_format)
    if purl_type:
        purl_name = cyclonedx_common.get_purl_name(package.name, package.pkg_format)
        # PackageURL positional arguments are (type, namespace, name, version)
        purl = PackageURL(purl_type, purl_namespace, purl_name, package.version)
        if purl_type == "apk":
            # Update purl to remove "apk" from the string
            # NOTE(review): dropping the first positional arg shifts the rest,
            # so namespace becomes the type and name becomes the namespace —
            # this looks wrong; confirm the intended apk purl shape.
            purl = PackageURL(purl_namespace, purl_name, package.version)
        package_dict['purl'] = str(purl)
    if package.pkg_license:
        # single declared license
        package_dict['licenses'] = [
            cyclonedx_common.get_license_from_name(package.pkg_license)
        ]
    if package.pkg_licenses:
        # additional detected licenses go under CycloneDX "evidence"
        package_dict['evidence'] = {'licenses': []}
        for pkg_license in package.pkg_licenses:
            package_dict['evidence']['licenses'].append(
                cyclonedx_common.get_license_from_name(pkg_license))
    return package_dict
def parse_with_dparse(location):
    """
    Return a list of DependentPackage built from the dparse-supported manifest
    at ``location`` (requirements.txt, conda.yml, tox.ini, Pipfile or
    Pipfile.lock), or None for directories, unsupported files and manifests
    with no dependencies.
    """
    is_dir = filetype.is_dir(location)
    if is_dir:
        return
    file_name = fileutils.file_name(location)
    dependency_type = get_dependency_type(file_name)
    # only handle the dparse file types we know how to parse
    if dependency_type not in (filetypes.requirements_txt,
                               filetypes.conda_yml,
                               filetypes.tox_ini,
                               filetypes.pipfile,
                               filetypes.pipfile_lock):
        return
    # dparse expects bytes on Python 2 and text on Python 3
    if py2:
        mode = 'rb'
    else:
        mode = 'r'
    with open(location, mode) as f:
        content = f.read()
    df = dparse.parse(content, file_type=dependency_type)
    df_dependencies = df.dependencies
    if not df_dependencies:
        return
    package_dependencies = []
    for df_dependency in df_dependencies:
        # dparse Dependency.specs is a packaging SpecifierSet; its private
        # _specs holds Specifier objects exposing .operator and .version
        specs = list(df_dependency.specs._specs)
        is_resolved = False
        requirement = None
        purl = PackageURL(
            type='pypi',
            name=df_dependency.name
        ).to_string()
        if specs:
            requirement = str(df_dependency.specs)
            for spec in specs:
                operator = spec.operator
                version = spec.version
                # an exact pin (== or ===) marks the dependency as resolved
                # and gives the purl a concrete version
                if any(operator == element for element in ('==', '===')):
                    is_resolved = True
                    purl = PackageURL(
                        type='pypi',
                        name=df_dependency.name,
                        version=version
                    ).to_string()
        package_dependencies.append(
            models.DependentPackage(
                purl=purl,
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement
            )
        )
    return package_dependencies
def get_purls(pkg_dict):
    """
    Return a set of Ubuntu "deb" PackageURL for the source and binary packages
    found in ``pkg_dict``.
    """
    purls = set()
    # "sources" and "binaries" share the same {name: {"version": ...}} shape,
    # so one loop handles both; .get() keeps a missing key from raising
    # (the original indexed "binaries" directly, a latent KeyError).
    for kind in ("sources", "binaries"):
        for pkg_name, pkg_info in pkg_dict.get(kind, {}).items():
            version = pkg_info["version"]
            # The db sometimes contains entries like {'postgresql': {'version': ''}}
            # This `if` ignores such entries
            if not version:
                continue
            purls.add(
                PackageURL(
                    name=pkg_name,
                    version=version,
                    type="deb",
                    namespace="ubuntu",
                ))
    return purls
def test_to_dict_optionally_returns_qualifiers_as_string(self):
    # qualifiers given as a query string are parsed into a mapping internally
    purl = PackageURL(
        type='maven',
        namespace='org.apache',
        name='commons-logging',
        version='12.3',
        qualifiers='this=12&that=13',
        subpath='this/is/a/path',
    )
    # default: qualifiers come back as an OrderedDict sorted by key
    expected = OrderedDict([('type', 'maven'),
                            ('namespace', 'org.apache'),
                            ('name', 'commons-logging'),
                            ('version', '12.3'),
                            ('qualifiers', OrderedDict([
                                ('that', '13'),
                                ('this', '12'),
                            ])),
                            ('subpath', 'this/is/a/path')])
    assert expected == purl.to_dict()

    # encode=True: qualifiers are re-encoded as a sorted query string
    expected = OrderedDict([('type', u'maven'),
                            ('namespace', u'org.apache'),
                            ('name', u'commons-logging'),
                            ('version', u'12.3'),
                            ('qualifiers', u'that=13&this=12'),
                            ('subpath', u'this/is/a/path')])
    assert expected == purl.to_dict(encode=True)
def categorize_versions(
    package_name: str,
    all_versions: Set[str],
    version_specs: Iterable[str],
) -> Tuple[Set[PackageURL], Set[PackageURL]]:
    """
    Partition ``all_versions`` of ``package_name`` into impacted and resolved
    PyPI purl sets: a version is impacted when it falls in any of the
    ``version_specs`` ranges; every remaining version is resolved.

    :return: impacted, resolved purls
    """
    impacted_versions, impacted_purls = set(), set()
    ranges = [RangeSpecifier(s) for s in version_specs]
    for version in all_versions:
        # generator avoids building a throwaway list and short-circuits
        # on the first matching range
        if any(version in r for r in ranges):
            impacted_versions.add(version)
            impacted_purls.add(
                PackageURL(
                    name=package_name,
                    type="pypi",
                    version=version,
                ))
    resolved_purls = {
        PackageURL(name=package_name, type="pypi", version=version)
        for version in all_versions - impacted_versions
    }
    return impacted_purls, resolved_purls
def process_file(self, path) -> List[Advisory]:
    """
    Return an Advisory built from the JSON advisory file at ``path``, or None
    when no vulnerability id can be extracted from its description.
    """
    with open(path) as f:
        json_doc = json.load(f)
        # call vuln_id_from_desc once and reuse the result
        vuln_id = self.vuln_id_from_desc(json_doc["description"])
        if not vuln_id:
            return

        affected_packages = []
        for pkg in json_doc["packages"]:
            affected_packages.append(
                AffectedPackage(
                    vulnerable_package=PackageURL(name=pkg["id"],
                                                  version=pkg["affected"],
                                                  type="nuget"),
                    patched_package=PackageURL(name=pkg["id"],
                                               version=pkg["fix"],
                                               type="nuget"),
                ))

        vuln_reference = [Reference(url=json_doc["link"], )]

        return Advisory(
            vulnerability_id=vuln_id,
            summary=json_doc["description"],
            affected_packages=affected_packages,
            references=vuln_reference,
        )
def build_packages_from_gemfile_lock(gemfile_lock):
    """
    Yield RubyGem Packages from a given GemfileLockParser `gemfile_lock`
    """
    def as_dependency(gem_like):
        # Build a resolved DependentPackage from a parsed gem entry.
        return models.DependentPackage(
            purl=PackageURL(
                type='gem',
                name=gem_like.name,
                version=gem_like.version).to_string(),
            requirement=', '.join(gem_like.requirements),
            scope='dependencies',
            is_runtime=True,
            is_optional=False,
            is_resolved=True,
        )

    # First, a top-level package carrying every gem as a dependency.
    all_dependencies = [as_dependency(gem) for gem in gemfile_lock.all_gems.values()]
    yield RubyGem(dependencies=all_dependencies)

    # Then one package per gem, each carrying only its own dependencies.
    for gem in gemfile_lock.all_gems.values():
        direct_deps = [as_dependency(dep) for dep in gem.dependencies.values()]
        yield RubyGem(name=gem.name, version=gem.version, dependencies=direct_deps)
def _load_advisory(self, path: str) -> Optional[Advisory]:
    """
    Return an Advisory built from the RustSec TOML advisory file at ``path``.
    """
    record = load_toml(path)
    advisory = record.get("advisory", {})
    crate_name = advisory["package"]
    references = []
    if advisory.get("url"):
        references.append(Reference(url=advisory["url"]))
    # all known versions of this crate, from the crates.io API client
    all_versions = self.crates_api.get(crate_name)
    # affected ranges are the union of the per-function version ranges
    affected_ranges = {
        RangeSpecifier(r)
        for r in chain.from_iterable(
            record.get("affected", {}).get("functions", {}).values())
    }
    unaffected_ranges = {
        RangeSpecifier(r)
        for r in record.get("versions", {}).get("unaffected", [])
    }
    resolved_ranges = {
        RangeSpecifier(r)
        for r in record.get("versions", {}).get("patched", [])
    }
    unaffected, affected = categorize_versions(all_versions, unaffected_ranges,
                                               affected_ranges, resolved_ranges)
    impacted_purls = {
        PackageURL(type="cargo", name=crate_name, version=v)
        for v in affected
    }
    resolved_purls = {
        PackageURL(type="cargo", name=crate_name, version=v)
        for v in unaffected
    }
    # use the first CVE alias, if any, as the vulnerability id
    cve_id = None
    if "aliases" in advisory:
        for alias in advisory["aliases"]:
            if alias.startswith("CVE-"):
                cve_id = alias
                break
    # always reference the canonical rustsec.org advisory page
    references.append(
        Reference(
            reference_id=advisory["id"],
            url="https://rustsec.org/advisories/{}.html".format(
                advisory["id"]),
        ))
    return Advisory(
        summary=advisory.get("description", ""),
        impacted_package_urls=impacted_purls,
        resolved_package_urls=resolved_purls,
        cve_id=cve_id,
        vuln_references=references,
    )
def to_advisories(data):
    """
    Return a list of Advisory built from ``data``, an iterable of XML issue
    elements from the Apache httpd vulnerabilities feed.
    """
    advisories = []
    for issue in data:
        # reset per issue so values never leak from a previous iteration
        # (the original raised NameError on a first issue missing <cve>, and
        # silently reused the previous issue's cve/summary afterwards)
        cve = None
        summary = None
        resolved_packages = []
        impacted_packages = []
        for info in issue:
            if info.tag == "cve":
                cve = info.attrib["name"]
            if info.tag == "title":
                summary = info.text
            if info.tag == "fixed":
                resolved_packages.append(
                    PackageURL(type="apache", name="httpd",
                               version=info.attrib["version"]))
            if info.tag == "affects" or info.tag == "maybeaffects":
                impacted_packages.append(
                    PackageURL(type="apache", name="httpd",
                               version=info.attrib["version"]))
        advisories.append(
            Advisory(
                cve_id=cve,
                summary=summary,
                impacted_package_urls=impacted_packages,
                resolved_package_urls=resolved_packages,
            ))
    return advisories
def to_advisories(xml_response: str) -> Set[Advisory]:
    """
    Return a list of Advisory parsed from ``xml_response``, the OpenSSL
    vulnerabilities XML feed, one Advisory per <issue> element.
    """
    advisories = []
    pkg_name = "openssl"
    pkg_type = "generic"
    root = ET.fromstring(xml_response)
    for element in root:
        if element.tag == "issue":
            cve_id = ""
            summary = ""
            safe_pkg_versions = []
            vuln_pkg_versions = []
            ref_urls = []
            for info in element:
                if info.tag == "cve":
                    if info.attrib.get("name"):
                        cve_id = "CVE-" + info.attrib.get("name")
                    else:
                        # no CVE name: skip remaining handling for this child
                        continue
                if info.tag == "affects":
                    # Vulnerable package versions
                    vuln_pkg_versions.append(info.attrib.get("version"))
                if info.tag == "fixed":
                    # Fixed package versions
                    safe_pkg_versions.append(info.attrib.get("version"))
                    # NOTE(review): `if info:` uses Element truthiness, which
                    # is true only when the element has children — presumably
                    # the first child carries the fix commit hash; confirm.
                    if info:
                        commit_hash = info[0].attrib["hash"]
                        ref_urls.append(
                            Reference(
                                url=
                                "https://github.com/openssl/openssl/commit/" +
                                commit_hash))
                if info.tag == "description":
                    # Description: collapse runs of whitespace to single spaces
                    summary = re.sub(r"\s+", " ", info.text).strip()
            safe_purls = [
                PackageURL(name=pkg_name, type=pkg_type, version=version)
                for version in safe_pkg_versions
            ]
            vuln_purls = [
                PackageURL(name=pkg_name, type=pkg_type, version=version)
                for version in vuln_pkg_versions
            ]
            advisory = Advisory(
                vulnerability_id=cve_id,
                summary=summary,
                affected_packages=nearest_patched_package(
                    vuln_purls, safe_purls),
                references=ref_urls,
            )
            advisories.append(advisory)
    return advisories
def recognize(cls, location):
    """
    Yield one or more Package manifest objects given a file ``location``
    pointing to a package archive, manifest or similar.
    """
    data = cls.read_podfile_lock(location)
    pods = data['PODS']
    pod_deps = []
    for pod in pods:
        # An entry is either a plain "Name (version)" string or a one-key
        # mapping of a main pod to its own dependency pods; both branches
        # previously duplicated the same purl/dependency construction.
        if isinstance(pod, dict):
            # only the main pod keys are used; their dep pods are ignored
            entries = pod.keys()
        elif isinstance(pod, str):
            entries = [pod]
        else:
            continue
        for entry in entries:
            podname, namespace, version = get_data_from_pods(entry)
            purl = PackageURL(
                type='pods',
                namespace=namespace,
                name=podname,
                version=version,
            ).to_string()
            pod_deps.append(
                models.DependentPackage(
                    purl=purl,
                    scope='requires-dev',
                    requirement=version,
                    is_runtime=False,
                    is_optional=True,
                    is_resolved=True,
                )
            )
    yield cls(
        dependencies=pod_deps,
        declared_license=None,
    )
def test_qualifiers_must_be_key_value_pairs(self):
    # qualifiers lacking an '=' must be rejected by the purl parser
    bad_purl = 'pkg:maven/org.apache.xmlgraphics/[email protected]?this+is+not+a+key_value'
    try:
        PackageURL.from_string(bad_purl)
    except ValueError as ve:
        assert 'Invalid qualifier. Must be a string of key=value pairs' in str(
            ve)
    else:
        self.fail('Failed to raise exception for invalid qualifiers')
def _parse(self, record) -> List[Advisory]:
    """
    Return a list of Advisory, one per CVE issue in the Arch Linux security
    tracker ``record``.
    """
    advisories = []
    for cve_id in record["issues"]:
        # pair each package's affected version with its fix, when present
        affected_packages = []
        for pkg_name in record["packages"]:
            vulnerable = [
                PackageURL(
                    name=pkg_name,
                    type="pacman",
                    namespace="archlinux",
                    version=record["affected"],
                )
            ]
            patched = []
            if record["fixed"]:
                patched.append(
                    PackageURL(
                        name=pkg_name,
                        type="pacman",
                        namespace="archlinux",
                        version=record["fixed"],
                    )
                )
            affected_packages.extend(nearest_patched_package(vulnerable, patched))

        # the tracker entry itself, carrying the AVG severity
        references = [
            Reference(
                reference_id=record["name"],
                url="https://security.archlinux.org/{}".format(record["name"]),
                severities=[
                    VulnerabilitySeverity(
                        system=scoring_systems["avgs"], value=record["severity"]
                    )
                ],
            )
        ]
        # plus one reference per linked ASA advisory
        for advisory_id in record["advisories"]:
            references.append(
                Reference(
                    reference_id=advisory_id,
                    url="https://security.archlinux.org/{}".format(advisory_id),
                )
            )

        advisories.append(
            Advisory(
                vulnerability_id=cve_id,
                summary="",
                affected_packages=affected_packages,
                references=references,
            )
        )
    return advisories
def _parse(self, pkg_name: str, records: Mapping[str, Any]) -> List[Advisory]:
    """
    Return a list of Advisory for Debian package ``pkg_name`` built from
    ``records``, a mapping of vulnerability id to Debian security-tracker
    record; ids not starting with "CVE" are skipped.
    """
    advisories = []
    for cve_id, record in records.items():
        impacted_purls, resolved_purls = set(), set()
        if not cve_id.startswith("CVE"):
            continue
        # vulnerabilities starting with something else may not be public yet
        # see for instance https://web.archive.org/web/20201215213725/https://security-tracker.debian.org/tracker/TEMP-0000000-A2EB44 # nopep8
        # TODO: this would need to be revisited though to ensure we are not missing out on anything # nopep8
        for release_name, release_record in record["releases"].items():
            # skip releases without a known repository version
            if not release_record.get("repositories", {}).get(release_name):
                continue

            # purl for the version currently shipped by this release
            purl = PackageURL(
                name=pkg_name,
                type="deb",
                namespace="debian",
                version=release_record["repositories"][release_name],
                qualifiers={"distro": release_name},
            )

            if release_record.get("status", "") == "resolved":
                resolved_purls.add(purl)
            else:
                impacted_purls.add(purl)

            # the explicit fixed version, when present, is always resolved
            if "fixed_version" in release_record:
                resolved_purls.add(
                    PackageURL(
                        name=pkg_name,
                        type="deb",
                        namespace="debian",
                        version=release_record["fixed_version"],
                        qualifiers={"distro": release_name},
                    ))

        references = []
        debianbug = record.get("debianbug")
        if debianbug:
            bug_url = f"https://bugs.debian.org/cgi-bin/bugreport.cgi?bug={debianbug}"
            references.append(
                Reference(url=bug_url, reference_id=debianbug))
        advisories.append(
            Advisory(
                vulnerability_id=cve_id,
                summary=record.get("description", ""),
                impacted_package_urls=impacted_purls,
                resolved_package_urls=resolved_purls,
                references=references,
            ))
    return advisories
def to_advisories(self, apache_tomcat_advisory_html):
    """
    Return a list of Advisory parsed from ``apache_tomcat_advisory_html``, an
    Apache Tomcat security page: one Advisory per CVE link found under each
    "Fixed in Apache Tomcat X.Y.Z" heading.
    """
    advisories = []
    page_soup = BeautifulSoup(apache_tomcat_advisory_html, features="lxml")
    pageh3s = page_soup.find_all("h3")
    # only the "Fixed in Apache Tomcat ..." headings introduce advisories
    vuln_headings = [
        i for i in pageh3s if "Fixed in Apache Tomcat" in i.text
    ]
    for data in vuln_headings:
        fixed_version = data.text.split(
            "Fixed in Apache Tomcat")[-1].strip()
        details_div = data.find_next_sibling()
        for anchor_tag in details_div.find_all("a"):
            # CVE links to cve.mitre.org delimit the individual advisories
            if "cve.mitre.org" not in anchor_tag["href"]:
                continue
            cve_id = re.search(r"CVE-\d*-\d*", anchor_tag.text).group()
            references = []
            affected_packages = []
            # walk the paragraphs after the CVE link, collecting reference
            # links, until reaching the "Affects:" paragraph that lists the
            # vulnerable version ranges
            paragraph = anchor_tag.find_parent()
            while paragraph and "Affects:" not in paragraph.text:
                for ref in paragraph.find_all("a"):
                    references.append(Reference(url=ref["href"]))
                paragraph = paragraph.find_next_sibling()
            if not paragraph:
                # At the end of details_div
                continue
            # expand each version range against all known tomcat versions
            for version_range in parse_version_ranges(paragraph.text):
                affected_packages.extend([
                    PackageURL(type="maven",
                               namespace="apache",
                               name="tomcat",
                               version=version)
                    for version in self.version_api.get("org.apache.tomcat:tomcat")
                    if version in version_range
                ])
            fixed_package = [
                PackageURL(type="maven",
                           namespace="apache",
                           name="tomcat",
                           version=fixed_version)
            ]
            advisories.append(
                Advisory(
                    summary="",
                    impacted_package_urls=affected_packages,
                    resolved_package_urls=fixed_package,
                    vulnerability_id=cve_id,
                    vuln_references=references,
                ))
    return advisories
def to_advisories(data):
    """
    Return a list of Advisory parsed from ``data``, the HTML of a PostgreSQL
    security page whose table rows each describe one vulnerability.
    """
    advisories = []
    soup = BeautifulSoup(data, features="lxml")
    table = soup.select("table")[0]
    for row in table.select("tbody tr"):
        ref_col, affected_col, fixed_col, severity_score_col, desc_col = row.select("td")
        summary = desc_col.text
        # Windows-only issues are flagged via a purl qualifier
        pkg_qualifiers = {}
        if "windows" in summary.lower():
            pkg_qualifiers = {"os": "windows"}
        # affected/fixed columns hold comma-separated version lists
        affected_packages = [
            PackageURL(
                type="generic",
                name="postgresql",
                version=version.strip(),
                qualifiers=pkg_qualifiers,
            )
            for version in affected_col.text.split(",")
        ]
        fixed_packages = [
            PackageURL(
                type="generic",
                name="postgresql",
                version=version.strip(),
                qualifiers=pkg_qualifiers,
            )
            for version in fixed_col.text.split(",")
        ]
        try:
            cve_id = ref_col.select("nobr")[0].text
            # This is for the anomaly in https://www.postgresql.org/support/security/8.1/ 's
            # last entry
        except IndexError:
            # NOTE(review): on IndexError cve_id keeps the value from the
            # previous row (or is unbound on the first row) — confirm intent
            pass
        references = []
        for a_tag in ref_col.select("a"):
            link = a_tag.attrs["href"]
            if link.startswith("/about/news/"):
                # Convert postgresql official announcements to absolute url.
                link = urljoin(BASE_URL, link)
            references.append(Reference(url=link))
        advisories.append(
            Advisory(
                vulnerability_id=cve_id,
                summary=summary,
                references=references,
                impacted_package_urls=affected_packages,
                resolved_package_urls=fixed_packages,
            )
        )
    return advisories
def deps_mapper(deps, package, field_name):
    """
    Handle deps such as dependencies, devDependencies, peerDependencies, optionalDependencies
    return a tuple of (dep type, list of deps)
    https://docs.npmjs.com/files/package.json#dependencies
    https://docs.npmjs.com/files/package.json#peerdependencies
    https://docs.npmjs.com/files/package.json#devdependencies
    https://docs.npmjs.com/files/package.json#optionaldependencies
    """
    # runtime/optional flags depend on which package.json field the deps came from
    npm_dependency_scopes_attributes = {
        'dependencies': dict(is_runtime=True, is_optional=False),
        'devDependencies': dict(is_runtime=False, is_optional=True),
        'peerDependencies': dict(is_runtime=True, is_optional=False),
        'optionalDependencies': dict(is_runtime=True, is_optional=True),
    }
    dependencies = package.dependencies
    deps_by_name = {}
    if field_name == 'optionalDependencies':
        # optionalDependencies override the dependencies with the same name
        # so we build a map of name->dep object for use later
        for d in dependencies:
            if d.scope != 'dependencies':
                continue
            purl = PackageURL.from_string(d.purl)
            npm_name = purl.name
            if purl.namespace:
                # scoped npm package: fully-qualified name is @scope/name
                npm_name = '/'.join([purl.namespace, purl.name])
            deps_by_name[npm_name] = d
    for fqname, requirement in deps.items():
        ns, name = split_scoped_package_name(fqname)
        if not name:
            # not a usable npm package name: skip
            continue
        purl = PackageURL(type='npm', namespace=ns, name=name).to_string()
        # optionalDependencies override the dependencies with the same name
        # https://docs.npmjs.com/files/package.json#optionaldependencies
        # therefore we update/override the dependency of the same name
        overridable = deps_by_name.get(fqname)
        if overridable and field_name == 'optionalDependencies':
            # mutate the existing DependentPackage in place
            overridable.purl = purl
            overridable.is_optional = True
            overridable.scope = field_name
        else:
            dependency_attributes = npm_dependency_scopes_attributes.get(field_name, dict())
            dep = models.DependentPackage(
                purl=purl,
                scope=field_name,
                extracted_requirement=requirement,
                **dependency_attributes
            )
            dependencies.append(dep)
    return package
def process_response(self) -> List[Advisory]:
    """
    Return a list of Advisory built from the GitHub GraphQL security
    advisories already fetched into ``self.advisories``, one Advisory per
    (vulnerability, CVE id) pair.
    """
    adv_list = []
    for ecosystem in self.advisories:
        self.set_version_api(ecosystem)
        pkg_type = ecosystem.lower()
        for resp_page in self.advisories[ecosystem]:
            for adv in resp_page["data"]["securityVulnerabilities"]["edges"]:
                name = adv["node"]["package"]["name"]
                # call process_name once and reuse the result
                processed_name = self.process_name(ecosystem, name)
                if not processed_name:
                    continue
                ns, pkg_name = processed_name
                aff_range = adv["node"]["vulnerableVersionRange"]
                # split all known versions into affected/unaffected
                aff_vers, unaff_vers = self.categorize_versions(
                    aff_range, self.version_api.get(name)
                )
                affected_purls = {
                    PackageURL(name=pkg_name, namespace=ns,
                               version=version, type=pkg_type)
                    for version in aff_vers
                }
                unaffected_purls = {
                    PackageURL(name=pkg_name, namespace=ns,
                               version=version, type=pkg_type)
                    for version in unaff_vers
                }
                cve_ids = set()
                vuln_references = []
                vuln_desc = adv["node"]["advisory"]["summary"]
                for vuln in adv["node"]["advisory"]["identifiers"]:
                    if vuln["type"] == "CVE":
                        cve_ids.add(vuln["value"])
                    elif vuln["type"] == "GHSA":
                        # GHSA ids become references, not vulnerability ids
                        ghsa = vuln['value']
                        vuln_references.append(Reference(
                            reference_id=ghsa,
                            url="https://github.com/advisories/{}".format(
                                ghsa)
                        ))
                for cve_id in cve_ids:
                    adv_list.append(
                        Advisory(
                            cve_id=cve_id,
                            summary=vuln_desc,
                            impacted_package_urls=affected_purls,
                            resolved_package_urls=unaffected_purls,
                            vuln_references=vuln_references,
                        )
                    )
    return adv_list
def parse_with_dparse(location, dependency_type=None):
    """
    Return a list of DependentPackage built from a dparse-supported dependency
    manifest such as requirements.txt, Conda manifest or Pipfile.lock files,
    or return an empty list.
    """
    with open(location) as manifest:
        content = manifest.read()

    dep_file = dparse.parse(content, file_type=dependency_type)
    if not dep_file:
        return []

    dependent_packages = []
    for dependency in dep_file.dependencies:
        # dparse.dependencies.Dependency.specs comes from
        # packaging.requirements.Requirement.specifier, a
        # packaging.specifiers.SpecifierSet; its private _specs is a set of
        # Specifier/LegacySpecifier, each with .operator and .version.
        specifier_set = dependency.specs
        specifiers = specifier_set._specs

        requirement = dependency.name
        is_resolved = False
        purl = PackageURL(type='pypi', name=dependency.name)

        if specifiers:
            # SpecifierSet stringifies to comma-separated sorted Specifiers
            requirement = str(specifier_set)
            # a single exact pin (== or ===) means the dependency is resolved
            if len(specifiers) == 1:
                only_spec = next(iter(specifiers))
                if only_spec.operator in ('==', '==='):
                    is_resolved = True
                    purl = purl._replace(version=only_spec.version)

        dependent_packages.append(
            models.DependentPackage(
                purl=purl.to_string(),
                # are we always this scope? what if we have requirements-dev.txt?
                scope='install',
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement,
            )
        )
    return dependent_packages
def process_file(self, path) -> List[Advisory]:
    """
    Return an Advisory built from the rubysec YAML advisory at ``path``, or
    None when the record has no gem name or no CVE id.
    """
    record = load_yaml(path)
    package_name = record.get("gem")
    if not package_name:
        return
    if "cve" in record:
        cve_id = "CVE-{}".format(record["cve"])
    else:
        # advisories without a CVE id are skipped
        return
    # safe versions are both patched and explicitly unaffected ranges
    safe_version_ranges = record.get("patched_versions", [])
    # this case happens when the advisory contain only 'patched_versions' field
    # and it has value None(i.e it is empty :( ).
    if not safe_version_ranges:
        safe_version_ranges = []
    safe_version_ranges += record.get("unaffected_versions", [])
    safe_version_ranges = [i for i in safe_version_ranges if i]
    # lazily create the rubygems API client on first use
    if not getattr(self, "pkg_manager_api", None):
        self.pkg_manager_api = RubyVersionAPI()
    all_vers = self.pkg_manager_api.get(package_name)
    safe_versions, affected_versions = self.categorize_versions(
        all_vers, safe_version_ranges)
    impacted_purls = {
        PackageURL(
            name=package_name,
            type="gem",
            version=version,
        )
        for version in affected_versions
    }
    resolved_purls = {
        PackageURL(
            name=package_name,
            type="gem",
            version=version,
        )
        for version in safe_versions
    }
    references = []
    if record.get("url"):
        references.append(Reference(url=record.get("url")))
    return Advisory(
        summary=record.get("description", ""),
        impacted_package_urls=impacted_purls,
        resolved_package_urls=resolved_purls,
        references=references,
        vulnerability_id=cve_id,
    )
def build_xcode_package_from_lockfile(podfile_lock_data):
    """
    Yield a CocoapodsPackage built from ``podfile_lock_data``, a data mapping
    obtained from a podfile.lock.
    """
    pods = podfile_lock_data['PODS']
    pod_deps = []
    for pod in pods:
        # An entry is either a plain "Name (version)" string or a one-key
        # mapping of a main pod to its own dependency pods; both branches
        # previously duplicated the same purl/dependency construction.
        if isinstance(pod, dict):
            # only the main pod keys are used; their dep pods are ignored
            entries = pod.keys()
        elif isinstance(pod, str):
            entries = [pod]
        else:
            continue
        for entry in entries:
            podname, namespace, version = get_data_from_pods(entry)
            purl = PackageURL(
                type='pods',
                namespace=namespace,
                name=podname,
                version=version,
            ).to_string()
            pod_deps.append(
                models.DependentPackage(
                    purl=purl,
                    scope='requires-dev',
                    requirement=version,
                    is_runtime=False,
                    is_optional=True,
                    is_resolved=True,
                ))
    yield CocoapodsPackage(
        dependencies=pod_deps,
        declared_license=None,
    )