def test_vuln_data():
    """Load the bundled CVE fixture file and convert it via NvdSource."""
    fixture_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "cve_data.json"
    )
    with open(fixture_path, "r") as fixture_file:
        raw_json = json.load(fixture_file)
    return NvdSource().convert(raw_json)
def main():
    """CLI entry point: refresh or sync the vulnerability DB, or search it.

    Search queries are split on ``,|;`` into package expressions, each of
    which is split on ``:=@`` into (package, version) or
    (vendor, package, version) parts.
    """
    args = build_args()
    print(at_logo, flush=True)
    LOG.info("Vulnerability database loaded from {}".format(
        config.vulndb_bin_file))
    if args.cache:
        # Full refresh from every known advisory source
        for source in (GitHubSource(), NvdSource()):
            LOG.info("Refreshing {}".format(source.__class__.__name__))
            source.refresh()
    elif args.sync:
        # Incremental update: only recently published advisories
        for source in (GitHubSource(), NvdSource()):
            LOG.info("Syncing {}".format(source.__class__.__name__))
            source.download_recent()
    elif args.search:
        db = dbLib.get()
        for raw_query in re.split(r"[,|;]", args.search):
            query_parts = re.split(r"[:=@]", raw_query)
            if not query_parts:
                continue
            if len(query_parts) == 2 and dbLib.index_search(*query_parts):
                # (package, version) pair found in the index
                print_results(dbLib.pkg_search(db, *query_parts))
            elif len(query_parts) == 3:
                # (vendor, package, version) triple
                print_results(dbLib.vendor_pkg_search(db, *query_parts))
            else:
                print("No vulnerability found!")
def test_convert(test_cve_json):
    """Converted fixture yields 385 vulns, each detail fully populated."""
    converted = NvdSource().convert(test_cve_json)
    assert len(converted) == 385
    for vuln in converted:
        for detail in vuln.details:
            assert detail
            assert detail.severity
            assert detail.package
            assert detail.package_type
def convert(self, adv_data):
    """Convert npm advisory data into Vulnerability objects.

    Renders one CVE-template instance per (cve id, vulnerable version
    range) pair and parses it through ``NvdSource.convert_vuln``.

    :param adv_data: npm advisories payload; ``adv_data["advisories"]``
        maps advisory ids to advisory dicts — TODO confirm full schema
        against the npm API.
    :return: list of Vulnerability objects
    """
    ret_data = []
    assigner = "@npm"
    for k, v in adv_data.get("advisories").items():
        # Skip advisories that npm has withdrawn
        if v["deleted"]:
            continue
        # Iterate the cve list if available
        cves = v.get("cves")
        if not cves:
            # No CVE assigned yet; fall back to the npm advisory id
            cves = [v.get("id")]
        for cve_id in cves:
            publishedDate = v["created"]
            lastModifiedDate = v["updated"]
            # FIXME: This should include overview and recommendation
            # Backticks stripped so the text survives template rendering
            description = v.get("title", "").replace("`", "")
            # FIXME: This should include references
            references = [{"name": "npm advisory", "url": v.get("url")}]
            severity = v.get("severity")
            vendor = "npm"
            product = v["module_name"]
            # npm gives only a severity label; derive score/vector defaults
            # from it (severity is normalized by this call as well)
            score, severity, vectorString, attackComplexity = get_default_cve_data(
                severity)
            cwe_id = v.get("cwe")
            version = v["vulnerable_versions"]
            # Semver range string -> list of (start, end) pairs
            version_ranges = self.get_version_ranges(version)
            for ver in version_ranges:
                # Fill the NVD-style CVE JSON template for this range
                tdata = config.CVE_TPL % dict(
                    cve_id=cve_id,
                    cwe_id=cwe_id,
                    assigner=assigner,
                    references=json.dumps(references),
                    description=description,
                    vectorString=vectorString,
                    vendor=vendor,
                    product=product,
                    version="*",
                    version_start=ver[0],
                    version_end=ver[1],
                    severity=severity,
                    attackComplexity=attackComplexity,
                    score=score,
                    publishedDate=publishedDate,
                    lastModifiedDate=lastModifiedDate,
                )
                # strict=False tolerates control characters in descriptions
                vuln = NvdSource.convert_vuln(
                    json.loads(tdata, strict=False))
                ret_data.append(vuln)
    return ret_data
def main():
    """Entry point: load/refresh the vulnerability DB and optionally scan a BOM.

    Exits the process with status 1 when the scan summary reports one or
    more CRITICAL findings and ``--noerror`` was not given.
    """
    args = build_args()
    print(at_logo, flush=True)
    db = dbLib.get()
    run_cacher = args.cache
    summary = None
    # An empty index means the DB was never populated; force a refresh.
    if not dbLib.index_count(db["index_file"]):
        run_cacher = True
    else:
        LOG.info("Vulnerability database loaded from {}".format(config.vulndb_bin_file))
    sources_list = [NvdSource()]
    if os.environ.get("GITHUB_TOKEN"):
        # GitHub source goes first so its advisories take precedence
        sources_list.insert(0, GitHubSource())
    else:
        LOG.info(
            "To use GitHub advisory source please set the environment variable GITHUB_TOKEN!"
        )
    if run_cacher:
        for s in sources_list:
            LOG.info("Refreshing {}".format(s.__class__.__name__))
            s.refresh()
    elif args.sync:
        for s in sources_list:
            LOG.info("Syncing {}".format(s.__class__.__name__))
            s.download_recent()
    LOG.debug(
        "Vulnerability database contains {} records".format(
            dbLib.index_count(db["index_file"])
        )
    )
    if args.bom:
        if not os.path.isfile(args.bom):
            LOG.error("Invalid bom file specified: {}".format(args.bom))
            return
        LOG.debug("Scanning using the bom file {}".format(args.bom))
        pkg_list = get_pkg_list(args.bom)
        summary = scan(db, pkg_list, args.report_file)
    # proj_type = utils.detect_project_type(args.src_dir)
    if summary and not args.noerror:
        # Hard coded build break logic for now.
        # Default of 0 avoids a TypeError ("'>' not supported between
        # NoneType and int") when the summary lacks a CRITICAL key.
        if summary.get("CRITICAL", 0) > 0:
            sys.exit(1)
def convert(self, cve_data):
    """Convert the GitHub advisory data into Vulnerability objects

    TODO: Fix version information is getting ignored since the CVE Json
    format does not support this attribute

    :param cve_data: GraphQL response for the securityAdvisories query
    :return: tuple of (list of Vulnerability objects, pageInfo dict or
        None on error)
    """
    ret_data = []
    # GraphQL-level errors: nothing usable in the payload
    if cve_data.get("errors"):
        return ret_data, None
    if cve_data.get("message") and cve_data.get(
            "message") == "Bad credentials":
        LOG.warning("GITHUB_TOKEN environment variable is invalid!")
        return ret_data, None
    # pageInfo lets the caller paginate through further advisories
    page_info = cve_data["data"]["securityAdvisories"]["pageInfo"]
    for cve in cve_data["data"]["securityAdvisories"]["nodes"]:
        cve_id = None
        assigner = "*****@*****.**"
        references = []
        for r in cve["references"]:
            references.append({"url": r["url"], "name": r["url"]})
        # Prefer a real CVE identifier when the advisory carries one
        for id in cve["identifiers"]:
            if id["type"] == "CVE":
                cve_id = id["value"]
        if not cve_id:
            # No CVE assigned; fall back to the GHSA id
            cve_id = cve["ghsaId"]
            assigner = "@github"
        # One Vulnerability per affected package in the advisory
        for p in cve["vulnerabilities"]["nodes"]:
            vendor = p["package"]["ecosystem"]
            product = p["package"]["name"]
            if ":" in product:
                tmpA = product.split(":")
                # This extract's the correct vendor based on the namespace
                # Eg: org.springframework:spring-webflux would result in
                # vendor: org.springframework
                # product: spring-webflux
                vendor = tmpA[0]
                product = tmpA[len(tmpA) - 1]
            version = p["vulnerableVersionRange"]
            version_start, version_end = self.get_version_range(version)
            # Defaults correspond to CRITICAL; overridden per severity below
            vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H"
            score = 9.0
            severity = p["severity"]
            # NOTE(review): for MODERATE, attackComplexity stays the raw
            # "MODERATE" label even though the vector says AC:H — confirm
            # whether it should be "HIGH" like the LOW branch.
            attackComplexity = severity
            if p["severity"] == "LOW":
                score = 2.0
                attackComplexity = "HIGH"
                vectorString = "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:N/A:N"
            elif p["severity"] == "MODERATE":
                score = 5.0
                severity = "MEDIUM"
                vectorString = "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:L/A:L"
            elif p["severity"] == "HIGH":
                score = 7.5
                attackComplexity = "LOW"
                vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L"
            # Render the NVD-style CVE JSON template for this package
            tdata = CVE_TPL % dict(
                cve_id=cve_id,
                cwe_id="UNKNOWN",
                assigner=assigner,
                references=json.dumps(references),
                description=cve["summary"],
                vectorString=vectorString,
                vendor=vendor.lower(),
                product=product.lower(),
                version="*",
                version_start=version_start,
                version_end=version_end,
                severity=severity,
                attackComplexity=attackComplexity,
                score=score,
                publishedDate=cve["publishedAt"],
                lastModifiedDate=cve["updatedAt"],
            )
            vuln = NvdSource.convert_vuln(json.loads(tdata))
            ret_data.append(vuln)
    return ret_data, page_info
def test_download_all():
    """A full NVD download should yield well over 128k entries."""
    source = NvdSource()
    entries = source.download_all()
    assert len(entries) > 128000
def test_nvd_download():
    """Recent NVD feed should contain more than 300 entries."""
    source = NvdSource()
    recent = source.download_recent()
    assert len(recent) > 300