def parse_findings(data, scanner):
    """ Parse findings """
    try:
        findings = GitleaksScanParser(data, scanner).items
        # Make finding instances
        for item in findings:
            description = item["description"]
            if scanner.config.get("additional_text", None):
                description = scanner.config.get("additional_text") + "\n\n" + description
            finding = SastFinding(
                title=item["title"],
                description=[
                    "\n\n".join([
                        description,
                        f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    ])
                ]
            )
            finding.set_meta("tool", scanner.get_name())
            finding.set_meta("severity", item["severity"])
            finding.set_meta("legacy.file", item["file_path"])
            endpoints = list()
            if item["file_path"]:
                endpoints.append(Endpoint(raw=item["file_path"]))
            finding.set_meta("endpoints", endpoints)
            log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
            scanner.findings.append(finding)
    except:  # pylint: disable=W0702
        log.exception("Failed to parse findings")

def parse_findings(data, scanner):
    """ Parse findings """
    # Get deps
    deps = get_dependencies(
        scanner.config.get("code"),
        scanner.config.get("add_devdep", False)
    )
    # Parse JSON using legacy parser
    findings = NpmScanParser(data, deps).items
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    item["description"],
                    f"**URL:** {item['url']}",
                    f"**CWE:** {markdown.markdown_escape(item['cwe'])}",
                    f"**References:** {item['references']}",
                    f"**File to review:** {item['file_path']}"
                ])
            ]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.NPM_SEVERITY_MAPPING[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        endpoints = list()
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

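# `get_dependencies` is referenced above but not defined in this excerpt. The
# sketch below is a hypothetical stand-in, assuming it reads package.json from
# the scanned code directory and returns dependency names, optionally including
# devDependencies; the real helper in the framework may differ.
import json
import os


def get_dependencies(code_path, add_devdep=False):
    """ Collect dependency names from package.json (assumed behavior) """
    with open(os.path.join(code_path, "package.json"), "r") as file:
        package = json.load(file)
    deps = list(package.get("dependencies", dict()))
    if add_devdep:
        deps.extend(package.get("devDependencies", dict()))
    return deps
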
def parse_findings(output_file, scanner):  # pylint: disable=E,W,R,C
    """ Parse findings (code from dusty 1.0) """
    # Parse HTML report using legacy parser
    filtered_statuses = scanner.config.get(
        "filtered_statuses", constants.PTAI_DEFAULT_FILTERED_STATUSES
    )
    if isinstance(filtered_statuses, str):
        filtered_statuses = [
            item.strip() for item in filtered_statuses.split(",")
        ]
    findings = PTAIScanParser(output_file, filtered_statuses).items
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                html.escape(
                    markdown.markdown_escape(
                        # Strip non-breaking spaces (\xa0) from the HTML report text
                        item["description"].replace("\xa0", "")
                    )
                ) + f"\n\n**File to review:** {markdown.markdown_escape(item['file_path'])}"
            ] + [html.escape(data) for data in item["steps_to_reproduce"]]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.PTAI_SEVERITIES[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta(
            "endpoints", [namedtuple("Endpoint", ["raw"])(raw=item["file_path"])]
        )
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(filename, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    findings = DependencyCheckParser(filename).items
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    "Vulnerable dependency was found. Please upgrade component or check that vulnerable functionality is not used.",  # pylint: disable=C0301
                    markdown.markdown_escape(item["description"]),
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                ])
            ] + item["steps_to_reproduce"]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", item["severity"])
        finding.set_meta("legacy.file", item["file_path"])
        endpoints = list()
        if item["file_path"]:
            endpoints.append(
                namedtuple("Endpoint", ["raw"])(raw=item["file_path"])
            )
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(filename, scanner):
    """ Parse findings """
    # Load JSON
    try:
        with open(filename, "r") as file:
            data = json.load(file)
    except:  # pylint: disable=W0702
        log.exception("Failed to load report JSON")
        return
    # Load CWE map
    cwe_map = json.loads(
        pkg_resources.resource_string(
            "dusty",
            f"{'/'.join(__name__.split('.')[1:-1])}/data/cwe_map_v4.2.json"
        )
    )
    # Parse JSON
    if not isinstance(data, dict) or "vulnerabilities" not in data:
        log.info("No data in report")
        return
    # Make finding instances
    for item in data["vulnerabilities"]:
        vuln_severity = cvss_to_severity(item.get("cvss", 0.0))
        vuln_cwe = item.get("cwe", "Vulnerability")
        #
        vuln_cwe_title = cwe_map[vuln_cwe] if vuln_cwe in cwe_map else vuln_cwe
        vuln_file_title = f" in {item.get('classMessage')}" if "classMessage" in item else ""
        vuln_title = f"{vuln_cwe_title}{vuln_file_title}"
        #
        vuln_file = item.get("classMessage", "").rsplit(" (", 1)[0]
        #
        vuln_info_chunks = list()
        if "longMessage" in item:
            vuln_info_chunks.append(markdown.markdown_escape(item["longMessage"]))
        if "shortMessage" in item:
            vuln_info_chunks.append(markdown.markdown_escape(item["shortMessage"]))
        vuln_info_chunks.append(
            f"**Class:** {markdown.markdown_escape(item['classMessage'])}"
        )
        vuln_info_chunks.append(
            f"**Method:** {markdown.markdown_escape(item['method'])}"
        )
        if "affectedFiles" in item:
            vuln_info_chunks.append(
                f"**Files:** {markdown.markdown_escape(', '.join(item['affectedFiles']))}"
            )
        #
        finding = SastFinding(
            title=vuln_title,
            description=["\n\n".join(vuln_info_chunks)]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", vuln_severity)
        finding.set_meta("legacy.file", vuln_file)
        endpoints = list()
        if vuln_file:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=vuln_file))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

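# `cvss_to_severity` is called above but defined elsewhere in the module. The
# sketch below is an assumed implementation using the standard CVSS v3 score
# bands; the thresholds in the actual module may differ.
def cvss_to_severity(cvss_score):
    """ Map a CVSS base score to a severity label (assumed thresholds) """
    thresholds = [
        (9.0, "Critical"),
        (7.0, "High"),
        (4.0, "Medium"),
        (0.1, "Low"),
    ]
    for threshold, severity in thresholds:
        if cvss_score >= threshold:
            return severity
    return "Info"
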
def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    findings = GosecOutputParser(data).items
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    item["description"],
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                ])
            ] + item["steps_to_reproduce"]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.GOSEC_SEVERITY_MAPPING[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta(
            "endpoints", [namedtuple("Endpoint", ["raw"])(raw=item["file_path"])]
        )
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    findings = SafetyScanParser(data).items
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[markdown.markdown_escape(item["description"])]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", "Medium")
        scanner.findings.append(finding)

def parse_findings(filename, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    findings = SpotbugsParser(filename).items
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    item["description"],
                    f"**Category:** {markdown.markdown_escape(item['category'])}",
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    f":{item['line']}"
                ])
            ] + item["steps_to_reproduce"]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.SPOTBUGS_SEVERITIES[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta("legacy.line", item["line"])
        endpoints = list()
        if item["file_path"]:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=item["file_path"]))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    findings = BanditParser(data).items
    # Better bandit finding titles/descriptions (loaded once, outside the loop)
    database = json.load(
        pkg_resources.resource_stream(
            "dusty",
            f"{'/'.join(__name__.split('.')[1:-1])}/data/findings.json"
        )
    )
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    f"```\n{item['description']}\n```",
                    f"**Mitigation:** {markdown.markdown_escape(item['mitigation'])}",
                    f"**Impact:** {markdown.markdown_escape(item['impact'])}",
                    f"**References:** {markdown.markdown_escape(item['references'])}",
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    f":{item['line']}"
                ])
            ]
        )
        # Rewrite title/description if the finding is in the database
        if item["bandit_id"] in database:
            db_item = database[item["bandit_id"]]
            finding.set_meta("rewrite_title_to", db_item["title"])
            if db_item.get("description", None):
                finding.description[0] = "\n\n".join([
                    markdown.markdown_escape(db_item["description"]),
                    finding.description[0]
                ])
        # Other meta
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.BANDIT_SEVERITIES[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta("legacy.line", item["line"])
        finding.set_meta(
            "endpoints", [namedtuple("Endpoint", ["raw"])(raw=item["file_path"])]
        )
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(result, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    findings = NodeJsScanParser(result).items
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    markdown.markdown_escape(item["description"]),
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    f":{item['line']}"
                ])
            ] + item["steps_to_reproduce"]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", item["severity"])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta("legacy.line", item["line"])
        endpoints = list()
        if item["file_path"]:
            endpoints.append(
                namedtuple("Endpoint", ["raw"])(raw=item["file_path"])
            )
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(filename, scanner):
    """ Parse findings """
    # Load JSON
    try:
        with open(filename, "r") as file:
            data = json.load(file)
    except:  # pylint: disable=W0702
        log.exception("Failed to load report JSON")
        return
    # Parse JSON
    if not isinstance(data, dict) or "results" not in data:
        log.info("No data in report")
        return
    # Make finding instances
    for item in data["results"]:
        vuln_data = item.get("extra", dict())
        vuln_meta = vuln_data.get("metadata", dict())
        #
        vuln_rule = item["check_id"]
        vuln_file = item["path"]
        vuln_info = vuln_data.get("message", "")
        vuln_severity = map_severity(vuln_data.get("severity", ""))
        #
        vuln_cwe_owasp_title = vuln_meta.get("cwe", "")
        if not vuln_cwe_owasp_title:
            vuln_cwe_owasp_title = vuln_meta.get("owasp", "")
        if not vuln_cwe_owasp_title:
            vuln_cwe_owasp_title = "Vulnerability"
        #
        vuln_title = f"{vuln_cwe_owasp_title} in {vuln_file}"
        #
        vuln_info_chunks = list()
        if vuln_info:
            vuln_info_chunks.append(markdown.markdown_escape(vuln_info))
        vuln_info_chunks.append(
            f"**Rule:** {markdown.markdown_escape(vuln_rule)}"
        )
        if "source-rule-url" in vuln_meta:
            vuln_info_chunks.append(
                f"**Rule source:** {markdown.markdown_escape(vuln_meta['source-rule-url'])}"
            )
        if "cwe" in vuln_meta:
            vuln_info_chunks.append(
                f"**CWE:** {markdown.markdown_escape(vuln_meta['cwe'])}"
            )
        if "owasp" in vuln_meta:
            vuln_info_chunks.append(
                f"**OWASP:** {markdown.markdown_escape(vuln_meta['owasp'])}"
            )
        vuln_info_chunks.append(
            f"**File:** {markdown.markdown_escape(vuln_file)}"
        )
        if "start" in item and "line" in item["start"]:
            vuln_info_chunks.append(
                f"**Start line:** {markdown.markdown_escape(str(item['start']['line']))}"
            )
        if "end" in item and "line" in item["end"]:
            vuln_info_chunks.append(
                f"**End line:** {markdown.markdown_escape(str(item['end']['line']))}"
            )
        if "lines" in vuln_data:
            vuln_info_chunks.append(
                f"**Lines:** {markdown.markdown_escape(vuln_data['lines'])}"
            )
        #
        finding = SastFinding(
            title=vuln_title,
            description=["\n\n".join(vuln_info_chunks)]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", vuln_severity)
        finding.set_meta("legacy.file", vuln_file)
        endpoints = list()
        if vuln_file:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=vuln_file))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

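# `map_severity` is called above but defined elsewhere in the module. The
# report format above (check_id, path, extra.message, start/end lines) looks
# like Semgrep JSON output, so the sketch below assumes Semgrep's standard
# severity names (ERROR / WARNING / INFO); the real mapping may differ.
def map_severity(severity):
    """ Map a scanner severity name to a report severity label (assumed mapping) """
    mapping = {
        "ERROR": "High",
        "WARNING": "Medium",
        "INFO": "Info",
    }
    return mapping.get(severity.upper(), "Info")
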
def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    try:
        findings = BrakemanParser(data).items
    except:  # pylint: disable=W0702
        log.exception("Failed to parse brakeman report")
        log.warning(
            "Possibly ruby code path is invalid or not Ruby-on-Rails application"
        )
        findings = list()
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    markdown.markdown_escape(item["description"]),
                    f"**References:** {markdown.markdown_escape(item['references'])}",
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    f":{item['line']}"
                ])
            ]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.BRAKEMAN_SEVERITY_MAPPING[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta("legacy.line", item["line"])
        finding.set_meta(
            "endpoints", [namedtuple("Endpoint", ["raw"])(raw=item["file_path"])]
        )
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)

def parse_findings(filename, scanner):
    """ Parse findings """
    # Load JSON
    try:
        with open(filename, "r") as file:
            data = json.load(file)
    except:  # pylint: disable=W0702
        log.exception("Failed to load report JSON")
        return
    # Severity mapping
    severity_mapping = {
        "UNKNOWN": "Info",
        "LOW": "Low",
        "MEDIUM": "Medium",
        "HIGH": "High",
        "CRITICAL": "Critical",
    }
    # Parse JSON
    if not isinstance(data, list) or not data:
        log.info("No data in report")
        return
    # Make finding instances
    for data_block in data:
        if not data_block.get("Vulnerabilities", list()):
            log.info("Skipping empty data block: %s", data_block.get("Target", data_block))
            continue
        for item in data_block.get("Vulnerabilities", list()):
            #
            vuln_id = item.get("VulnerabilityID", "")
            vuln_pkgname = item.get("PkgName", "")
            vuln_installed_version = item.get("InstalledVersion", "")
            vuln_fixed_version = item.get("FixedVersion", "")
            vuln_layer = item.get("Layer", dict()).get("DiffID", "")
            #
            vuln_title = item.get("Title", "-")
            if vuln_id:
                vuln_title = f"{vuln_id}: {vuln_title}"
            if vuln_pkgname:
                vuln_title = f"{vuln_pkgname}: {vuln_title}"
            #
            if not scanner.config.get("show_with_temp_id", False) and \
                    vuln_id.startswith("TEMP-"):
                log.info("Skipping finding with TEMP ID: %s", vuln_title)
                continue
            if not scanner.config.get("show_without_description", True) and \
                    "Description" not in item:
                log.info("Skipping finding without description: %s", vuln_title)
                continue
            #
            vuln_severity = severity_mapping[item.get("Severity", "UNKNOWN")]
            vuln_file = vuln_layer
            #
            vuln_info_chunks = list()
            #
            vuln_info_chunks.append(
                markdown.markdown_escape(item.get("Description", "-"))
            )
            #
            if vuln_id:
                vuln_info_chunks.append(
                    f"**VulnerabilityID:** {markdown.markdown_escape(vuln_id)}"
                )
            if vuln_pkgname:
                vuln_info_chunks.append(
                    f"**PkgName:** {markdown.markdown_escape(vuln_pkgname)}"
                )
            if vuln_installed_version:
                vuln_info_chunks.append(
                    f"**InstalledVersion:** {markdown.markdown_escape(vuln_installed_version)}"
                )
            if vuln_fixed_version:
                vuln_info_chunks.append(
                    f"**FixedVersion:** {markdown.markdown_escape(vuln_fixed_version)}"
                )
            if vuln_layer:
                vuln_info_chunks.append(
                    f"**Layer DiffID:** {markdown.markdown_escape(vuln_layer)}"
                )
            #
            vuln_refs = item.get("References", list())
            if vuln_refs:
                vuln_info_chunks.append("**References:**")
                for vuln_ref in vuln_refs:
                    vuln_info_chunks.append(markdown.markdown_escape(vuln_ref))
            #
            finding = SastFinding(
                title=vuln_title,
                description=["\n\n".join(vuln_info_chunks)]
            )
            finding.set_meta("tool", scanner.get_name())
            finding.set_meta("severity", vuln_severity)
            finding.set_meta("legacy.file", vuln_file)
            endpoints = list()
            if vuln_file:
                endpoints.append(
                    namedtuple("Endpoint", ["raw"])(raw=vuln_file)
                )
            finding.set_meta("endpoints", endpoints)
            log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
            scanner.findings.append(finding)
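

# None of the excerpts above are self-contained: they rely on scaffolding from
# the surrounding framework (`log`, `markdown.markdown_escape`, `constants`,
# `SastFinding`, `Endpoint`, the legacy parser classes) plus imports such as
# `json`, `html`, `pkg_resources`, and `collections.namedtuple`. The stubs
# below are a minimal, assumed harness for exercising a parser in isolation;
# they mirror how the excerpts use these names, not the framework's actual API.
import logging
from collections import namedtuple

log = logging.getLogger(__name__)

# The gitleaks parser constructs `Endpoint(raw=...)` directly; the others build
# the same shape inline via namedtuple.
Endpoint = namedtuple("Endpoint", ["raw"])


class SastFinding:
    """ Assumed minimal finding container: title, description chunks, metadata """

    def __init__(self, title, description):
        self.title = title
        self.description = description  # list of markdown chunks
        self._meta = dict()

    def set_meta(self, key, value):
        self._meta[key] = value

    def get_meta(self, key, default=None):
        return self._meta.get(key, default)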