def test_create_result():
    """Verify create_result rewrites the artifact URI according to WORKSPACE.

    Three cases are checked: no override (file:// URI under the working
    directory), a filesystem override (/foo/bar), and a URL override
    (GitHub blob URL). convertLib is reloaded after each env change because
    it reads WORKSPACE at import/config time.
    """
    issue = issueLib.issue_from_dict({
        "description": "MD5 is a a weak hash which is known to have collision. Use a strong hashing function.",
        "filename": "InsufficientPasswordHash.js",
        "line": 3,
        "lines": 'function hashPassword(password) {\n var crypto = require("crypto");\n var hasher = crypto.createHash(\'md5\');\n var hashed = hasher.update(password).digest("hex"); // BAD\n return hashed;\n}',
        "path": "/app/src/CWE-916/examples/InsufficientPasswordHash.js",
        "sha2": "bfc3a2dfec54a8e77e41c3e3d7a6d87477ea1ed6d1cb3b1b60b8e135b0d18368",
        "tag": "node",
        "title": "Weak Hash used - MD5",
    })
    data = convertLib.create_result("nodetest", issue, {}, {}, None, "/app/src")
    # No WORKSPACE override: working dir is prefixed and emitted as file:// URI
    assert (data.locations[0].physical_location.artifact_location.uri ==
            "file:///app/src/CWE-916/examples/InsufficientPasswordHash.js")
    # Override the workspace and check the location
    os.environ["WORKSPACE"] = "/foo/bar"
    importlib.reload(convertLib)
    data = convertLib.create_result("nodetest", issue, {}, {}, None, "/app/src")
    assert (data.locations[0].physical_location.artifact_location.uri ==
            "file:///foo/bar/CWE-916/examples/InsufficientPasswordHash.js")
    # Override the workspace and check the location
    os.environ[
        "WORKSPACE"] = "https://github.com/ShiftLeftSecurity/cdxgen/blob/master"
    importlib.reload(convertLib)
    data = convertLib.create_result("nodetest", issue, {}, {}, None, "/app/src")
    assert (
        data.locations[0].physical_location.artifact_location.uri ==
        "https://github.com/ShiftLeftSecurity/cdxgen/blob/master/CWE-916/examples/InsufficientPasswordHash.js"
    )
def create_result(issue, rules, rule_indices, file_path_list=None):
    """Convert a single issue into a SARIF result, registering its rule.

    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    """
    # Accept plain dicts as well as Issue objects
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    idict = issue.as_dict()
    rule, rule_index = create_or_find_rule(idict, rules, rule_indices)
    # Build the physical location and attach region/context snippets
    location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(idict["filename"]))
    )
    add_region_and_context_region(location, idict["line_number"], idict["code"])
    severity = idict["issue_severity"]
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=idict["issue_text"]),
        level=level_from_severity(severity),
        locations=[om.Location(physical_location=location)],
        properties={
            "issue_confidence": idict["issue_confidence"],
            "issue_severity": severity,
        },
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list=None,
                  working_dir=None):
    """Method to convert a single issue into result schema with rules

    :param tool_name: tool name
    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    # None means "no override"; an empty string is a valid override that
    # requests paths relative to the working directory, so all checks below
    # must compare against None instead of relying on truthiness
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically,
        # but only when the user has not overridden the workspace
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            if WORKSPACE_PREFIX == "":
                # Empty override: strip the working directory and its
                # trailing separator to leave a relative path
                filename = re.sub(r"^" + working_dir + "/", WORKSPACE_PREFIX,
                                  filename)
            else:
                filename = re.sub(r"^" + working_dir, WORKSPACE_PREFIX,
                                  filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename)))
    add_region_and_context_region(physical_location,
                                  issue_dict["line_number"],
                                  issue_dict["code"])
    # Fix up severity for certain tools
    issue_severity = tweak_severity(tool_name, issue_dict["issue_severity"])
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=issue_dict["issue_text"]),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
        },
        hosted_viewer_uri=config.get("hosted_viewer_uri", ""),
    )
def add_results(tool_name, issues, run, file_path_list=None, working_dir=None):
    """Method to convert issues into results schema

    :param tool_name: tool name
    :param issues: Issues found
    :param run: Run object
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    if run.results is None:
        run.results = []
    rules = {}
    rule_indices = {}
    # Populate metrics
    metrics = {
        "total": 0,
        "critical": 0,
        "high": 0,
        "medium": 0,
        "low": 0,
    }
    total = 0
    for issue in issues:
        result = create_result(
            tool_name, issue, rules, rule_indices, file_path_list, working_dir
        )
        if result:
            run.results.append(result)
        issue_dict = issue_from_dict(issue).as_dict()
        rule_id = issue_dict.get("test_id")
        # Is this rule ignored globally?
        if rule_id in config.ignored_rules:
            continue
        total += 1
        # Fix up severity for certain tools. NOTE(review): the whole
        # issue_dict is passed here so tweak_severity can inspect more than
        # just the raw severity string — confirm against its signature.
        issue_severity = tweak_severity(tool_name, issue_dict)
        key = issue_severity.lower()
        if not metrics.get(key):
            metrics[key] = 0
        metrics[key] += 1
    if len(rules) > 0:
        run.tool.driver.rules = list(rules.values())
    metrics["total"] = total
    run.properties = {"metrics": metrics}
def test_create_result_relative():
    """Verify that an empty WORKSPACE override yields a relative path.

    With WORKSPACE="" the working directory prefix (/app) must be stripped
    from the artifact URI instead of being expanded to a file:// URI.
    """
    os.environ["WORKSPACE"] = ""
    importlib.reload(convertLib)
    issue = issueLib.issue_from_dict({
        "line": "VERY_REDACTED ",
        "offender": "REDACTED",
        "commit": "06fd7b1f844f88fb7821df498ce6d209cb9ad875",
        "repo": "app",
        "rule": "Generic Credential",
        "commitMessage": "Add secret\n",
        "author": "Team ShiftLeft",
        "email": "*****@*****.**",
        "file": "src/main/README-new.md",
        "date": "2020-01-12T19:45:43Z",
        "tags": "key, API, generic",
    })
    data = convertLib.create_result("gitleaks", issue, {}, {}, None, "/app")
    # Relative path expected: no /app prefix, no file:// scheme
    assert (data.locations[0].physical_location.artifact_location.uri ==
            "src/main/README-new.md")
def suppress_issues(issues):
    """Suppress issues based on the presence of certain tags and ignore logic

    :param issues: List of issues to be checked
    :return Filtered issues, Suppressed list
    """
    markers = config.get("suppress_markers", [])

    def _is_suppressed(issue_dict):
        # Normalise the code snippet so markers can match across lines/tabs
        code = issue_dict.get("code", "").replace("\n", " ").replace("\t", " ")
        # Globally ignored rule id
        if issue_dict.get("test_id") in config.ignored_rules:
            return True
        # Inline suppression marker present in the offending code
        if code and any(marker in code for marker in markers):
            return True
        filename = issue_dict.get("filename")
        if filename:
            # Ignored by filename pattern
            if is_ignored_file(None, file_name=filename):
                return True
            # Ignored because it lives under an ignored directory
            if any(filename.startswith(f"{igdir}/")
                   for igdir in config.get("ignore_directories")):
                return True
        return False

    filtered_list = []
    suppress_list = []
    for issue in issues:
        bucket = (suppress_list
                  if _is_suppressed(issue_from_dict(issue).as_dict())
                  else filtered_list)
        bucket.append(issue)
    return filtered_list, suppress_list
def create_result(tool_name, issue, rules, rule_indices, file_path_list,
                  working_dir):
    """Method to convert a single issue into result schema with rules.

    Returns None when the issue is globally ignored or its fingerprint is
    suppressed; otherwise returns an om.Result carrying location, partial
    fingerprints and severity/confidence properties.

    :param tool_name: tool name
    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    # None means "no override"; empty string means "make paths relative"
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule_id = issue_dict.get("test_id")
    # Is this rule ignored globally?
    if rule_id in config.ignored_rules:
        return None
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        # Convert to full path only if the user wants
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            # Make it relative path
            if WORKSPACE_PREFIX == "":
                filename = re.sub(r"^" + working_dir + "/", WORKSPACE_PREFIX,
                                  filename)
            elif not filename.startswith(working_dir):
                # Relative filename: just prepend the workspace prefix
                filename = os.path.join(WORKSPACE_PREFIX, filename)
            else:
                filename = re.sub(r"^" + working_dir, WORKSPACE_PREFIX,
                                  filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename))
    )
    add_region_and_context_region(
        physical_location, issue_dict["line_number"], issue_dict["code"]
    )
    issue_severity = tweak_severity(tool_name, issue_dict)
    # Partial fingerprints: snippet hash, tags hash and filename hash are
    # used both for SARIF baselining and for local suppression below
    fingerprint = {}
    if physical_location.region and physical_location.region.snippet.text:
        snippet = physical_location.region.snippet.text
        # Whitespace-insensitive hash so reformatting doesn't change identity
        snippet = snippet.strip().replace("\t", "").replace("\n", "")
        fingerprint = {
            "scanPrimaryLocationHash": to_fingerprint_hash(snippet,
                                                           HASH_DIGEST_SIZE)
        }
    if issue_dict.get("tags"):
        # Concatenate tag values only (keys are ignored) for the tags hash
        tag_str = ""
        for tk, tv in issue_dict.get("tags", {}).items():
            tag_str += tv
        if tag_str:
            fingerprint["scanTagsHash"] = to_fingerprint_hash(
                tag_str, HASH_DIGEST_SIZE)
    # Filename hash
    fingerprint["scanFileHash"] = to_fingerprint_hash(filename,
                                                      HASH_DIGEST_SIZE)
    # Should we suppress this fingerprint?
    if should_suppress_fingerprint(fingerprint, working_dir):
        return None
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(
            text=issue_dict["issue_text"],
            # Markdown rendering is only enabled for ng-sast results
            markdown=issue_dict["issue_text"] if tool_name == "ng-sast" else "",
        ),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        partial_fingerprints=fingerprint,
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
            "issue_tags": issue_dict.get("tags", {}),
        },
        # NOTE(review): "first_found" truthy -> "unchanged"; presumably set
        # upstream by baseline comparison — confirm against caller
        baseline_state="unchanged" if issue_dict["first_found"] else "new",
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list=None,
                  working_dir=None):
    """Method to convert a single issue into result schema with rules

    :param tool_name: tool name
    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    # None means "no override"; empty string means "make paths relative"
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        # Convert to full path only if the user wants
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            # Make it relative path
            if WORKSPACE_PREFIX == "":
                filename = re.sub(r"^" + working_dir + "/", WORKSPACE_PREFIX,
                                  filename)
            else:
                filename = re.sub(r"^" + working_dir, WORKSPACE_PREFIX,
                                  filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename)))
    add_region_and_context_region(physical_location,
                                  issue_dict["line_number"],
                                  issue_dict["code"])
    # Fix up severity for certain tools
    issue_severity = tweak_severity(tool_name, issue_dict["issue_severity"])
    # Fingerprinting is currently disabled; an empty dict is still emitted
    # so the SARIF result carries a partial_fingerprints property
    fingerprint = {}
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(
            text=issue_dict["issue_text"],
            # Markdown rendering is only enabled for inspect results
            markdown=issue_dict["issue_text"] if tool_name == "inspect" else "",
        ),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        partial_fingerprints=fingerprint,
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
        },
    )
def report(
    tool_name,
    tool_args,
    working_dir,
    metrics,
    skips,
    issues,
    crep_fname,
    file_path_list=None,
):
    """Prints issues in SARIF format

    :param tool_name: tool name
    :param tool_args: Args used for the tool
    :param working_dir: Working directory
    :param metrics: metrics data
    :param skips: skips data
    :param issues: issues data
    :param crep_fname: The output file name
    :param file_path_list: Full file path for any manipulation
    :return serialized_log: SARIF output data
    """
    if not tool_args:
        tool_args = []
    # SARIF wants the command line as a single string but the arguments as a
    # list, so keep both forms
    tool_args_str = tool_args
    if isinstance(tool_args, list):
        tool_args_str = " ".join(tool_args)
    repo_details = find_repo_details(working_dir)
    log_uuid = str(uuid.uuid4())
    run_uuid = config.get("run_uuid")
    # Populate metrics
    # NOTE(review): this shadows and discards the `metrics` parameter —
    # severity counts are always recomputed from the issues here
    metrics = {
        "total": 0,
        "critical": 0,
        "high": 0,
        "medium": 0,
        "low": 0,
    }
    metrics["total"] = len(issues)
    for issue in issues:
        issue_dict = issue_from_dict(issue).as_dict()
        issue_severity = issue_dict["issue_severity"]
        # Fix up severity for certain tools
        issue_severity = tweak_severity(tool_name, issue_severity)
        key = issue_severity.lower()
        if not metrics.get(key):
            metrics[key] = 0
        metrics[key] += 1
    # working directory to use in the log
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    wd_dir_log = WORKSPACE_PREFIX if WORKSPACE_PREFIX is not None else working_dir
    driver_name = config.tool_purpose_message.get(tool_name, tool_name)
    # Brand the driver name when running inside CI / GitHub Actions
    if config.get("CI") or config.get("GITHUB_ACTIONS"):
        driver_name = "ShiftLeft " + driver_name
    # Construct SARIF log
    log = om.SarifLog(
        schema_uri=
        "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        version="2.1.0",
        inline_external_properties=[
            om.ExternalProperties(guid=log_uuid, run_guid=run_uuid)
        ],
        runs=[
            om.Run(
                automation_details=om.RunAutomationDetails(
                    guid=log_uuid,
                    description=om.Message(
                        text=
                        "Static Analysis Security Test results using @ShiftLeft/sast-scan"
                    ),
                ),
                tool=om.Tool(driver=om.ToolComponent(name=driver_name)),
                invocations=[
                    om.Invocation(
                        end_time_utc=datetime.datetime.utcnow().strftime(
                            TS_FORMAT),
                        execution_successful=True,
                        working_directory=om.ArtifactLocation(
                            uri=to_uri(wd_dir_log)),
                    )
                ],
                # Conversion block records how the original tool output was
                # converted into SARIF
                conversion={
                    "tool": om.Tool(driver=om.ToolComponent(
                        name="@ShiftLeft/sast-scan")),
                    "invocation": om.Invocation(
                        execution_successful=True,
                        command_line=tool_args_str,
                        arguments=tool_args,
                        working_directory=om.ArtifactLocation(
                            uri=to_uri(wd_dir_log)),
                        end_time_utc=datetime.datetime.utcnow().strftime(
                            TS_FORMAT),
                    ),
                },
                properties={"metrics": metrics},
                version_control_provenance=[
                    om.VersionControlDetails(
                        repository_uri=repo_details["repositoryUri"],
                        branch=repo_details["branch"],
                        revision_id=repo_details["revisionId"],
                    )
                ],
            )
        ],
    )
    run = log.runs[0]
    invocation = run.invocations[0]
    add_skipped_file_notifications(skips, invocation)
    add_results(tool_name, issues, run, file_path_list, working_dir)
    serialized_log = to_json(log)
    # Persist SARIF plus a rendered HTML report when an output file is given
    if crep_fname:
        html_file = crep_fname.replace(".sarif", ".html")
        with io.open(crep_fname, "w") as fileobj:
            fileobj.write(serialized_log)
        render_html(json.loads(serialized_log), html_file)
        if fileobj.name != sys.stdout.name:
            LOG.debug(
                "SARIF and HTML report written to file: %s, %s 👍",
                fileobj.name,
                html_file,
            )
    return serialized_log