def create_result(path, rule_id, issue_dict, rules, rule_indices):
    """Convert one mobile-scan issue into a SARIF Result, registering its rule.

    :param path: Scanned paths; path[0] is used as a fallback location when
        the issue carries no file evidence
    :param rule_id: Identifier of the violated rule
    :param issue_dict: Issue data with 'metadata' and optional 'files'
    :param rules: Mapping rule_id -> ReportingDescriptor (updated in place)
    :param rule_indices: Mapping rule_id -> rule index (updated in place)
    """
    if rule_id not in rules:
        # First occurrence of this rule: register a descriptor whose help
        # link comes from the issue metadata, defaulting to the MSTG site.
        help_link = issue_dict['metadata'].get('reference') or (
            'https://mobile-security.gitbook.io/'
            'mobile-security-testing-guide/')
        rule_indices[rule_id] = len(rules)
        rules[rule_id] = om.ReportingDescriptor(
            id=rule_id,
            name=get_rule_name(rule_id),
            help_uri=help_link,
        )
    rule = rules[rule_id]
    rule_index = rule_indices[rule_id]
    result_locations = []
    for file_entry in issue_dict.get('files', []):
        loc = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(
                uri=to_uri(file_entry['file_path'])),
        )
        loc.region = om.Region(
            start_line=file_entry['match_lines'][0],
            end_line=file_entry['match_lines'][1],
            start_column=file_entry['match_position'][0],
            end_column=file_entry['match_position'][1],
            snippet=om.ArtifactContent(text=file_entry['match_string']),
        )
        result_locations.append(om.Location(physical_location=loc))
    if not result_locations:
        # No file evidence: point at the first scanned path so the result
        # still carries a location for SARIF viewers.
        fallback = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(uri=path[0]),
        )
        fallback.region = om.Region(
            start_line=1,
            end_line=1,
            start_column=1,
            end_column=1,
            snippet=om.ArtifactContent(text='Missing Best Practice'),
        )
        result_locations.append(om.Location(physical_location=fallback))
    metadata = issue_dict['metadata']
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=metadata['description']),
        level=level_from_severity(metadata['severity']),
        locations=result_locations,
        properties={
            'owasp-mobile': metadata['owasp-mobile'],
            'masvs': metadata['masvs'],
            'cwe': metadata['cwe'],
            'reference': metadata['reference'],
        },
    )
def add_skipped_file_notifications(skips, invocation):
    """Attach skipped-file details to the run's invocation.

    :param skips: List of (file_name, reason) tuples for skipped files
    :param invocation: Invocation object for the given run
    """
    if not skips:
        return
    if invocation.tool_configuration_notifications is None:
        invocation.tool_configuration_notifications = []
    notifications = invocation.tool_configuration_notifications
    for file_name, reason in skips:
        # Each skip becomes an error-level notification pointing at the file.
        skip_location = om.Location(
            physical_location=om.PhysicalLocation(
                artifact_location=om.ArtifactLocation(uri=to_uri(file_name))
            )
        )
        notifications.append(
            om.Notification(
                level="error",
                message=om.Message(text=reason),
                locations=[skip_location],
            )
        )
def create_result(issue, rules, rule_indices, file_path_list=None):
    """Convert a single issue into a SARIF result, reusing or creating rules.

    :param issue: Issues object (or plain dict that is converted first)
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    """
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    data = issue.as_dict()
    rule, rule_index = create_or_find_rule(data, rules, rule_indices)
    location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(
            uri=to_uri(data["filename"])))
    add_region_and_context_region(location,
                                  data["line_number"],
                                  data["code"])
    severity = data["issue_severity"]
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=data["issue_text"]),
        level=level_from_severity(severity),
        locations=[om.Location(physical_location=location)],
        properties={
            "issue_confidence": data["issue_confidence"],
            "issue_severity": severity,
        },
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list=None,
                  working_dir=None):
    """Method to convert a single issue into result schema with rules

    :param tool_name: tool name
    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory

    :return: om.Result describing the issue
    """
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        if not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX:
            # BUG FIX: escape working_dir so regex metacharacters in the
            # path (backslashes on Windows, dots, parentheses, "+" in
            # directory names) are matched literally instead of being
            # interpreted as a pattern.
            filename = re.sub(r"^" + re.escape(working_dir),
                              WORKSPACE_PREFIX, filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename)))
    add_region_and_context_region(physical_location,
                                  issue_dict["line_number"],
                                  issue_dict["code"])
    # Normalize the severity reported by the tool before mapping to a level
    issue_severity = tweak_severity(tool_name, issue_dict["issue_severity"])
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=issue_dict["issue_text"]),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
        },
        hosted_viewer_uri=config.get("hosted_viewer_uri", ""),
    )
def create_result(rule_id, issue_dict, rules, rule_indices):
    """Build a SARIF result for one nodejsscan issue, registering its rule.

    :param rule_id: Identifier of the violated rule
    :param issue_dict: Issue data with 'metadata' and 'files'
    :param rules: Mapping rule_id -> ReportingDescriptor (updated in place)
    :param rule_indices: Mapping rule_id -> rule index (updated in place)
    """
    rule = rules.get(rule_id)
    if rule is not None:
        rule_index = rule_indices[rule_id]
    else:
        # First occurrence: register a descriptor linking to the
        # nodejsscan documentation anchor for this rule.
        doc_url = 'https://ajinabraham.github.io/nodejsscan/#{}'.format(
            rule_id)
        rule = om.ReportingDescriptor(
            id=rule_id,
            name=get_rule_name(rule_id),
            help_uri=doc_url,
        )
        rule_index = len(rules)
        rules[rule_id] = rule
        rule_indices[rule_id] = rule_index
    issue_locations = []
    for match in issue_dict['files']:
        phys = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(
                uri=to_uri(match['file_path'])),
        )
        phys.region = om.Region(
            start_line=match['match_lines'][0],
            end_line=match['match_lines'][1],
            start_column=match['match_position'][0],
            end_column=match['match_position'][1],
            snippet=om.ArtifactContent(text=match['match_string']),
        )
        issue_locations.append(om.Location(physical_location=phys))
    metadata = issue_dict['metadata']
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=metadata['description']),
        level=level_from_severity(metadata['severity']),
        locations=issue_locations,
        properties={
            'owasp-web': metadata['owasp-web'],
            'cwe': metadata['cwe'],
        },
    )
def add_skipped_file_notifications(skips, invocation):
    """Record each skipped file as an error-level notification on the invocation.

    :param skips: List of (file_name, reason) tuples
    :param invocation: Invocation object for the run
    """
    if not skips:
        return
    if invocation.tool_configuration_notifications is None:
        invocation.tool_configuration_notifications = []
    for file_name, reason in skips:
        invocation.tool_configuration_notifications.append(
            om.Notification(
                level="error",
                message=om.Message(text=reason),
                locations=[
                    om.Location(
                        physical_location=om.PhysicalLocation(
                            artifact_location=om.ArtifactLocation(
                                uri=to_uri(file_name))))
                ],
            )
        )
def create_result(issue, rules, rule_indices):
    """Convert one issue into a SARIF result, reusing or creating its rule."""
    data = issue.as_dict()
    rule, rule_index = create_or_find_rule(data, rules, rule_indices)
    loc = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(data["filename"]))
    )
    add_region_and_context_region(
        loc, data["line_number"], data["code"]
    )
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=data["issue_text"]),
        level=level_from_severity(data["issue_severity"]),
        locations=[om.Location(physical_location=loc)],
        properties={
            "issue_confidence": data["issue_confidence"],
            "issue_severity": data["issue_severity"],
        },
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list,
                  working_dir):
    """Method to convert a single issue into result schema with rules

    Returns None when the rule is globally ignored or the computed
    fingerprint is suppressed.

    :param tool_name: tool name
    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule_id = issue_dict.get("test_id")
    # Is this rule ignored globally?
    if rule_id in config.ignored_rules:
        return None
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        # Convert to full path only if the user wants
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            # Make it relative path
            if WORKSPACE_PREFIX == "":
                # Empty workspace means "strip the working dir entirely"
                filename = re.sub(r"^" + working_dir + "/",
                                  WORKSPACE_PREFIX, filename)
            elif not filename.startswith(working_dir):
                filename = os.path.join(WORKSPACE_PREFIX, filename)
            else:
                filename = re.sub(r"^" + working_dir,
                                  WORKSPACE_PREFIX, filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename))
    )
    add_region_and_context_region(
        physical_location, issue_dict["line_number"], issue_dict["code"]
    )
    issue_severity = tweak_severity(tool_name, issue_dict)
    # Partial fingerprints used for de-duplication / suppression of results
    fingerprint = {}
    # NOTE(review): assumes region.snippet is always set when region is —
    # confirm add_region_and_context_region guarantees this.
    if physical_location.region and physical_location.region.snippet.text:
        snippet = physical_location.region.snippet.text
        # Normalize whitespace so formatting-only changes keep the same hash
        snippet = snippet.strip().replace("\t", "").replace("\n", "")
        fingerprint = {
            "scanPrimaryLocationHash": to_fingerprint_hash(snippet,
                                                           HASH_DIGEST_SIZE)
        }
    if issue_dict.get("tags"):
        # Concatenate tag values into one string before hashing
        # (presumably all tag values are strings — verify against callers)
        tag_str = ""
        for tk, tv in issue_dict.get("tags", {}).items():
            tag_str += tv
        if tag_str:
            fingerprint["scanTagsHash"] = to_fingerprint_hash(
                tag_str, HASH_DIGEST_SIZE)
    # Filename hash
    fingerprint["scanFileHash"] = to_fingerprint_hash(filename,
                                                      HASH_DIGEST_SIZE)
    # Should we suppress this fingerprint?
    if should_suppress_fingerprint(fingerprint, working_dir):
        return None
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(
            text=issue_dict["issue_text"],
            # Markdown rendering is only enabled for ng-sast output
            markdown=issue_dict["issue_text"] if tool_name == "ng-sast" else "",
        ),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        partial_fingerprints=fingerprint,
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
            "issue_tags": issue_dict.get("tags", {}),
        },
        # NOTE(review): truthy "first_found" maps to "unchanged" — looks
        # like first_found records the scan where the issue was first seen;
        # confirm the intended baseline semantics.
        baseline_state="unchanged" if issue_dict["first_found"] else "new",
    )
def report(
    tool_name,
    tool_args,
    working_dir,
    metrics,
    skips,
    issues,
    crep_fname,
    file_path_list=None,
):
    """Prints issues in SARIF format

    :param tool_name: tool name
    :param tool_args: Args used for the tool
    :param working_dir: Working directory
    :param metrics: metrics data (not used by this variant)
    :param skips: skips data
    :param issues: issues data
    :param crep_fname: The output file name
    :param file_path_list: Full file path for any manipulation

    :return serialized_log: SARIF output data
    """
    if not tool_args:
        tool_args = []
    # Keep the original value when tool_args is already a string
    tool_args_str = tool_args
    if isinstance(tool_args, list):
        tool_args_str = " ".join(tool_args)
    repo_details = find_repo_details(working_dir)
    log_uuid = str(uuid.uuid4())
    run_uuid = config.get("run_uuid")
    # working directory to use in the log
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    wd_dir_log = WORKSPACE_PREFIX if WORKSPACE_PREFIX is not None else working_dir
    # Human-friendly driver name for the tool, falling back to the raw name
    driver_name = config.tool_purpose_message.get(tool_name, tool_name)
    # Construct SARIF log
    log = om.SarifLog(
        schema_uri="https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        version="2.1.0",
        inline_external_properties=[
            om.ExternalProperties(guid=log_uuid, run_guid=run_uuid)
        ],
        runs=[
            om.Run(
                automation_details=om.RunAutomationDetails(
                    guid=log_uuid,
                    description=om.Message(
                        text="Static Analysis Security Test results using @ShiftLeft/sast-scan"
                    ),
                ),
                tool=om.Tool(
                    driver=om.ToolComponent(
                        name=driver_name, full_name=driver_name, version="1.0.0-scan"
                    )
                ),
                invocations=[
                    om.Invocation(
                        end_time_utc=datetime.datetime.utcnow().strftime(TS_FORMAT),
                        execution_successful=True,
                        working_directory=om.ArtifactLocation(uri=to_uri(wd_dir_log)),
                    )
                ],
                # Conversion block records that sast-scan produced this log
                conversion={
                    "tool": om.Tool(
                        driver=om.ToolComponent(name="@ShiftLeft/sast-scan")
                    ),
                    "invocation": om.Invocation(
                        execution_successful=True,
                        command_line=tool_args_str,
                        arguments=tool_args,
                        working_directory=om.ArtifactLocation(uri=to_uri(wd_dir_log)),
                        end_time_utc=datetime.datetime.utcnow().strftime(TS_FORMAT),
                    ),
                },
                version_control_provenance=[
                    om.VersionControlDetails(
                        repository_uri=repo_details["repositoryUri"],
                        branch=repo_details["branch"],
                        revision_id=repo_details["revisionId"],
                    )
                ],
            )
        ],
    )
    run = log.runs[0]
    invocation = run.invocations[0]
    add_skipped_file_notifications(skips, invocation)
    add_results(tool_name, issues, run, file_path_list, working_dir)
    serialized_log = to_json(log)
    if crep_fname:
        # HTML report is written alongside the .sarif file
        html_file = crep_fname.replace(".sarif", ".html")
        with io.open(crep_fname, "w") as fileobj:
            fileobj.write(serialized_log)
            if tool_name != "empty-scan":
                render_html(json.loads(serialized_log), html_file)
            if fileobj.name != sys.stdout.name:
                LOG.debug(
                    "SARIF and HTML report written to file: %s, %s :thumbsup:",
                    fileobj.name,
                    html_file,
                )
    return serialized_log
def create_result(tool_name, issue, rules, rule_indices, file_path_list=None,
                  working_dir=None):
    """Method to convert a single issue into result schema with rules

    :param tool_name: tool name
    :param issue: Issues object
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        # Convert to full path only if the user wants
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            # Make it relative path
            if WORKSPACE_PREFIX == "":
                filename = re.sub(r"^" + working_dir + "/",
                                  WORKSPACE_PREFIX, filename)
            else:
                filename = re.sub(r"^" + working_dir,
                                  WORKSPACE_PREFIX, filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename)))
    add_region_and_context_region(physical_location,
                                  issue_dict["line_number"],
                                  issue_dict["code"])
    issue_severity = tweak_severity(tool_name, issue_dict["issue_severity"])
    fingerprint = {}
    # Disabled fingerprinting code kept as an inert string literal below;
    # fingerprint stays empty until it is re-enabled.
    """
    if physical_location.region and physical_location.region.snippet.text:
        snippet = physical_location.region.snippet.text
        snippet = snippet.strip().replace("\t", "").replace("\n", "")
        h = blake2b(digest_size=HASH_DIGEST_SIZE)
        h.update(snippet.encode())
        fingerprint = {"primaryLocationLineHash": h.hexdigest() + ":1"}
    """
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(
            text=issue_dict["issue_text"],
            # Markdown rendering is only enabled for the inspect tool
            markdown=issue_dict["issue_text"] if tool_name == "inspect" else "",
        ),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        partial_fingerprints=fingerprint,
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
        },
    )
def report(
    tool_name,
    tool_args,
    working_dir,
    metrics,
    skips,
    issues,
    crep_fname,
    file_path_list=None,
):
    """Prints issues in SARIF format

    :param tool_name: tool name
    :param tool_args: Args used for the tool
    :param working_dir: Working directory
    :param metrics: metrics data (recomputed from issues by this function)
    :param skips: skips data
    :param issues: issues data
    :param crep_fname: The output file name
    :param file_path_list: Full file path for any manipulation

    :return serialized_log: SARIF output data
    """
    if not tool_args:
        tool_args = []
    tool_args_str = tool_args
    if isinstance(tool_args, list):
        tool_args_str = " ".join(tool_args)
    repo_details = find_repo_details(working_dir)
    log_uuid = str(uuid.uuid4())
    run_uuid = config.get("run_uuid")
    # Populate metrics
    metrics = {
        "total": 0,
        "critical": 0,
        "high": 0,
        "medium": 0,
        "low": 0,
    }
    total = 0
    for issue in issues:
        issue_dict = issue_from_dict(issue).as_dict()
        rule_id = issue_dict.get("test_id")
        # Is this rule ignored globally?
        if rule_id in config.ignored_rules:
            continue
        total += 1
        # Fix up severity for certain tools
        issue_severity = tweak_severity(tool_name, issue_dict)
        key = issue_severity.lower()
        if not metrics.get(key):
            metrics[key] = 0
        metrics[key] += 1
    metrics["total"] = total
    # working directory to use in the log
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    wd_dir_log = WORKSPACE_PREFIX if WORKSPACE_PREFIX is not None else working_dir
    driver_name = config.tool_purpose_message.get(tool_name, tool_name)
    # BUG FIX: the original condition was
    #   tool_name != "inspect" and config.get("CI") or config.get("GITHUB_ACTIONS")
    # where "and" binds tighter than "or", so a set GITHUB_ACTIONS flag
    # branded even the "inspect" driver. Parenthesize so the branding only
    # applies to non-inspect tools running under either CI indicator.
    if tool_name != "inspect" and (
            config.get("CI") or config.get("GITHUB_ACTIONS")):
        driver_name = "ShiftLeft " + driver_name
    # Construct SARIF log
    log = om.SarifLog(
        schema_uri="https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        version="2.1.0",
        inline_external_properties=[
            om.ExternalProperties(guid=log_uuid, run_guid=run_uuid)
        ],
        runs=[
            om.Run(
                automation_details=om.RunAutomationDetails(
                    guid=log_uuid,
                    description=om.Message(
                        text="Static Analysis Security Test results using @ShiftLeft/sast-scan"
                    ),
                ),
                tool=om.Tool(driver=om.ToolComponent(name=driver_name)),
                invocations=[
                    om.Invocation(
                        end_time_utc=datetime.datetime.utcnow().strftime(
                            TS_FORMAT),
                        execution_successful=True,
                        working_directory=om.ArtifactLocation(
                            uri=to_uri(wd_dir_log)),
                    )
                ],
                # Conversion block records that sast-scan produced this log
                conversion={
                    "tool": om.Tool(driver=om.ToolComponent(
                        name="@ShiftLeft/sast-scan")),
                    "invocation": om.Invocation(
                        execution_successful=True,
                        command_line=tool_args_str,
                        arguments=tool_args,
                        working_directory=om.ArtifactLocation(
                            uri=to_uri(wd_dir_log)),
                        end_time_utc=datetime.datetime.utcnow().strftime(
                            TS_FORMAT),
                    ),
                },
                properties={"metrics": metrics},
                version_control_provenance=[
                    om.VersionControlDetails(
                        repository_uri=repo_details["repositoryUri"],
                        branch=repo_details["branch"],
                        revision_id=repo_details["revisionId"],
                    )
                ],
            )
        ],
    )
    run = log.runs[0]
    invocation = run.invocations[0]
    add_skipped_file_notifications(skips, invocation)
    add_results(tool_name, issues, run, file_path_list, working_dir)
    serialized_log = to_json(log)
    if crep_fname:
        # HTML report is written alongside the .sarif file
        html_file = crep_fname.replace(".sarif", ".html")
        with io.open(crep_fname, "w") as fileobj:
            fileobj.write(serialized_log)
            render_html(json.loads(serialized_log), html_file)
            if fileobj.name != sys.stdout.name:
                LOG.debug(
                    "SARIF and HTML report written to file: %s, %s 👍",
                    fileobj.name,
                    html_file,
                )
    return serialized_log
def print_matches(self, matches, rules=None, filenames=None):
    """Output all the matches as a serialized SARIF log.

    :param matches: Lint matches to convert into SARIF results
    :param rules: RulesCollection; defaults to an empty collection
    :param filenames: unused here
    :return: JSON string of the SARIF log
    """
    if not rules:
        rules = RulesCollection()

    # These "base" rules are not passed into formatters
    rules.extend([ParseError(), TransformError(), RuleError()])

    results = []
    for match in matches:
        results.append(
            sarif.Result(
                rule_id=match.rule.id,
                message=sarif.Message(text=match.message),
                level=self._to_sarif_level(match.rule.severity),
                locations=[
                    sarif.Location(
                        physical_location=sarif.PhysicalLocation(
                            artifact_location=sarif.ArtifactLocation(
                                uri=match.filename,
                                uri_base_id=self.uri_base_id,
                            ),
                            region=sarif.Region(
                                start_column=match.columnnumber,
                                start_line=match.linenumber,
                                end_column=match.columnnumberend,
                                end_line=match.linenumberend,
                            ),
                        ))
                ],
            ))

    # Output only the rules that have matches
    matched_rules = set(r.rule_id for r in results)
    rules_map = {r.id: r for r in list(rules)}
    # NOTE(review): the help_uri guard tests rules_map[rule_id] (always
    # truthy for matched rules) — looks like it was meant to guard
    # source_url itself; confirm intent before changing.
    rules = [
        sarif.ReportingDescriptor(
            id=rule_id,
            short_description=sarif.MultiformatMessageString(
                text=rules_map[rule_id].shortdesc),
            full_description=sarif.MultiformatMessageString(
                text=rules_map[rule_id].description),
            help_uri=rules_map[rule_id].source_url if rules_map[rule_id]
            else None)
        for rule_id in matched_rules
    ]

    run = sarif.Run(
        tool=sarif.Tool(driver=sarif.ToolComponent(
            name='cfn-lint',
            short_description=sarif.MultiformatMessageString(
                text=('Validates AWS CloudFormation templates against'
                      ' the resource specification and additional'
                      ' checks.')),
            information_uri=
            'https://github.com/aws-cloudformation/cfn-lint',
            rules=rules,
            version=cfnlint.version.__version__,
        ), ),
        original_uri_base_ids={
            # NOTE(review): MultiformatMessageString is called positionally
            # here but with text= everywhere else — confirm the positional
            # argument maps to the text field.
            self.uri_base_id: sarif.ArtifactLocation(
                description=sarif.MultiformatMessageString(
                    'The directory in which cfn-lint was run.'))
        },
        results=results,
    )

    log = sarif.SarifLog(version=self.version,
                         schema_uri=self.schema,
                         runs=[run])

    # IMPORTANT: 'warning' is the default level in SARIF and will be
    # stripped by serialization.
    return to_json(log)
def report(
    tool_name,
    tool_args,
    working_dir,
    metrics,
    skips,
    issues,
    crep_fname,
    file_path_list=None,
):
    """Prints issues in SARIF format

    :param tool_name: tool name
    :param tool_args: Args used for the tool
    :param working_dir: Working directory
    :param metrics: metrics data (recomputed from issues by this function)
    :param skips: skips data
    :param issues: issues data
    :param crep_fname: The output file name
    :param file_path_list: Full file path for any manipulation

    :return serialized_log: SARIF output data
    """
    if not tool_args:
        tool_args = []
    # Keep the original value when tool_args is already a string
    tool_args_str = tool_args
    if isinstance(tool_args, list):
        tool_args_str = " ".join(tool_args)
    repo_details = find_repo_details(working_dir)
    log_uuid = str(uuid.uuid4())
    run_uuid = config.get("run_uuid")
    # Populate metrics
    metrics = {
        "total": 0,
        "critical": 0,
        "high": 0,
        "medium": 0,
        "low": 0,
    }
    metrics["total"] = len(issues)
    for issue in issues:
        issue_dict = issue_from_dict(issue).as_dict()
        # Count issues per lower-cased severity bucket
        key = issue_dict["issue_severity"].lower()
        if not metrics.get(key):
            metrics[key] = 0
        metrics[key] += 1
    # Construct SARIF log
    log = om.SarifLog(
        schema_uri=
        "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        version="2.1.0",
        inline_external_properties=[
            om.ExternalProperties(guid=log_uuid, run_guid=run_uuid)
        ],
        runs=[
            om.Run(
                automation_details=om.RunAutomationDetails(
                    guid=log_uuid,
                    description=om.Message(
                        text=
                        "Static Analysis Security Test results using @AppThreat/sast-scan"
                    ),
                ),
                tool=om.Tool(driver=om.ToolComponent(
                    name=config.tool_purpose_message.get(tool_name,
                                                         tool_name))),
                invocations=[
                    om.Invocation(
                        end_time_utc=datetime.datetime.utcnow().strftime(
                            TS_FORMAT),
                        execution_successful=True,
                        working_directory=om.ArtifactLocation(
                            uri=to_uri(working_dir)),
                    )
                ],
                # Conversion block records that sast-scan produced this log
                conversion={
                    "tool": om.Tool(driver=om.ToolComponent(
                        name="@AppThreat/sast-scan")),
                    "invocation": om.Invocation(
                        execution_successful=True,
                        command_line=tool_args_str,
                        arguments=tool_args,
                        working_directory=om.ArtifactLocation(
                            uri=to_uri(working_dir)),
                        end_time_utc=datetime.datetime.utcnow().strftime(
                            TS_FORMAT),
                    ),
                },
                properties={"metrics": metrics},
                version_control_provenance=[
                    om.VersionControlDetails(
                        repository_uri=repo_details["repositoryUri"],
                        branch=repo_details["branch"],
                        revision_id=repo_details["revisionId"],
                    )
                ],
            )
        ],
    )
    run = log.runs[0]
    invocation = run.invocations[0]
    add_skipped_file_notifications(skips, invocation)
    add_results(tool_name, issues, run, file_path_list, working_dir)
    serialized_log = to_json(log)
    if crep_fname:
        with io.open(crep_fname, "w") as fileobj:
            fileobj.write(serialized_log)
            if fileobj.name != sys.stdout.name:
                LOG.info("SARIF output written to file: %s 👍", fileobj.name)
    return serialized_log
def report(
    tool_name,
    tool_args,
    working_dir,
    metrics,
    skips,
    issues,
    crep_fname,
    file_path_list=None,
):
    """Prints issues in SARIF format

    :param tool_name: tool name
    :param tool_args: Args used for the tool
    :param working_dir: Working directory
    :param metrics: metrics data
    :param skips: skips data
    :param issues: issues data
    :param crep_fname: The output file name
    :param file_path_list: Full file path for any manipulation

    :return serialized_log: SARIF output data
    """
    if not tool_args:
        tool_args = []
    tool_args_str = (" ".join(tool_args)
                     if isinstance(tool_args, list) else tool_args)
    # Invocation describing the tool run itself
    main_invocation = om.Invocation(
        end_time_utc=datetime.datetime.utcnow().strftime(TS_FORMAT),
        execution_successful=True,
        working_directory=om.ArtifactLocation(uri=to_uri(working_dir)),
    )
    # Conversion block records that sast-scan produced this SARIF log
    conversion_invocation = om.Invocation(
        execution_successful=True,
        command_line=tool_args_str,
        arguments=tool_args,
        working_directory=om.ArtifactLocation(uri=to_uri(working_dir)),
        end_time_utc=datetime.datetime.utcnow().strftime(TS_FORMAT),
    )
    sarif_run = om.Run(
        tool=om.Tool(driver=om.ToolComponent(name=tool_name)),
        invocations=[main_invocation],
        conversion={
            "tool": om.Tool(
                driver=om.ToolComponent(name="@AppThreat/sast-scan")),
            "invocation": conversion_invocation,
        },
        properties={"metrics": metrics},
    )
    log = om.SarifLog(
        schema_uri="https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        version="2.1.0",
        runs=[sarif_run],
    )
    add_skipped_file_notifications(skips, main_invocation)
    add_results(issues, sarif_run, file_path_list)
    serialized_log = to_json(log)
    if crep_fname:
        with io.open(crep_fname, "w") as report_file:
            report_file.write(serialized_log)
            if report_file.name != sys.stdout.name:
                LOG.info("SARIF output written to file: %s", report_file.name)
    return serialized_log