def create_result(path, rule_id, issue_dict, rules, rule_indices):
    """Build a SARIF Result for one MobSF issue, registering its rule on
    first use.

    ``rules`` and ``rule_indices`` are mutated in place so that repeated
    rule ids reuse the same ReportingDescriptor and index.  When the issue
    carries no file matches, the result is anchored to ``path[0]`` with a
    placeholder "Missing Best Practice" snippet.
    """
    metadata = issue_dict['metadata']
    if rule_id in rules:
        rule = rules[rule_id]
        rule_index = rule_indices[rule_id]
    else:
        # Fall back to the MSTG guide when the issue has no reference URL.
        help_url = metadata.get('reference') or (
            'https://mobile-security.gitbook.io/'
            'mobile-security-testing-guide/')
        rule = om.ReportingDescriptor(
            id=rule_id,
            name=get_rule_name(rule_id),
            help_uri=help_url,
        )
        rule_index = len(rules)
        rules[rule_id] = rule
        rule_indices[rule_id] = rule_index
    locations = []
    for finding in issue_dict.get('files', []):
        loc = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(
                uri=to_uri(finding['file_path'])),
        )
        loc.region = om.Region(
            start_line=finding['match_lines'][0],
            end_line=finding['match_lines'][1],
            start_column=finding['match_position'][0],
            end_column=finding['match_position'][1],
            snippet=om.ArtifactContent(text=finding['match_string']),
        )
        locations.append(om.Location(physical_location=loc))
    if not locations:
        # Findings without file matches still need one location per SARIF.
        fallback = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(uri=path[0]),
        )
        fallback.region = om.Region(
            start_line=1,
            end_line=1,
            start_column=1,
            end_column=1,
            snippet=om.ArtifactContent(text='Missing Best Practice'),
        )
        locations.append(om.Location(physical_location=fallback))
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=metadata['description']),
        level=level_from_severity(metadata['severity']),
        locations=locations,
        properties={
            'owasp-mobile': metadata['owasp-mobile'],
            'masvs': metadata['masvs'],
            'cwe': metadata['cwe'],
            'reference': metadata['reference'],
        },
    )
def add_skipped_file_notifications(skips, invocation):
    """Method to add skipped files details to the output

    :param skips: List of (file_name, reason) pairs skipped by the tool
    :param invocation: Invocation object for the given run
    """
    if not skips:
        return
    # Lazily create the notification list on the invocation object.
    if invocation.tool_configuration_notifications is None:
        invocation.tool_configuration_notifications = []
    notifications = invocation.tool_configuration_notifications
    for file_name, reason in skips:
        artifact = om.ArtifactLocation(uri=to_uri(file_name))
        location = om.Location(
            physical_location=om.PhysicalLocation(artifact_location=artifact)
        )
        notifications.append(
            om.Notification(
                level="error",
                message=om.Message(text=reason),
                locations=[location],
            )
        )
def create_result(issue, rules, rule_indices, file_path_list=None):
    """Method to convert a single issue into result schema with rules

    :param issue: Issues object (or its dict representation)
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    """
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    data = issue.as_dict()
    rule, rule_index = create_or_find_rule(data, rules, rule_indices)
    location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(data["filename"])))
    add_region_and_context_region(location,
                                  data["line_number"],
                                  data["code"])
    severity = data["issue_severity"]
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=data["issue_text"]),
        level=level_from_severity(severity),
        locations=[om.Location(physical_location=location)],
        properties={
            "issue_confidence": data["issue_confidence"],
            "issue_severity": severity,
        },
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list=None,
                  working_dir=None):
    """Method to convert a single issue into result schema with rules

    :param tool_name: tool name
    :param issue: Issues object (or its dict representation)
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        if not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX:
            # BUG FIX: working_dir is a filesystem path, not a regex.
            # Unescaped, characters such as '+' or '\' (Windows paths)
            # would be treated as regex metacharacters or raise re.error.
            filename = re.sub(r"^" + re.escape(working_dir),
                              WORKSPACE_PREFIX, filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename)))
    add_region_and_context_region(physical_location,
                                  issue_dict["line_number"],
                                  issue_dict["code"])
    issue_severity = issue_dict["issue_severity"]
    # Allow per-tool severity overrides.
    issue_severity = tweak_severity(tool_name, issue_severity)
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=issue_dict["issue_text"]),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
        },
        hosted_viewer_uri=config.get("hosted_viewer_uri", ""),
    )
def create_result(rule_id, issue_dict, rules, rule_indices):
    """Build a SARIF Result for one njsscan issue, registering its rule on
    first use.

    ``rules`` and ``rule_indices`` are mutated in place so that repeated
    rule ids reuse the same ReportingDescriptor and index.
    """
    if rule_id in rules:
        rule = rules[rule_id]
        rule_index = rule_indices[rule_id]
    else:
        rule = om.ReportingDescriptor(
            id=rule_id,
            name=get_rule_name(rule_id),
            help_uri='https://ajinabraham.github.io/nodejsscan/#{}'.format(
                rule_id),
        )
        rule_index = len(rules)
        rules[rule_id] = rule
        rule_indices[rule_id] = rule_index
    locations = []
    for match in issue_dict['files']:
        loc = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(
                uri=to_uri(match['file_path'])),
        )
        loc.region = om.Region(
            start_line=match['match_lines'][0],
            end_line=match['match_lines'][1],
            start_column=match['match_position'][0],
            end_column=match['match_position'][1],
            snippet=om.ArtifactContent(text=match['match_string']),
        )
        locations.append(om.Location(physical_location=loc))
    meta = issue_dict['metadata']
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=meta['description']),
        level=level_from_severity(meta['severity']),
        locations=locations,
        properties={
            'owasp-web': meta['owasp-web'],
            'cwe': meta['cwe'],
        },
    )
def add_skipped_file_notifications(skips, invocation):
    """Attach an error-level notification to the invocation for every
    skipped file in ``skips`` (a list of ``(file_name, reason)`` pairs)."""
    if not skips:
        return
    # Lazily create the notification list on the invocation object.
    if invocation.tool_configuration_notifications is None:
        invocation.tool_configuration_notifications = []
    for file_name, reason in skips:
        physical = om.PhysicalLocation(
            artifact_location=om.ArtifactLocation(uri=to_uri(file_name)))
        invocation.tool_configuration_notifications.append(
            om.Notification(
                level="error",
                message=om.Message(text=reason),
                locations=[om.Location(physical_location=physical)],
            ))
def create_result(issue, rules, rule_indices):
    """Convert a single issue into a SARIF Result, registering its rule
    via ``create_or_find_rule``."""
    data = issue.as_dict()
    rule, rule_index = create_or_find_rule(data, rules, rule_indices)
    location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(data["filename"])))
    add_region_and_context_region(location, data["line_number"], data["code"])
    severity = data["issue_severity"]
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(text=data["issue_text"]),
        level=level_from_severity(severity),
        locations=[om.Location(physical_location=location)],
        properties={
            "issue_confidence": data["issue_confidence"],
            "issue_severity": severity,
        },
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list,
                  working_dir):
    """Method to convert a single issue into result schema with rules

    Returns None when the issue's rule is globally ignored or when its
    fingerprint is suppressed.

    :param tool_name: tool name
    :param issue: Issues object (or its dict representation)
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule_id = issue_dict.get("test_id")
    # Is this rule ignored globally?
    if rule_id in config.ignored_rules:
        return None
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        # Convert to full path only if the user wants
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            # BUG FIX: working_dir is a filesystem path, not a regex.
            # Unescaped, characters such as '+' or '\' (Windows paths)
            # would be treated as regex metacharacters or raise re.error.
            escaped_dir = re.escape(working_dir)
            if WORKSPACE_PREFIX == "":
                # Make it relative path
                filename = re.sub(r"^" + escaped_dir + "/",
                                  WORKSPACE_PREFIX, filename)
            elif not filename.startswith(working_dir):
                filename = os.path.join(WORKSPACE_PREFIX, filename)
            else:
                filename = re.sub(r"^" + escaped_dir,
                                  WORKSPACE_PREFIX, filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename))
    )
    add_region_and_context_region(
        physical_location, issue_dict["line_number"], issue_dict["code"]
    )
    issue_severity = tweak_severity(tool_name, issue_dict)
    fingerprint = {}
    region = physical_location.region
    # ROBUSTNESS: also check region.snippet — a region without a snippet
    # would otherwise raise AttributeError on `.snippet.text`.
    if region and region.snippet and region.snippet.text:
        snippet = region.snippet.text.strip().replace("\t", "").replace(
            "\n", "")
        fingerprint = {
            "scanPrimaryLocationHash": to_fingerprint_hash(
                snippet, HASH_DIGEST_SIZE)
        }
    if issue_dict.get("tags"):
        # Only the tag values participate in the hash; keys are ignored.
        tag_str = "".join(issue_dict.get("tags", {}).values())
        if tag_str:
            fingerprint["scanTagsHash"] = to_fingerprint_hash(
                tag_str, HASH_DIGEST_SIZE)
    # Filename hash
    fingerprint["scanFileHash"] = to_fingerprint_hash(filename,
                                                      HASH_DIGEST_SIZE)
    # Should we suppress this fingerprint?
    if should_suppress_fingerprint(fingerprint, working_dir):
        return None
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(
            text=issue_dict["issue_text"],
            markdown=issue_dict["issue_text"] if tool_name == "ng-sast" else "",
        ),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        partial_fingerprints=fingerprint,
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
            "issue_tags": issue_dict.get("tags", {}),
        },
        baseline_state="unchanged" if issue_dict["first_found"] else "new",
    )
def create_result(tool_name, issue, rules, rule_indices, file_path_list=None,
                  working_dir=None):
    """Method to convert a single issue into result schema with rules

    :param tool_name: tool name
    :param issue: Issues object (or its dict representation)
    :param rules: List of rules
    :param rule_indices: Indices of referred rules
    :param file_path_list: Full file path for any manipulation
    :param working_dir: Working directory
    """
    WORKSPACE_PREFIX = config.get("WORKSPACE", None)
    if isinstance(issue, dict):
        issue = issue_from_dict(issue)
    issue_dict = issue.as_dict()
    rule, rule_index = create_or_find_rule(tool_name, issue_dict, rules,
                                           rule_indices)
    # Substitute workspace prefix
    # Override file path prefix with workspace
    filename = issue_dict["filename"]
    if working_dir:
        # Issue 5 fix. Convert relative to full path automatically
        # Convert to full path only if the user wants
        if WORKSPACE_PREFIX is None and not filename.startswith(working_dir):
            filename = os.path.join(working_dir, filename)
        if WORKSPACE_PREFIX is not None:
            # BUG FIX: working_dir is a filesystem path, not a regex.
            # Unescaped, characters such as '+' or '\' (Windows paths)
            # would be treated as regex metacharacters or raise re.error.
            escaped_dir = re.escape(working_dir)
            if WORKSPACE_PREFIX == "":
                # Make it relative path
                filename = re.sub(r"^" + escaped_dir + "/",
                                  WORKSPACE_PREFIX, filename)
            else:
                filename = re.sub(r"^" + escaped_dir,
                                  WORKSPACE_PREFIX, filename)
    physical_location = om.PhysicalLocation(
        artifact_location=om.ArtifactLocation(uri=to_uri(filename)))
    add_region_and_context_region(physical_location,
                                  issue_dict["line_number"],
                                  issue_dict["code"])
    issue_severity = tweak_severity(tool_name, issue_dict["issue_severity"])
    # NOTE: snippet-based fingerprinting is intentionally disabled below;
    # the reference implementation is kept for when it is re-enabled.
    fingerprint = {}
    """
    if physical_location.region and physical_location.region.snippet.text:
        snippet = physical_location.region.snippet.text
        snippet = snippet.strip().replace("\t", "").replace("\n", "")
        h = blake2b(digest_size=HASH_DIGEST_SIZE)
        h.update(snippet.encode())
        fingerprint = {"primaryLocationLineHash": h.hexdigest() + ":1"}
    """
    return om.Result(
        rule_id=rule.id,
        rule_index=rule_index,
        message=om.Message(
            text=issue_dict["issue_text"],
            markdown=issue_dict["issue_text"] if tool_name == "inspect" else "",
        ),
        level=level_from_severity(issue_severity),
        locations=[om.Location(physical_location=physical_location)],
        partial_fingerprints=fingerprint,
        properties={
            "issue_confidence": issue_dict["issue_confidence"],
            "issue_severity": issue_severity,
        },
    )
def print_matches(self, matches, rules=None, filenames=None):
    """Output all the matches as a SARIF JSON log.

    :param matches: Iterable of match objects (each carries a rule,
        message, filename and line/column positions)
    :param rules: RulesCollection to describe in the output; a fresh
        collection is created when not supplied
    :param filenames: unused here — presumably part of the shared
        formatter interface; verify against sibling formatters
    :returns: the serialized SARIF log (via ``to_json``)
    """
    if not rules:
        rules = RulesCollection()
    # These "base" rules are not passed into formatters
    rules.extend([ParseError(), TransformError(), RuleError()])
    results = []
    # One SARIF Result per match, locating it by file URI (relative to
    # self.uri_base_id) and start/end line/column region.
    for match in matches:
        results.append(
            sarif.Result(
                rule_id=match.rule.id,
                message=sarif.Message(text=match.message),
                level=self._to_sarif_level(match.rule.severity),
                locations=[
                    sarif.Location(
                        physical_location=sarif.PhysicalLocation(
                            artifact_location=sarif.ArtifactLocation(
                                uri=match.filename,
                                uri_base_id=self.uri_base_id,
                            ),
                            region=sarif.Region(
                                start_column=match.columnnumber,
                                start_line=match.linenumber,
                                end_column=match.columnnumberend,
                                end_line=match.linenumberend,
                            ),
                        ))
                ],
            ))
    # Output only the rules that have matches
    matched_rules = set(r.rule_id for r in results)
    rules_map = {r.id: r for r in list(rules)}
    rules = [
        sarif.ReportingDescriptor(
            id=rule_id,
            short_description=sarif.MultiformatMessageString(
                text=rules_map[rule_id].shortdesc),
            full_description=sarif.MultiformatMessageString(
                text=rules_map[rule_id].description),
            # NOTE(review): this condition tests the rule object's
            # truthiness, not whether source_url is set — looks like it
            # was meant to guard a missing URL; confirm intent.
            help_uri=rules_map[rule_id].source_url if rules_map[rule_id] else None)
        for rule_id in matched_rules
    ]
    # Single run describing the cfn-lint driver, its matched rules, and
    # the base URI the artifact locations are relative to.
    run = sarif.Run(
        tool=sarif.Tool(driver=sarif.ToolComponent(
            name='cfn-lint',
            short_description=sarif.MultiformatMessageString(
                text=('Validates AWS CloudFormation templates against'
                      ' the resource specification and additional'
                      ' checks.')),
            information_uri=
            'https://github.com/aws-cloudformation/cfn-lint',
            rules=rules,
            version=cfnlint.version.__version__,
        ), ),
        original_uri_base_ids={
            self.uri_base_id: sarif.ArtifactLocation(
                description=sarif.MultiformatMessageString(
                    'The directory in which cfn-lint was run.'))
        },
        results=results,
    )
    log = sarif.SarifLog(version=self.version,
                         schema_uri=self.schema,
                         runs=[run])
    # IMPORTANT: 'warning' is the default level in SARIF and will be
    # stripped by serialization.
    return to_json(log)