def reimport_scan(self, scan, scan_type, test, active=True, verified=True, tags=None,
                  minimum_severity=None, user=None, endpoints_to_add=None, scan_date=None,
                  version=None, branch_tag=None, build_id=None, commit_hash=None,
                  push_to_jira=None, close_old_findings=True, group_by=None,
                  api_scan_configuration=None, service=None):

    logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')

    user = user or get_current_user()
    now = timezone.now()

    if api_scan_configuration:
        if api_scan_configuration.product != test.engagement.product:
            raise ValidationError('API Scan Configuration has to be from same product as the Test')
        if test.api_scan_configuration != api_scan_configuration:
            test.api_scan_configuration = api_scan_configuration
            test.save()

    # check if the parser that handles the scan_type manages tests
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('REIMPORT_SCAN parser v2: Create parse findings')
        tests = parser.get_tests(scan_type, scan)
        # for now we only consider the first test in the list and artificially aggregate all findings of all tests
        # this is the same as the old behavior, as the current import/reimport implementation doesn't handle the case
        # when there is more than 1 test
        parsed_findings = []
        for test_raw in tests:
            parsed_findings.extend(test_raw.findings)
    else:
        logger.debug('REIMPORT_SCAN: Parse findings')
        parsed_findings = parser.get_findings(scan, test)

    logger.debug('REIMPORT_SCAN: Processing findings')
    new_findings = []
    reactivated_findings = []
    findings_to_mitigate = []
    untouched_findings = []

    if settings.ASYNC_FINDING_IMPORT:
        chunk_list = importer_utils.chunk_list(parsed_findings)
        results_list = []
        # First kick off all the workers
        for findings_list in chunk_list:
            result = self.process_parsed_findings(
                test, findings_list, scan_type, user, active, verified,
                minimum_severity=minimum_severity, endpoints_to_add=endpoints_to_add,
                push_to_jira=push_to_jira, group_by=group_by, now=now, service=service,
                scan_date=scan_date, sync=False)
            # Since I don't want to wait until the task is done right now, save the id
            # so I can check on the task later
            results_list += [result]
        # After all tasks have been started, time to pull the results
        logger.debug('REIMPORT_SCAN: Collecting Findings')
        for results in results_list:
            serial_new_findings, serial_reactivated_findings, serial_findings_to_mitigate, serial_untouched_findings = results.get()
            new_findings += [next(serializers.deserialize("json", finding)).object
                             for finding in serial_new_findings]
            reactivated_findings += [next(serializers.deserialize("json", finding)).object
                                     for finding in serial_reactivated_findings]
            findings_to_mitigate += [next(serializers.deserialize("json", finding)).object
                                     for finding in serial_findings_to_mitigate]
            untouched_findings += [next(serializers.deserialize("json", finding)).object
                                   for finding in serial_untouched_findings]
        logger.debug('REIMPORT_SCAN: All Findings Collected')
        # Indicate that the test is not complete yet as endpoints will still be rolling in.
        test.percent_complete = 50
        test.save()
        importer_utils.update_test_progress(test)
    else:
        new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
            self.process_parsed_findings(
                test, parsed_findings, scan_type, user, active, verified,
                minimum_severity=minimum_severity, endpoints_to_add=endpoints_to_add,
                push_to_jira=push_to_jira, group_by=group_by, now=now, service=service,
                scan_date=scan_date, sync=True)

    closed_findings = []
    if close_old_findings:
        logger.debug('REIMPORT_SCAN: Closing findings no longer present in scan report')
        closed_findings = self.close_old_findings(test, findings_to_mitigate, scan_date,
                                                  user=user, push_to_jira=push_to_jira)

    logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, version, branch_tag, build_id, commit_hash, now, scan_date)

    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('REIMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.REIMPORT_TYPE, active, verified, tags, minimum_severity,
            endpoints_to_add, version, branch_tag, build_id, commit_hash, push_to_jira,
            close_old_findings, test, new_findings, closed_findings, reactivated_findings)

    logger.debug('REIMPORT_SCAN: Generating notifications')
    updated_count = len(closed_findings) + len(reactivated_findings) + len(new_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count, new_findings=new_findings,
            findings_mitigated=closed_findings, findings_reactivated=reactivated_findings,
            findings_untouched=untouched_findings)

    logger.debug('REIMPORT_SCAN: Done')

    return test, updated_count, len(new_findings), len(closed_findings), \
        len(reactivated_findings), len(untouched_findings)
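
# Illustrative usage sketch (not part of the original module). The owning class name
# `DojoDefaultReImporter` and the caller-side variables below are assumptions made for
# illustration only; the sketch is here to clarify the shape of the returned tuple.
#
#   reimporter = DojoDefaultReImporter()  # assumed class owning reimport_scan()
#   test, updated_count, new_count, closed_count, reactivated_count, untouched_count = \
#       reimporter.reimport_scan(
#           scan=uploaded_report,       # file-like object containing the scan output
#           scan_type='ZAP Scan',       # any scan_type known to get_parser()
#           test=existing_test,         # the Test being reimported into
#           active=True, verified=False,
#           close_old_findings=True,    # mitigate findings absent from this report
#       )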
def import_scan(self, scan, scan_type, engagement, lead, environment, active, verified,
                tags=None, minimum_severity=None, user=None, endpoints_to_add=None,
                scan_date=None, version=None, branch_tag=None, build_id=None,
                commit_hash=None, push_to_jira=None, close_old_findings=False,
                group_by=None, api_scan_configuration=None, service=None, title=None):

    logger.debug(f'IMPORT_SCAN: parameters: {locals()}')

    user = user or get_current_user()
    now = timezone.now()

    if api_scan_configuration and api_scan_configuration.product != engagement.product:
        raise ValidationError('API Scan Configuration has to be from same product as the Engagement')

    # check if the parser that handles the scan_type manages tests
    # if yes, we parse the data first
    # after that we customize the Test_Type to reflect the data
    # This allows us to support some meta-formats like SARIF or the generic format
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('IMPORT_SCAN parser v2: Create Test and parse findings')
        tests = parser.get_tests(scan_type, scan)
        # for now we only consider the first test in the list and artificially aggregate all findings of all tests
        # this is the same as the old behavior, as the current import/reimport implementation doesn't handle the case
        # when there is more than 1 test
        #
        # we also aggregate the label of the Test_type to show the user the original scan_type,
        # but only if they are different. This is to support meta-formats like SARIF,
        # so a report that has the label 'CodeScanner' will be changed to 'CodeScanner Scan (SARIF)'
        test_type_name = scan_type
        if len(tests) > 0:
            if tests[0].type:
                test_type_name = tests[0].type + " Scan"
                if test_type_name != scan_type:
                    test_type_name = f"{test_type_name} ({scan_type})"

            test = self.create_test(
                scan_type, test_type_name, engagement, lead, environment,
                scan_date=scan_date, tags=tags, version=version, branch_tag=branch_tag,
                build_id=build_id, commit_hash=commit_hash, now=now,
                api_scan_configuration=api_scan_configuration, title=title)
            # This part changes the name of the Test;
            # we get it from the data of the parser
            test_raw = tests[0]
            if test_raw.name:
                test.name = test_raw.name
            if test_raw.description:
                test.description = test_raw.description
            test.save()

            logger.debug('IMPORT_SCAN parser v2: Parse findings (aggregate)')
            # currently we only support importing one Test,
            # so for parsers that support multiple tests (like SARIF)
            # we aggregate all the findings into one unique test
            parsed_findings = []
            for test_raw in tests:
                parsed_findings.extend(test_raw.findings)
        else:
            logger.info(f'No tests found in import for {scan_type}')
    else:
        logger.debug('IMPORT_SCAN: Create Test')
        # by default test_type == scan_type
        test = self.create_test(
            scan_type, scan_type, engagement, lead, environment, scan_date=scan_date,
            tags=tags, version=version, branch_tag=branch_tag, build_id=build_id,
            commit_hash=commit_hash, now=now,
            api_scan_configuration=api_scan_configuration, title=title)

        logger.debug('IMPORT_SCAN: Parse findings')
        parser = get_parser(scan_type)
        parsed_findings = parser.get_findings(scan, test)

    logger.debug('IMPORT_SCAN: Processing findings')
    new_findings = []

    if settings.ASYNC_FINDING_IMPORT:
        chunk_list = importer_utils.chunk_list(parsed_findings)
        results_list = []
        # First kick off all the workers
        for findings_list in chunk_list:
            result = self.process_parsed_findings(
                test, findings_list, scan_type, user, active, verified,
                minimum_severity=minimum_severity, endpoints_to_add=endpoints_to_add,
                push_to_jira=push_to_jira, group_by=group_by, now=now, service=service,
                scan_date=scan_date, sync=False)
            # Since I don't want to wait until the task is done right now, save the id
            # so I can check on the task later
            results_list += [result]
        # After all tasks have been started, time to pull the results
        logger.info('IMPORT_SCAN: Collecting Findings')
        for results in results_list:
            serial_new_findings = results.get()
            new_findings += [next(serializers.deserialize("json", finding)).object
                             for finding in serial_new_findings]
        logger.info('IMPORT_SCAN: All Findings Collected')
        # Indicate that the test is not complete yet as endpoints will still be rolling in.
        test.percent_complete = 50
        test.save()
        importer_utils.update_test_progress(test)
    else:
        new_findings = self.process_parsed_findings(
            test, parsed_findings, scan_type, user, active, verified,
            minimum_severity=minimum_severity, endpoints_to_add=endpoints_to_add,
            push_to_jira=push_to_jira, group_by=group_by, now=now, service=service,
            scan_date=scan_date, sync=True)

    closed_findings = []
    if close_old_findings:
        logger.debug('IMPORT_SCAN: Closing findings no longer present in scan report')
        closed_findings = self.close_old_findings(test, scan_date, user=user,
                                                  push_to_jira=push_to_jira, service=service)

    logger.debug('IMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, version, branch_tag, build_id, commit_hash, now, scan_date)

    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('IMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.IMPORT_TYPE, active, verified, tags, minimum_severity,
            endpoints_to_add, version, branch_tag, build_id, commit_hash, push_to_jira,
            close_old_findings, test, new_findings, closed_findings)

    logger.debug('IMPORT_SCAN: Generating notifications')
    notifications_helper.notify_test_created(test)
    updated_count = len(new_findings) + len(closed_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(test, updated_count,
                                               new_findings=new_findings,
                                               findings_mitigated=closed_findings)

    logger.debug('IMPORT_SCAN: Done')

    return test, len(new_findings), len(closed_findings)
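
# Illustrative usage sketch (not part of the original module). The owning class name
# `DojoDefaultImporter` and the caller-side variables below are assumptions made for
# illustration only; the sketch is here to clarify the shape of the returned tuple.
#
#   importer = DojoDefaultImporter()  # assumed class owning import_scan()
#   test, new_count, closed_count = importer.import_scan(
#       scan=uploaded_report,         # file-like object containing the scan output
#       scan_type='Trivy Scan',       # any scan_type known to get_parser()
#       engagement=engagement,
#       lead=request_user,
#       environment=environment,
#       active=True, verified=False,
#   )
#
# When settings.ASYNC_FINDING_IMPORT is enabled, findings are processed in chunks by
# background workers and the Test is initially saved with percent_complete = 50 while
# endpoint processing finishes asynchronously.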