def pre_save_finding_status_change(sender, instance, changed_fields=None, **kwargs):
    """pre_save signal handler: propagate status-field changes on a Finding.

    ``changed_fields`` is expected to be a mapping of field name to an
    ``(old, new)`` tuple (as supplied by the field-change signal machinery);
    it is forwarded to ``update_finding_status``.
    """
    # some code is cloning findings by setting id/pk to None; in that flow the
    # signal can fire without change information — the clone is handled on the
    # next save, so bail out instead of crashing on None.items() below.
    if changed_fields is None:
        return
    logger.debug('%i: changed status fields pre_save: %s', instance.id or 0, changed_fields)
    for field, (old, new) in changed_fields.items():
        # lazy %-args instead of eager string formatting in the hot signal path
        logger.debug('%i: %s changed from %s to %s', instance.id or 0, field, old, new)
    # get_current_user() may return None outside a request context — TODO confirm
    user = get_current_user()
    if not (user and user.is_authenticated):
        user = None
    update_finding_status(instance, user, changed_fields)
def create_finding_group(finds, finding_group_name):
    """Create a Finding_Group for *finds* and attach every finding not already grouped.

    Returns a tuple ``(finding_group, added, skipped)`` where ``added`` is the
    number of findings attached and ``skipped`` the number left out because
    they already belong to another group.

    Raises ValueError when *finds* is empty.
    """
    logger.debug('creating finding_group_create')
    # empty/None both rejected; the group needs at least finds[0].test below
    if not finds:
        raise ValueError('cannot create empty Finding Group')
    finding_group_name_dummy = 'bulk group ' + strftime("%a, %d %b %Y %X", timezone.now().timetuple())
    finding_group = Finding_Group(test=finds[0].test)
    finding_group.creator = get_current_user()
    finding_group.name = finding_group_name + finding_group_name_dummy
    finding_group.save()
    # only findings that are not yet in any group may be added
    available_findings = [find for find in finds if not find.finding_group_set.all()]
    finding_group.findings.set(available_findings)
    # if user provided a name, we use that, else:
    # if we have components, we may set a nice name, but catch 'name already exists'
    # style errors — renaming is best-effort and must not fail the bulk operation.
    try:
        if finding_group_name:
            finding_group.name = finding_group_name
        elif finding_group.components:
            finding_group.name = finding_group.components
        finding_group.save()
    except Exception:
        # narrow from bare `except:` so SystemExit/KeyboardInterrupt still propagate
        pass
    added = len(available_findings)
    skipped = len(finds) - added
    return finding_group, added, skipped
def add_finding_to_auto_group(finding, group_by):
    """Attach *finding* to the auto-managed group for its test, creating the group on first use."""
    group_name = get_group_by_group_name(finding, group_by)
    owning_test = finding.test
    finding_group, was_created = Finding_Group.objects.get_or_create(
        test=owning_test, creator=get_current_user(), name=group_name)
    if was_created:
        logger.debug('Created Finding Group %d:%s for test %d:%s',
                     finding_group.id, finding_group, owning_test.id, owning_test)
    finding_group.findings.add(finding)
def __wrapper__(*args, **kwargs):
    """Dispatch the wrapped task to celery or run it inline, recording the requesting user."""
    from dojo.utils import get_current_user
    # stash the current user so the (possibly async) task knows who triggered it
    kwargs['async_user'] = get_current_user()
    if not we_want_async(*args, func=func, **kwargs):
        return func(*args, **kwargs)
    return func.delay(*args, **kwargs)
def pre_save_finding_status_change(sender, instance, changed_fields, **kwargs):
    """pre_save signal handler: propagate status-field changes on a Finding.

    ``changed_fields`` maps field name to an ``(old, new)`` tuple and is
    forwarded to ``update_finding_status``.
    """
    # some code is cloning findings by setting id/pk to None, ignore those, will be handled on next save
    if not instance.id:
        logger.debug('ignoring save of finding without id')
        return
    logger.debug('%i: changed status fields pre_save: %s', instance.id, changed_fields)
    # get_current_user() can return None (e.g. in celery tasks with no request
    # context); guard before touching .is_authenticated to avoid AttributeError.
    current_user = get_current_user()
    user = current_user if current_user and current_user.is_authenticated else None
    update_finding_status(instance, user, changed_fields)
def __wrapper__(*args, **kwargs):
    """Run the wrapped task via celery unless the user asked for blocking execution."""
    from dojo.utils import get_current_user
    from dojo.models import Dojo_User
    user = get_current_user()
    if not Dojo_User.wants_block_execution(user):
        return func.delay(*args, **kwargs)
    logger.debug(
        'dojo_async_task: running task in the foreground as block_execution is set to True for %s',
        user)
    return func(*args, **kwargs)
def add_finding_to_auto_group(finding, group_by, **kwargs):
    """Attach *finding* to the auto-managed group for its test, creating it on first use.

    Falls back to the ``async_user`` kwarg as creator when there is no current
    user (e.g. when running inside a celery task).
    """
    owning_test = finding.test
    group_name = get_group_by_group_name(finding, group_by)
    creator = get_current_user() or kwargs.get('async_user', None)
    finding_group, was_created = Finding_Group.objects.get_or_create(
        test=owning_test, creator=creator, name=group_name)
    if was_created:
        logger.debug('Created Finding Group %d:%s for test %d:%s',
                     finding_group.id, finding_group, owning_test.id, owning_test)
    finding_group.findings.add(finding)
def we_want_async():
    """Return True when tasks should go to celery; False when the current user blocks async execution."""
    from dojo.utils import get_current_user
    from dojo.models import Dojo_User
    user = get_current_user()
    if not Dojo_User.wants_block_execution(user):
        return True
    logger.debug(
        'dojo_async_task: running task in the foreground as block_execution is set to True for %s',
        user)
    return False
def import_scan(self, scan, scan_type, engagement, lead, environment, active, verified, tags=None,
                minimum_severity=None, user=None, endpoints_to_add=None, scan_date=None, version=None,
                branch_tag=None, build_id=None, commit_hash=None, push_to_jira=None, close_old_findings=False):
    """Import a scan report: create a Test, parse and process its findings.

    Returns ``(test, new_finding_count, closed_finding_count)``.
    NOTE(review): scan_date must not be None — datetime.combine would raise; verify callers.
    """
    user = user or get_current_user()
    now = timezone.now()
    # retain weird existing logic to use current time for provided scan date
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time, timezone.get_default_timezone())
    logger.debug('IMPORT_SCAN: Create Test')
    test = self.create_test(scan_type, engagement, lead, environment, scan_date=scan_date, tags=tags,
                            version=version, branch_tag=branch_tag, build_id=build_id,
                            commit_hash=commit_hash, now=now)
    logger.debug('IMPORT_SCAN: Parse findings')
    parsed_findings = importer_utils.parse_findings(scan, test, active, verified, scan_type)
    logger.debug('IMPORT_SCAN: Processing findings')
    new_findings = self.process_parsed_findings(test, parsed_findings, scan_type, user, active, verified,
                                                minimum_severity=minimum_severity,
                                                endpoints_to_add=endpoints_to_add,
                                                push_to_jira=push_to_jira, now=now)
    closed_findings = []
    if close_old_findings:
        logger.debug('IMPORT_SCAN: Closing findings no longer present in scan report')
        closed_findings = self.close_old_findings(test, scan_date_time, user=user, push_to_jira=push_to_jira)
    logger.debug('IMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, scan_date, version, branch_tag, build_id, commit_hash,
                                     now, scan_date_time)
    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('IMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(Test_Import.IMPORT_TYPE, active, verified, tags,
                                             minimum_severity, endpoints_to_add, version, branch_tag,
                                             build_id, commit_hash, push_to_jira, close_old_findings,
                                             test, new_findings, closed_findings)
    logger.debug('IMPORT_SCAN: Generating notifications')
    notifications_helper.notify_test_created(test)
    # only notify about the scan when it actually changed something
    updated_count = len(new_findings) + len(closed_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(test, updated_count, new_findings=new_findings,
                                               findings_mitigated=closed_findings)
    logger.debug('IMPORT_SCAN: Done')
    return test, len(new_findings), len(closed_findings)
def we_want_async(*args, **kwargs):
    """Return True when a task should be dispatched asynchronously.

    Forced synchronous either by an explicit ``sync=True`` kwarg or by the
    current user's block_execution preference.
    """
    from dojo.utils import get_current_user
    from dojo.models import Dojo_User
    if kwargs.get('sync', False):
        logger.debug(
            'dojo_async_task: running task in the foreground as sync=True has been found as kwarg'
        )
        return False
    user = get_current_user()
    if not Dojo_User.wants_block_execution(user):
        return True
    logger.debug(
        'dojo_async_task: running task in the foreground as block_execution is set to True for %s',
        user)
    return False
def we_want_async(*args, func=None, **kwargs):
    """Return True when *func* should be dispatched asynchronously.

    Forced synchronous either by an explicit ``sync=True`` kwarg or by the
    effective user's block_execution preference. The effective user is the
    ``async_user`` kwarg if present, else the current request user.
    """
    from dojo.utils import get_current_user
    from dojo.models import Dojo_User
    sync = kwargs.get('sync', False)
    if sync:
        logger.debug(
            'dojo_async_task %s: running task in the foreground as sync=True has been found as kwarg',
            func)
        return False
    user = kwargs.get('async_user', get_current_user())
    logger.debug('user: %s', user)
    if Dojo_User.wants_block_execution(user):
        logger.debug(
            'dojo_async_task %s: running task in the foreground as block_execution is set to True for %s',
            func, user)
        return False
    # fix: the old message claimed 'no current user' even when a user was present
    if user is None:
        logger.debug(
            'dojo_async_task %s: no current user, running task in the background', func)
    else:
        logger.debug(
            'dojo_async_task %s: running task in the background for %s', func, user)
    return True
def reimport_scan(self, scan, scan_type, test, active=True, verified=True, tags=None, minimum_severity=None,
                  user=None, endpoints_to_add=None, scan_date=None, version=None, branch_tag=None,
                  build_id=None, commit_hash=None, push_to_jira=None, close_old_findings=True,
                  group_by=None, api_scan_configuration=None, service=None):
    """Re-import a scan report into an existing *test*.

    Matches parsed findings against existing ones; returns
    ``(test, updated_count, new, closed, reactivated, untouched)`` counts.
    """
    logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')
    user = user or get_current_user()
    now = timezone.now()
    if api_scan_configuration:
        if api_scan_configuration.product != test.engagement.product:
            raise ValidationError(
                'API Scan Configuration has to be from same product as the Test'
            )
        if test.api_scan_configuration != api_scan_configuration:
            test.api_scan_configuration = api_scan_configuration
            test.save()
    # check if the parser that handle the scan_type manage tests
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('REIMPORT_SCAN parser v2: Create parse findings')
        tests = parser.get_tests(scan_type, scan)
        # for now we only consider the first test in the list and artificially aggregate all findings of all tests
        # this is the same as the old behavior as current import/reimporter implementation doesn't handle the case
        # when there is more than 1 test
        parsed_findings = []
        for test_raw in tests:
            parsed_findings.extend(test_raw.findings)
    else:
        logger.debug('REIMPORT_SCAN: Parse findings')
        parsed_findings = parser.get_findings(scan, test)
    logger.debug('REIMPORT_SCAN: Processing findings')
    new_findings = []
    reactivated_findings = []
    findings_to_mitigate = []
    untouched_findings = []
    if settings.ASYNC_FINDING_IMPORT:
        # process findings in parallel celery chunks, then deserialize the results
        chunk_list = importer_utils.chunk_list(parsed_findings)
        results_list = []
        # First kick off all the workers
        for findings_list in chunk_list:
            result = self.process_parsed_findings(test, findings_list, scan_type, user, active, verified,
                                                  minimum_severity=minimum_severity,
                                                  endpoints_to_add=endpoints_to_add,
                                                  push_to_jira=push_to_jira, group_by=group_by, now=now,
                                                  service=service, scan_date=scan_date, sync=False)
            # Since I dont want to wait until the task is done right now, save the id
            # So I can check on the task later
            results_list += [result]
        # After all tasks have been started, time to pull the results
        logger.debug('REIMPORT_SCAN: Collecting Findings')
        for results in results_list:
            serial_new_findings, serial_reactivated_findings, serial_findings_to_mitigate, serial_untouched_findings = results.get()
            # workers return serialized findings; rebuild model instances here
            new_findings += [next(serializers.deserialize("json", finding)).object
                             for finding in serial_new_findings]
            reactivated_findings += [next(serializers.deserialize("json", finding)).object
                                     for finding in serial_reactivated_findings]
            findings_to_mitigate += [next(serializers.deserialize("json", finding)).object
                                     for finding in serial_findings_to_mitigate]
            untouched_findings += [next(serializers.deserialize("json", finding)).object
                                   for finding in serial_untouched_findings]
        logger.debug('REIMPORT_SCAN: All Findings Collected')
        # Indicate that the test is not complete yet as endpoints will still be rolling in.
        test.percent_complete = 50
        test.save()
        importer_utils.update_test_progress(test)
    else:
        new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
            self.process_parsed_findings(test, parsed_findings, scan_type, user, active, verified,
                                         minimum_severity=minimum_severity,
                                         endpoints_to_add=endpoints_to_add,
                                         push_to_jira=push_to_jira, group_by=group_by, now=now,
                                         service=service, scan_date=scan_date, sync=True)
    closed_findings = []
    if close_old_findings:
        logger.debug(
            'REIMPORT_SCAN: Closing findings no longer present in scan report'
        )
        closed_findings = self.close_old_findings(test, findings_to_mitigate, scan_date,
                                                  user=user, push_to_jira=push_to_jira)
    logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, version, branch_tag, build_id, commit_hash, now, scan_date)
    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('REIMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.REIMPORT_TYPE, active, verified, tags, minimum_severity, endpoints_to_add,
            version, branch_tag, build_id, commit_hash, push_to_jira, close_old_findings, test,
            new_findings, closed_findings, reactivated_findings)
    logger.debug('REIMPORT_SCAN: Generating notifications')
    updated_count = len(closed_findings) + len(reactivated_findings) + len(new_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count, new_findings=new_findings, findings_mitigated=closed_findings,
            findings_reactivated=reactivated_findings, findings_untouched=untouched_findings)
    logger.debug('REIMPORT_SCAN: Done')
    return test, updated_count, len(new_findings), len(closed_findings), len(reactivated_findings), len(untouched_findings)
def reimport_scan(self, scan, scan_type, test, active=True, verified=True, tags=None, minimum_severity=None,
                  user=None, endpoints_to_add=None, scan_date=None, version=None, branch_tag=None,
                  build_id=None, commit_hash=None, push_to_jira=None, close_old_findings=True,
                  group_by=None, sonarqube_config=None, cobaltio_config=None):
    """Re-import a scan report into an existing *test* (sonarqube/cobalt.io config variant).

    Returns ``(test, updated_count, new, closed, reactivated, untouched)`` counts.
    NOTE(review): scan_date must not be None — datetime.combine would raise; verify callers.
    """
    logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')
    user = user or get_current_user()
    now = timezone.now()
    # retain weird existing logic to use current time for provided scan date
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time, timezone.get_default_timezone())
    if sonarqube_config:  # it there is not sonarqube_config, just use original
        if sonarqube_config.product != test.engagement.product:
            raise ValidationError('"sonarqube_config" has to be from same product as "test"')
        if test.sonarqube_config != sonarqube_config:  # update of sonarqube_config
            test.sonarqube_config = sonarqube_config
            test.save()
    if cobaltio_config:  # it there is no cobaltio_config, just use original
        if cobaltio_config.product != test.engagement.product:
            raise ValidationError('"cobaltio_config" has to be from same product as "test"')
        if test.cobaltio_config != cobaltio_config:  # update the cobaltio_config
            test.cobaltio_config = cobaltio_config
            test.save()
    # check if the parser that handle the scan_type manage tests
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('REIMPORT_SCAN parser v2: Create parse findings')
        tests = parser.get_tests(scan_type, scan)
        # for now we only consider the first test in the list and artificially aggregate all findings of all tests
        # this is the same as the old behavior as current import/reimporter implementation doesn't handle the case
        # when there is more than 1 test
        parsed_findings = []
        for test_raw in tests:
            parsed_findings.extend(test_raw.findings)
    else:
        logger.debug('REIMPORT_SCAN: Parse findings')
        parsed_findings = parser.get_findings(scan, test)
    logger.debug('REIMPORT_SCAN: Processing findings')
    new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
        self.process_parsed_findings(test, parsed_findings, scan_type, user, active, verified,
                                     minimum_severity=minimum_severity,
                                     endpoints_to_add=endpoints_to_add,
                                     push_to_jira=push_to_jira, group_by=group_by, now=now)
    closed_findings = []
    if close_old_findings:
        logger.debug('REIMPORT_SCAN: Closing findings no longer present in scan report')
        closed_findings = self.close_old_findings(test, findings_to_mitigate, scan_date_time,
                                                  user=user, push_to_jira=push_to_jira)
    logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, scan_date, version, branch_tag, build_id, commit_hash,
                                     now, scan_date_time)
    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('REIMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(Test_Import.REIMPORT_TYPE, active, verified, tags,
                                             minimum_severity, endpoints_to_add, version, branch_tag,
                                             build_id, commit_hash, push_to_jira, close_old_findings,
                                             test, new_findings, closed_findings, reactivated_findings)
    logger.debug('REIMPORT_SCAN: Generating notifications')
    updated_count = len(closed_findings) + len(reactivated_findings) + len(new_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(test, updated_count, new_findings=new_findings,
                                               findings_mitigated=closed_findings,
                                               findings_reactivated=reactivated_findings,
                                               findings_untouched=untouched_findings)
    logger.debug('REIMPORT_SCAN: Done')
    return test, updated_count, len(new_findings), len(closed_findings), len(reactivated_findings), len(untouched_findings)
def import_scan(self, scan, scan_type, engagement, lead, environment, active, verified, tags=None,
                minimum_severity=None, user=None, endpoints_to_add=None, scan_date=None, version=None,
                branch_tag=None, build_id=None, commit_hash=None, push_to_jira=None,
                close_old_findings=False, group_by=None, api_scan_configuration=None, service=None,
                title=None):
    """Import a scan report: create a Test, parse and process its findings.

    Supports 'v2' parsers exposing ``get_tests`` (multi-test formats like
    SARIF) and optional async chunked finding processing.
    Returns ``(test, new_finding_count, closed_finding_count)``.
    NOTE(review): when a v2 parser returns no tests, ``test``/``parsed_findings``
    are never assigned and the code below raises — confirm this is acceptable.
    """
    logger.debug(f'IMPORT_SCAN: parameters: {locals()}')
    user = user or get_current_user()
    now = timezone.now()
    if api_scan_configuration and api_scan_configuration.product != engagement.product:
        raise ValidationError(
            'API Scan Configuration has to be from same product as the Engagement'
        )
    # check if the parser that handle the scan_type manage tests
    # if yes, we parse the data first
    # after that we customize the Test_Type to reflect the data
    # This allow us to support some meta-formats like SARIF or the generic format
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('IMPORT_SCAN parser v2: Create Test and parse findings')
        tests = parser.get_tests(scan_type, scan)
        # for now we only consider the first test in the list and artificially aggregate all findings of all tests
        # this is the same as the old behavior as current import/reimporter implementation doesn't handle the case
        # when there is more than 1 test
        #
        # we also aggregate the label of the Test_type to show the user the original scan_type
        # only if they are different. This is to support meta format like SARIF
        # so a report that have the label 'CodeScanner' will be changed to 'CodeScanner Scan (SARIF)'
        test_type_name = scan_type
        if len(tests) > 0:
            if tests[0].type:
                test_type_name = tests[0].type + " Scan"
                if test_type_name != scan_type:
                    test_type_name = f"{test_type_name} ({scan_type})"
            test = self.create_test(scan_type, test_type_name, engagement, lead, environment,
                                    scan_date=scan_date, tags=tags, version=version,
                                    branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash,
                                    now=now, api_scan_configuration=api_scan_configuration, title=title)
            # This part change the name of the Test
            # we get it from the data of the parser
            test_raw = tests[0]
            if test_raw.name:
                test.name = test_raw.name
            if test_raw.description:
                test.description = test_raw.description
            test.save()
            logger.debug('IMPORT_SCAN parser v2: Parse findings (aggregate)')
            # currently we only support import one Test
            # so for parser that support multiple tests (like SARIF)
            # we aggregate all the findings into one uniq test
            parsed_findings = []
            for test_raw in tests:
                parsed_findings.extend(test_raw.findings)
        else:
            logger.info(f'No tests found in import for {scan_type}')
    else:
        logger.debug('IMPORT_SCAN: Create Test')
        # by default test_type == scan_type
        test = self.create_test(scan_type, scan_type, engagement, lead, environment,
                                scan_date=scan_date, tags=tags, version=version,
                                branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash,
                                now=now, api_scan_configuration=api_scan_configuration, title=title)
        logger.debug('IMPORT_SCAN: Parse findings')
        parser = get_parser(scan_type)
        parsed_findings = parser.get_findings(scan, test)
    logger.debug('IMPORT_SCAN: Processing findings')
    new_findings = []
    if settings.ASYNC_FINDING_IMPORT:
        # process findings in parallel celery chunks, then deserialize the results
        chunk_list = importer_utils.chunk_list(parsed_findings)
        results_list = []
        # First kick off all the workers
        for findings_list in chunk_list:
            result = self.process_parsed_findings(test, findings_list, scan_type, user, active, verified,
                                                  minimum_severity=minimum_severity,
                                                  endpoints_to_add=endpoints_to_add,
                                                  push_to_jira=push_to_jira, group_by=group_by, now=now,
                                                  service=service, scan_date=scan_date, sync=False)
            # Since I dont want to wait until the task is done right now, save the id
            # So I can check on the task later
            results_list += [result]
        # After all tasks have been started, time to pull the results
        logger.info('IMPORT_SCAN: Collecting Findings')
        for results in results_list:
            serial_new_findings = results.get()
            # workers return serialized findings; rebuild model instances here
            new_findings += [next(serializers.deserialize("json", finding)).object
                             for finding in serial_new_findings]
        logger.info('IMPORT_SCAN: All Findings Collected')
        # Indicate that the test is not complete yet as endpoints will still be rolling in.
        test.percent_complete = 50
        test.save()
        importer_utils.update_test_progress(test)
    else:
        new_findings = self.process_parsed_findings(test, parsed_findings, scan_type, user, active,
                                                    verified, minimum_severity=minimum_severity,
                                                    endpoints_to_add=endpoints_to_add,
                                                    push_to_jira=push_to_jira, group_by=group_by,
                                                    now=now, service=service, scan_date=scan_date,
                                                    sync=True)
    closed_findings = []
    if close_old_findings:
        logger.debug(
            'IMPORT_SCAN: Closing findings no longer present in scan report'
        )
        closed_findings = self.close_old_findings(test, scan_date, user=user,
                                                  push_to_jira=push_to_jira, service=service)
    logger.debug('IMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, version, branch_tag, build_id, commit_hash, now, scan_date)
    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('IMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.IMPORT_TYPE, active, verified, tags, minimum_severity, endpoints_to_add,
            version, branch_tag, build_id, commit_hash, push_to_jira, close_old_findings, test,
            new_findings, closed_findings)
    logger.debug('IMPORT_SCAN: Generating notifications')
    notifications_helper.notify_test_created(test)
    # only notify about the scan when it actually changed something
    updated_count = len(new_findings) + len(closed_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count, new_findings=new_findings, findings_mitigated=closed_findings)
    logger.debug('IMPORT_SCAN: Done')
    return test, len(new_findings), len(closed_findings)
def import_scan(self, scan, scan_type, engagement, lead, environment, active, verified, tags=None,
                minimum_severity=None, user=None, endpoints_to_add=None, scan_date=None, version=None,
                branch_tag=None, build_id=None, commit_hash=None, push_to_jira=None,
                close_old_findings=False, group_by=None, api_scan_configuration=None, service=None):
    """Import a scan report: create a Test, parse and process its findings.

    Supports 'v2' parsers exposing ``get_tests`` (multi-test formats like SARIF).
    Returns ``(test, new_finding_count, closed_finding_count)``.
    NOTE(review): when a v2 parser returns no tests, ``test``/``parsed_findings``
    are never assigned and the code below raises — confirm this is acceptable.
    """
    logger.debug(f'IMPORT_SCAN: parameters: {locals()}')
    user = user or get_current_user()
    now = timezone.now()
    # retain weird existing logic to use current time for provided scan date
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time, timezone.get_default_timezone())
    if api_scan_configuration and api_scan_configuration.product != engagement.product:
        raise ValidationError(
            'API Scan Configuration has to be from same product as the Engagement'
        )
    # check if the parser that handle the scan_type manage tests
    # if yes, we parse the data first
    # after that we customize the Test_Type to reflect the data
    # This allow us to support some meta-formats like SARIF or the generic format
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('IMPORT_SCAN parser v2: Create Test and parse findings')
        tests = parser.get_tests(scan_type, scan)
        # for now we only consider the first test in the list and artificially aggregate all findings of all tests
        # this is the same as the old behavior as current import/reimporter implementation doesn't handle the case
        # when there is more than 1 test
        #
        # we also aggregate the label of the Test_type to show the user the original scan_type
        # only if they are different. This is to support meta format like SARIF
        # so a report that have the label 'CodeScanner' will be changed to 'CodeScanner Scan (SARIF)'
        test_type_name = scan_type
        if len(tests) > 0:
            if tests[0].type:
                test_type_name = tests[0].type + " Scan"
                if test_type_name != scan_type:
                    test_type_name = f"{test_type_name} ({scan_type})"
            test = self.create_test(scan_type, test_type_name, engagement, lead, environment,
                                    scan_date=scan_date, tags=tags, version=version,
                                    branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash,
                                    now=now, api_scan_configuration=api_scan_configuration)
            # This part change the name of the Test
            # we get it from the data of the parser
            test_raw = tests[0]
            if test_raw.name:
                test.name = test_raw.name
            if test_raw.description:
                test.description = test_raw.description
            test.save()
            logger.debug('IMPORT_SCAN parser v2: Parse findings (aggregate)')
            # currently we only support import one Test
            # so for parser that support multiple tests (like SARIF)
            # we aggregate all the findings into one uniq test
            parsed_findings = []
            for test_raw in tests:
                parsed_findings.extend(test_raw.findings)
        else:
            logger.info(f'No tests found in import for {scan_type}')
    else:
        logger.debug('IMPORT_SCAN: Create Test')
        # by default test_type == scan_type
        test = self.create_test(scan_type, scan_type, engagement, lead, environment,
                                scan_date=scan_date, tags=tags, version=version,
                                branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash,
                                now=now, api_scan_configuration=api_scan_configuration)
        logger.debug('IMPORT_SCAN: Parse findings')
        parser = get_parser(scan_type)
        parsed_findings = parser.get_findings(scan, test)
    logger.debug('IMPORT_SCAN: Processing findings')
    new_findings = self.process_parsed_findings(test, parsed_findings, scan_type, user, active,
                                                verified, minimum_severity=minimum_severity,
                                                endpoints_to_add=endpoints_to_add,
                                                push_to_jira=push_to_jira, group_by=group_by,
                                                now=now, service=service)
    closed_findings = []
    if close_old_findings:
        logger.debug(
            'IMPORT_SCAN: Closing findings no longer present in scan report'
        )
        closed_findings = self.close_old_findings(test, scan_date_time, user=user,
                                                  push_to_jira=push_to_jira)
    logger.debug('IMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, scan_date, version, branch_tag, build_id, commit_hash,
                                     now, scan_date_time)
    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('IMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.IMPORT_TYPE, active, verified, tags, minimum_severity, endpoints_to_add,
            version, branch_tag, build_id, commit_hash, push_to_jira, close_old_findings, test,
            new_findings, closed_findings)
    logger.debug('IMPORT_SCAN: Generating notifications')
    notifications_helper.notify_test_created(test)
    # only notify about the scan when it actually changed something
    updated_count = len(new_findings) + len(closed_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count, new_findings=new_findings, findings_mitigated=closed_findings)
    logger.debug('IMPORT_SCAN: Done')
    return test, len(new_findings), len(closed_findings)
def reimport_scan(self, scan, scan_type, test, active=True, verified=True, tags=None, minimum_severity=None,
                  user=None, endpoints_to_add=None, scan_date=None, version=None, branch_tag=None,
                  build_id=None, commit_hash=None, push_to_jira=None, close_old_findings=True,
                  group_by=None, sonarqube_config=None):
    """Re-import a scan report into an existing *test* (sonarqube config variant).

    Returns ``(test, updated_count, new, closed, reactivated, untouched)`` counts.
    NOTE(review): scan_date must not be None — datetime.combine would raise; verify callers.
    """
    logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')
    user = user or get_current_user()
    now = timezone.now()
    # retain weird existing logic to use current time for provided scan date
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time, timezone.get_default_timezone())
    if sonarqube_config:  # it there is not sonarqube_config, just use original
        if sonarqube_config.product != test.engagement.product:
            raise ValidationError(
                '"sonarqube_config" has to be from same product as "test"')
        if test.sonarqube_config != sonarqube_config:  # update of sonarqube_config
            test.sonarqube_config = sonarqube_config
            test.save()
    logger.debug('REIMPORT_SCAN: Parse findings')
    parsed_findings = importer_utils.parse_findings(scan, test, active, verified, scan_type)
    logger.debug('REIMPORT_SCAN: Processing findings')
    new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
        self.process_parsed_findings(test, parsed_findings, scan_type, user, active, verified,
                                     minimum_severity=minimum_severity,
                                     endpoints_to_add=endpoints_to_add,
                                     push_to_jira=push_to_jira, group_by=group_by, now=now)
    closed_findings = []
    if close_old_findings:
        logger.debug(
            'REIMPORT_SCAN: Closing findings no longer present in scan report'
        )
        closed_findings = self.close_old_findings(test, findings_to_mitigate, scan_date_time,
                                                  user=user, push_to_jira=push_to_jira)
    logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, scan_date, version, branch_tag, build_id, commit_hash,
                                     now, scan_date_time)
    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('REIMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.REIMPORT_TYPE, active, verified, tags, minimum_severity, endpoints_to_add,
            version, branch_tag, build_id, commit_hash, push_to_jira, close_old_findings, test,
            new_findings, closed_findings, reactivated_findings)
    logger.debug('REIMPORT_SCAN: Generating notifications')
    updated_count = len(closed_findings) + len(reactivated_findings) + len(new_findings)
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count, new_findings=new_findings, findings_mitigated=closed_findings,
            findings_reactivated=reactivated_findings, findings_untouched=untouched_findings)
    logger.debug('REIMPORT_SCAN: Done')
    return test, updated_count, len(new_findings), len(
        closed_findings), len(reactivated_findings), len(
        untouched_findings)