def update_timestamps(test, version, branch_tag, build_id, commit_hash, now, scan_date):
    """Refresh bookkeeping fields on a test and its engagement after a scan (re)import.

    Sets both ``updated`` timestamps to ``now``, extends ``target_end`` dates via
    ``max_safe`` (CI/CD engagements also get their engagement ``target_end`` pushed
    out to the scan date), copies scanner metadata onto the test when supplied, and
    saves both the test and its engagement.

    :param test: Test instance to update (its ``engagement`` is updated too)
    :param version: product/scanner version string, or falsy to leave unchanged
    :param branch_tag: source branch/tag name, or falsy to leave unchanged
    :param build_id: CI build identifier, or falsy to leave unchanged
    :param commit_hash: VCS commit hash, or falsy to leave unchanged
    :param now: current datetime used for the ``updated`` fields
    :param scan_date: datetime of the scan; falls back to ``now`` when falsy
    """
    if not scan_date:
        scan_date = now

    test.engagement.updated = now
    if test.engagement.engagement_type == 'CI/CD':
        test.engagement.target_end = max_safe([scan_date.date(), test.engagement.target_end])

    test.updated = now
    test.target_end = max_safe([scan_date, test.target_end])

    if version:
        test.version = version

    if branch_tag:
        test.branch_tag = branch_tag
        # NOTE(review): engagement version is only updated when a branch tag is
        # supplied — looks intentional for CI/CD flows, but confirm against callers.
        test.engagement.version = version

    if build_id:
        test.build_id = build_id

    # BUG FIX: this previously tested `branch_tag` (copy-paste from the block above),
    # so a commit hash supplied without a branch tag was silently dropped.
    if commit_hash:
        test.commit_hash = commit_hash

    test.save()
    test.engagement.save()
def re_import_scan_results(request, tid):
    """Django view: re-upload a scan report into an existing Test.

    GET renders the re-import form. POST parses the uploaded report, then:
    matching existing findings are kept (reactivated if previously mitigated),
    brand-new findings are created, and any original finding absent from the
    new report is marked mitigated. Redirects back to the test on success.

    :param request: HttpRequest
    :param tid: primary key of the Test being re-imported into
    """
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    test = get_object_or_404(Test, id=tid)
    scan_type = test.test_type.name
    engagement = test.engagement
    form = ReImportScanForm()
    jform = None
    push_all_jira_issues = False
    # Decide if we need to present the Push to JIRA form
    if get_system_setting(
            'enable_jira') and engagement.product.jira_pkey_set.first(
    ) is not None:
        push_all_jira_issues = engagement.product.jira_pkey_set.first(
        ).push_all_issues
        jform = JIRAImportScanForm(push_all=push_all_jira_issues, prefix='jiraform')

    form.initial['tags'] = [tag.name for tag in test.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            # Combine the form's date with the current time-of-day; make it
            # timezone-aware when the project runs with USE_TZ.
            scan_date_time = datetime.combine(scan_date, timezone.now().time())
            if settings.USE_TZ:
                scan_date_time = timezone.make_aware(
                    scan_date_time, timezone.get_default_timezone())
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES.get('file', None)
            scan_type = test.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            test.tags = ts
            # Reject oversized uploads before handing them to a parser.
            if file and is_scan_file_too_large(file):
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Report file is too large. Maximum supported size is {} MB".format(settings.SCAN_FILE_MAX_SIZE),
                    extra_tags='alert-danger')
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))
            try:
                parser = import_parser_factory(file, test, active, verified)
            except ValueError:
                # Unknown/unsupported scan type for this test.
                raise Http404()
            except Exception as e:
                # Any other parser failure: surface a generic message and log details.
                messages.add_message(
                    request,
                    messages.ERROR,
                    "An error has occurred in the parser, please see error "
                    "log for details.",
                    extra_tags='alert-danger')
                parse_logger.exception(e)
                parse_logger.error("Error in parser: {}".format(str(e)))
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))
            try:
                items = parser.items
                # IDs of the findings already on this test; anything not re-reported
                # below ends up in to_mitigate.
                original_items = test.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                # Push to Jira?
                push_to_jira = False
                if push_all_jira_issues:
                    push_to_jira = True
                elif 'jiraform-push_to_jira' in request.POST:
                    jform = JIRAImportScanForm(request.POST,
                                               prefix='jiraform',
                                               push_all=push_all_jira_issues)
                    if jform.is_valid():
                        push_to_jira = jform.cleaned_data.get('push_to_jira')
                for item in items:
                    # Normalize the severity label used by some parsers.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    # existing findings may be from before we had component_name/version fields
                    component_name = item.component_name if hasattr(
                        item, 'component_name') else None
                    component_version = item.component_version if hasattr(
                        item, 'component_version') else None

                    # If it doesn't clear minimum severity, move on
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # Try to find the existing finding
                    # If it's Veracode or Arachni, then we consider the description for some
                    # reason...
                    from titlecase import titlecase
                    item.title = titlecase(item.title)
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev),
                            description=item.description)
                    else:
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev))
                    # Only treat an unambiguous single match as "the same finding".
                    if len(finding) == 1:
                        finding = finding[0]
                        if finding.mitigated or finding.is_Mitigated:
                            # it was once fixed, but now back
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.mitigated_by = None
                            finding.active = True
                            finding.verified = verified
                            # existing findings may be from before we had component_name/version fields
                            finding.component_name = finding.component_name if finding.component_name else component_name
                            finding.component_version = finding.component_version if finding.component_version else component_version
                            finding.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            finding.notes.add(note)
                            # Re-open the endpoint statuses attached to this finding.
                            endpoint_status = finding.endpoint_status.all()
                            for status in endpoint_status:
                                status.mitigated_by = None
                                status.mitigated_time = None
                                status.mitigated = False
                                status.last_modified = timezone.now()
                                status.save()
                            reactivated_count += 1
                        else:
                            # existing findings may be from before we had component_name/version fields
                            if not finding.component_name or not finding.component_version:
                                finding.component_name = finding.component_name if finding.component_name else component_name
                                finding.component_version = finding.component_version if finding.component_version else component_version
                                finding.save(dedupe_option=False, push_to_jira=False)
                        new_items.append(finding.id)
                    else:
                        # No single existing match: treat as a brand-new finding.
                        item.test = test
                        # If the parser left today's date, fall back to the test start date.
                        if item.date == timezone.now().date():
                            item.date = test.target_start.date()
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        # Save it
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        # Add it to the new items
                        new_items.append(item.id)
                        finding = item

                        if hasattr(item, 'unsaved_req_resp') and len(
                                item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                # Arachni payloads are stored as-is; others are
                                # base64-encoded here before storage.
                                if scan_type == "Arachni Scan":
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"],
                                        burpResponseBase64=req_resp["resp"],
                                    )
                                else:
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=base64.b64encode(
                                            req_resp["req"].encode("utf-8")),
                                        burpResponseBase64=base64.b64encode(
                                            req_resp["resp"].encode("utf-8")),
                                    )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=base64.b64encode(
                                    item.unsaved_request.encode()),
                                burpResponseBase64=base64.b64encode(
                                    item.unsaved_response.encode()),
                            )
                            burp_rr.clean()
                            burp_rr.save()

                    if finding:
                        finding_count += 1
                        # Attach endpoints reported by the parser...
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding,
                                endpoint=ep)
                            ep.endpoint_status.add(eps)
                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        # ...and endpoints selected manually on the form.
                        for endpoint in form.cleaned_data['endpoints']:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding,
                                endpoint=ep)
                            ep.endpoint_status.add(eps)
                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        if item.unsaved_tags is not None:
                            finding.tags = item.unsaved_tags

                        # Save it. This may be the second time we save it in this function.
                        finding.save(push_to_jira=push_to_jira)
                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    if not finding.mitigated or not finding.is_Mitigated:
                        finding.mitigated = scan_date_time
                        finding.is_Mitigated = True
                        finding.mitigated_by = request.user
                        finding.active = False
                        finding.save()
                        note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                     author=request.user)
                        note.save()
                        finding.notes.add(note)
                        mitigated_count += 1
                        # Close the endpoint statuses attached to this finding too.
                        endpoint_status = finding.endpoint_status.all()
                        for status in endpoint_status:
                            status.mitigated_by = request.user
                            status.mitigated_time = timezone.now()
                            status.mitigated = True
                            status.last_modified = timezone.now()
                            status.save()

                # Never move the "updated" timestamps backwards.
                test.updated = max_safe([scan_date_time, test.updated])
                test.engagement.updated = max_safe(
                    [scan_date_time, test.engagement.updated])

                test.save()
                test.engagement.save()

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type + message(finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(finding_added_count, 'finding', 'added') + ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(reactivated_count, 'finding', 'reactivated') + ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(mitigated_count, 'finding', 'mitigated') + '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(event='scan_added',
                                    title=str(finding_count) + " findings for " + test.engagement.product.name,
                                    finding_count=finding_count,
                                    test=test,
                                    engagement=test.engagement,
                                    url=reverse('view_test', args=(test.id, )))

                return HttpResponseRedirect(
                    reverse('view_test', args=(test.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    # GET request, invalid form, or parser SyntaxError: render the upload form.
    product_tab = Product_Tab(engagement.product.id,
                              title="Re-upload a %s" % scan_type,
                              tab="engagements")
    product_tab.setEngagement(engagement)
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'eid': engagement.id,
            'additional_message': additional_message,
            'jform': jform,
        })
def save(self, push_to_jira=False):
    """Re-import a scan report into an existing Test (API serializer path).

    Existing matching findings are kept (and reactivated if they had been
    mitigated); unmatched report items become new findings; original findings
    not present in the report are marked mitigated. Returns the Test.

    :param push_to_jira: when True, pushes saved findings to JIRA
    :raises Exception: wraps parser ValueError/SyntaxError in generic Exceptions
    """
    data = self.validated_data
    test = data['test']
    scan_type = data['scan_type']
    endpoint_to_add = data['endpoint_to_add']
    min_sev = data['minimum_severity']
    scan_date = data['scan_date']
    # Combine scan date with current time-of-day; make aware under USE_TZ.
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time,
                                             timezone.get_default_timezone())
    verified = data['verified']
    active = data['active']

    try:
        parser = import_parser_factory(data.get('file'),
                                       test,
                                       active,
                                       verified,
                                       data['scan_type'],)
    except ValueError:
        raise Exception("Parser ValueError")

    try:
        items = parser.items
        # Finding objects currently on the test; anything not re-reported below
        # ends up in to_mitigate.
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        for item in items:
            # Normalize the severity label used by some parsers.
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            # Skip findings below the requested minimum severity.
            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                continue
            # Veracode/Arachni matches additionally compare the description.
            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description).all()
            else:
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev)).all()
            if findings:
                # Existing finding: reactivate if it had been mitigated.
                finding = findings[0]
                if finding.mitigated or finding.is_Mitigated:
                    finding.mitigated = None
                    finding.is_Mitigated = False
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified
                    finding.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=self.context['request'].user)
                    note.save()
                    finding.notes.add(note)
                    reactivated_count += 1
                new_items.append(finding)
            else:
                # New finding: attach to the test and save.
                item.test = test
                item.date = scan_date
                item.reporter = self.context['request'].user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = self.context['request'].user
                item.verified = verified
                item.active = active
                item.save(dedupe_option=False)
                finding_added_count += 1
                # NOTE(review): this appends item.id (an int) while the matched
                # branch appends the Finding object, and original_items holds
                # objects — the set difference below only ever removes objects.
                # Harmless for new items (they are not in original_items), but
                # inconsistent; confirm intent before changing.
                new_items.append(item.id)
                finding = item
                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=req_resp['req'],
                            burpResponseBase64=req_resp['resp'])
                        burp_rr.clean()
                        burp_rr.save()
                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=item.unsaved_request,
                        burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()
            if finding:
                finding_count += 1
                # Attach parser-reported endpoints plus any extra endpoint
                # supplied through the API.
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    finding.endpoints.add(ep)
                if endpoint_to_add:
                    finding.endpoints.add(endpoint_to_add)
                if item.unsaved_tags:
                    finding.tags = item.unsaved_tags
            # Second save: triggers dedupe/JIRA handling as configured.
            finding.save(push_to_jira=push_to_jira)

        # Findings that existed before but were not re-reported get mitigated.
        to_mitigate = set(original_items) - set(new_items)
        for finding in to_mitigate:
            if not finding.mitigated or not finding.is_Mitigated:
                finding.mitigated = scan_date_time
                finding.is_Mitigated = True
                finding.mitigated_by = self.context['request'].user
                finding.active = False
                finding.save(push_to_jira=push_to_jira)
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=self.context['request'].user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        # Never move the "updated"/target-end timestamps backwards.
        test.updated = max_safe([scan_date_time, test.updated])
        test.engagement.updated = max_safe([scan_date_time, test.engagement.updated])

        if test.engagement.engagement_type == 'CI/CD':
            test.target_end = max_safe([scan_date_time, test.target_end])
            test.engagement.target_end = max_safe([scan_date, test.engagement.target_end])

        test.save()
        test.engagement.save()
    except SyntaxError:
        raise Exception("Parser SyntaxError")

    return test
def save(self, push_to_jira=False):
    """Import a scan report as a brand-new Test (API serializer path).

    Creates the Test, saves every report item above the minimum severity as a
    finding, and — when ``close_old_findings`` is requested — closes active
    findings from earlier tests whose hash_code no longer appears. Returns the
    newly created Test.

    :param push_to_jira: when True, pushes saved findings to JIRA
    :raises Exception: wraps parser ValueError/SyntaxError in generic Exceptions
    """
    data = self.validated_data
    close_old_findings = data['close_old_findings']
    active = data['active']
    verified = data['verified']
    # Fall back to the scan type as the test type name if none was given.
    test_type, created = Test_Type.objects.get_or_create(
        name=data.get('test_type', data['scan_type']))
    endpoint_to_add = data['endpoint_to_add']
    environment, created = Development_Environment.objects.get_or_create(
        name='Development')
    scan_date = data['scan_date']
    # Combine scan date with current time-of-day; make aware under USE_TZ.
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time,
                                             timezone.get_default_timezone())
    test = Test(
        engagement=data['engagement'],
        lead=data['lead'],
        test_type=test_type,
        target_start=data['scan_date'],
        target_end=data['scan_date'],
        environment=environment,
        percent_complete=100)
    try:
        test.full_clean()
    except ValidationError:
        # Validation problems are deliberately ignored; the test is saved anyway.
        pass

    test.save()
    # return the id of the created test, can't find a better way because this is not a ModelSerializer....
    self.fields['test'] = serializers.IntegerField(read_only=True, default=test.id)

    # Never move the engagement's timestamps backwards.
    test.engagement.updated = max_safe([scan_date_time, test.engagement.updated])

    if test.engagement.engagement_type == 'CI/CD':
        test.engagement.target_end = max_safe([scan_date, test.engagement.target_end])

    test.engagement.save()

    if 'tags' in data:
        test.tags = ' '.join(data['tags'])
    try:
        parser = import_parser_factory(data.get('file'),
                                       test,
                                       active,
                                       verified,
                                       data['scan_type'],)
    except ValueError:
        raise Exception('FileParser ValueError')

    # NOTE(review): skipped_hashcodes is never appended to in this method; the
    # exclude() below is therefore a no-op placeholder — confirm before removing.
    skipped_hashcodes = []
    try:
        for item in parser.items:
            # Normalize the severity label used by some parsers.
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev

            # Skip findings below the requested minimum severity.
            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[data['minimum_severity']]):
                continue

            item.test = test
            item.date = test.target_start
            item.reporter = self.context['request'].user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = self.context['request'].user
            item.active = data['active']
            item.verified = data['verified']
            item.save(dedupe_option=False)

            if (hasattr(item, 'unsaved_req_resp') and
                    len(item.unsaved_req_resp) > 0):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=req_resp["req"],
                        burpResponseBase64=req_resp["resp"])
                    burp_rr.clean()
                    burp_rr.save()

            if (item.unsaved_request is not None and
                    item.unsaved_response is not None):
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=item.unsaved_request,
                    burpResponseBase64=item.unsaved_response)
                burp_rr.clean()
                burp_rr.save()

            # Attach parser-reported endpoints plus any extra endpoint supplied
            # through the API.
            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=test.engagement.product)
                item.endpoints.add(ep)
            if endpoint_to_add:
                item.endpoints.add(endpoint_to_add)
            if item.unsaved_tags is not None:
                item.tags = item.unsaved_tags

            # Second save: triggers dedupe/JIRA handling as configured.
            item.save(push_to_jira=push_to_jira)
    except SyntaxError:
        raise Exception('Parser SyntaxError')

    if close_old_findings:
        # Close old active findings that are not reported by this scan.
        new_hash_codes = test.finding_set.values('hash_code')
        old_findings = None
        # Scope the closure to the engagement or the whole product depending on
        # the engagement's deduplication setting.
        if test.engagement.deduplication_on_engagement:
            old_findings = Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement=test.engagement,
                        test__test_type=test_type,
                        active=True)
        else:
            old_findings = Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement__product=test.engagement.product,
                        test__test_type=test_type,
                        active=True)
        for old_finding in old_findings:
            old_finding.active = False
            old_finding.mitigated = datetime.datetime.combine(
                test.target_start, timezone.now().time())
            if settings.USE_TZ:
                old_finding.mitigated = timezone.make_aware(
                    old_finding.mitigated, timezone.get_default_timezone())
            old_finding.mitigated_by = self.context['request'].user
            old_finding.notes.create(author=self.context['request'].user,
                                     entry="This finding has been automatically closed"
                                           " as it is not present anymore in recent scans.")
            Tag.objects.add_tag(old_finding, 'stale')
            old_finding.save()
            title = 'An old finding has been closed for "{}".' \
                .format(test.engagement.product.name)
            description = 'See <a href="{}">{}</a>' \
                .format(reverse('view_finding', args=(old_finding.id, )),
                        old_finding.title)
            create_notification(event='other',
                                title=title,
                                description=description,
                                icon='bullseye',
                                objowner=self.context['request'].user)
    return test