def import_scan_results(request, eid):
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            scan_type = request.POST['scan_type']
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(name="Development")
            t = Test(engagement=engagement, test_type=tt)
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    item.date = timezone.now()
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.save()

                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(finding=item,
                                                             burpRequestBase64=req_resp["req"],
                                                             burpResponseBase64=req_resp["resp"])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=item,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                     host=endpoint.host,
                                                                     fqdn=endpoint.fqdn,
                                                                     port=endpoint.port,
                                                                     path=endpoint.path,
                                                                     query=endpoint.query,
                                                                     fragment=endpoint.fragment,
                                                                     product=t.engagement.product)
                        item.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags

                    finding_count += 1

                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')

                create_notification(event='results_added',
                                    title='Results added',
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=engagement, title="Import Scan Results", top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'eid': engagement.id,
    })
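# A minimal sketch (not part of the view above) of the severity-threshold check
# used throughout these importers, assuming DefectDojo's Finding.SEVERITIES
# convention where a lower numeric value means a more severe finding. The
# literal mapping and helper name here are illustrative only.
SEVERITIES = {'Critical': 0, 'High': 1, 'Medium': 2, 'Low': 3, 'Info': 4}


def below_minimum(severity, minimum_severity):
    # A larger number means *less* severe, so "greater than the minimum"
    # means the finding falls below the configured floor and is skipped.
    return SEVERITIES[severity] > SEVERITIES[minimum_severity]


assert below_minimum('Low', 'Medium')        # Low is filtered out
assert not below_minimum('High', 'Medium')   # High is kept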
def import_scan_results(request, eid):
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    cred_form = CredMappingForm()
    cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
        engagement=engagement).order_by('cred_id')

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement,
                     test_type=tt,
                     target_start=scan_date,
                     target_end=scan_date,
                     environment=environment,
                     percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only
                    # allow it if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()

                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()

            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    item.save()

                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(finding=item,
                                                             burpRequestBase64=req_resp["req"],
                                                             burpResponseBase64=req_resp["resp"])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=item,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags

                    finding_count += 1

                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=engagement, title="Import Scan Results", top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'eid': engagement.id,
        'cred_form': cred_form,
    })
def save(self):
    data = self.validated_data
    test = data['test']
    scan_type = data['scan_type']
    min_sev = data['minimum_severity']
    scan_date = data['scan_date']
    verified = data['verified']
    active = data['active']

    try:
        parser = import_parser_factory(data['file'], test, data['scan_type'])
    except ValueError:
        raise Exception("Parser ValueError")

    try:
        items = parser.items
        # snapshot of the findings already on the test; anything not matched
        # by this re-upload is mitigated below
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0

        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                continue

            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description).all()
            else:
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev)).all()

            if findings:
                # existing finding found: re-activate it if it had been mitigated
                finding = findings[0]
                if finding.mitigated:
                    finding.mitigated = None
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified
                    finding.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=self.context['request'].user)
                    note.save()
                    finding.notes.add(note)
                    reactivated_count += 1
                new_items.append(finding)
            else:
                # no match: brand-new finding
                item.test = test
                item.date = test.target_start
                item.reporter = self.context['request'].user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = self.context['request'].user
                item.verified = verified
                item.active = active
                item.save()
                finding_added_count += 1
                # note: an id is appended here while matched findings above are
                # appended as objects; new findings can never be in original_items,
                # so the set difference below is unaffected
                new_items.append(item.id)
                finding = item

                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(finding=finding,
                                                         burpRequestBase64=req_resp['req'],
                                                         burpResponseBase64=req_resp['resp'])
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(finding=finding,
                                                     burpRequestBase64=item.unsaved_request,
                                                     burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()

            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    finding.endpoints.add(ep)
                # if item.unsaved_tags:
                #     finding.tags = item.unsaved_tags

        # findings from the previous upload that were not matched above are
        # considered fixed and get mitigated
        to_mitigate = set(original_items) - set(new_items)
        for finding in to_mitigate:
            finding.mitigated = datetime.datetime.combine(scan_date, timezone.now().time())
            finding.mitigated_by = self.context['request'].user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=self.context['request'].user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise Exception("Parser SyntaxError")

    return test
def save(self):
    data = self.validated_data
    test = data['test']
    scan_type = data['scan_type']
    min_sev = data['minimum_severity']
    scan_date = data['scan_date']
    verified = data['verified']
    active = data['active']

    try:
        parser = import_parser_factory(data['file'], test, active, verified, data['scan_type'])
    except ValueError:
        raise Exception("Parser ValueError")

    try:
        items = parser.items
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0

        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                continue

            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description).all()
            else:
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev)).all()

            if findings:
                finding = findings[0]
                if finding.mitigated:
                    finding.mitigated = None
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified
                    finding.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=self.context['request'].user)
                    note.save()
                    finding.notes.add(note)
                    reactivated_count += 1
                new_items.append(finding)
            else:
                item.test = test
                item.date = scan_date
                item.reporter = self.context['request'].user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = self.context['request'].user
                item.verified = verified
                item.active = active
                item.save(dedupe_option=False)
                finding_added_count += 1
                new_items.append(item.id)
                finding = item

                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(finding=finding,
                                                         burpRequestBase64=req_resp['req'],
                                                         burpResponseBase64=req_resp['resp'])
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(finding=finding,
                                                     burpRequestBase64=item.unsaved_request,
                                                     burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()

            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    finding.endpoints.add(ep)
                if item.unsaved_tags:
                    finding.tags = item.unsaved_tags
                finding.save()

        to_mitigate = set(original_items) - set(new_items)
        for finding in to_mitigate:
            finding.mitigated = datetime.datetime.combine(scan_date, timezone.now().time())
            if settings.USE_TZ:
                finding.mitigated = timezone.make_aware(finding.mitigated, timezone.get_default_timezone())
            finding.mitigated_by = self.context['request'].user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=self.context['request'].user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise Exception("Parser SyntaxError")

    return test
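# A hedged sketch of the re-upload bookkeeping shared by both save() variants
# above: findings from the previous upload that are not matched by the new
# report get mitigated. Plain strings stand in for Finding objects. The
# variants above append ids for newly created findings but objects for matched
# ones; the difference still comes out right because brand-new findings can
# never appear in the pre-upload snapshot.
original_items = {'finding-a', 'finding-b', 'finding-c'}   # before re-upload
new_items = {'finding-b', 'finding-c'}                     # matched this time

to_mitigate = set(original_items) - set(new_items)
assert to_mitigate == {'finding-a'}   # absent from the new report, so closed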
def obj_create(self, bundle, **kwargs):
    bundle.obj = ImportScanObject(initial=kwargs)
    self.is_valid(bundle)
    if bundle.errors:
        raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors))
    bundle = self.full_hydrate(bundle)

    test = bundle.obj.__getattr__('test_obj')
    scan_type = bundle.obj.__getattr__('scan_type')
    min_sev = bundle.obj.__getattr__('minimum_severity')
    scan_date = bundle.obj.__getattr__('scan_date')
    verified = bundle.obj.__getattr__('verified')
    active = bundle.obj.__getattr__('active')

    try:
        parser = import_parser_factory(bundle.data['file'], test)
    except ValueError:
        raise NotFound("Parser ValueError")

    try:
        items = parser.items
        original_items = test.finding_set.all().values_list("id", flat=True)
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0

        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                continue

            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                find = Finding.objects.filter(title=item.title,
                                              test__id=test.id,
                                              severity=sev,
                                              numerical_severity=Finding.get_numerical_severity(sev),
                                              description=item.description)
            else:
                find = Finding.objects.filter(title=item.title,
                                              test__id=test.id,
                                              severity=sev,
                                              numerical_severity=Finding.get_numerical_severity(sev))

            if len(find) == 1:
                find = find[0]
                if find.mitigated:
                    # it was once fixed, but now back
                    find.mitigated = None
                    find.mitigated_by = None
                    find.active = True
                    find.verified = verified
                    find.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=bundle.request.user)
                    note.save()
                    find.notes.add(note)
                    reactivated_count += 1
                new_items.append(find.id)
            else:
                item.test = test
                item.date = test.target_start
                item.reporter = bundle.request.user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = bundle.request.user
                item.verified = verified
                item.active = active
                item.save()
                finding_added_count += 1
                new_items.append(item.id)
                find = item

                if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(finding=find,
                                                         burpRequestBase64=req_resp["req"],
                                                         burpResponseBase64=req_resp["resp"])
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request is not None and item.unsaved_response is not None:
                    burp_rr = BurpRawRequestResponse(finding=find,
                                                     burpRequestBase64=item.unsaved_request,
                                                     burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()

            if find:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                 host=endpoint.host,
                                                                 path=endpoint.path,
                                                                 query=endpoint.query,
                                                                 fragment=endpoint.fragment,
                                                                 product=test.engagement.product)
                    find.endpoints.add(ep)
                if item.unsaved_tags is not None:
                    find.tags = item.unsaved_tags

        # calculate the difference
        to_mitigate = set(original_items) - set(new_items)
        for finding_id in to_mitigate:
            finding = Finding.objects.get(id=finding_id)
            finding.mitigated = datetime.combine(scan_date, timezone.now().time())
            finding.mitigated_by = bundle.request.user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=bundle.request.user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise NotFound("Parser SyntaxError")

    # Everything executed fine. We successfully imported the scan.
    raise ImmediateHttpResponse(HttpCreated(location=bundle.obj.__getattr__('test')))
def add_findings(request, tid):
    test = Test.objects.get(id=tid)
    form_error = False
    jform = None
    form = AddFindingForm(initial={'date': timezone.now().date()}, req_resp=None)
    push_all_jira_issues = False
    use_jira = get_system_setting('enable_jira') and test.engagement.product.jira_pkey is not None

    if request.method == 'POST':
        form = AddFindingForm(request.POST, req_resp=None)
        if (form['active'].value() is False or form['verified'].value() is False) \
                and 'jiraform-push_to_jira' in request.POST:
            error = ValidationError('Findings must be active and verified to be pushed to JIRA',
                                    code='not_active_or_verified')
            if form['active'].value() is False:
                form.add_error('active', error)
            if form['verified'].value() is False:
                form.add_error('verified', error)
            messages.add_message(request,
                                 messages.ERROR,
                                 'Findings must be active and verified to be pushed to JIRA',
                                 extra_tags='alert-danger')

        if form['severity'].value() == 'Info' and 'jiraform-push_to_jira' in request.POST:
            error = ValidationError('Findings with Informational severity cannot be pushed to JIRA.',
                                    code='info-severity-to-jira')

        if (form['active'].value() is False or form['false_p'].value()) and form['duplicate'].value() is False:
            closing_disabled = Note_Type.objects.filter(is_mandatory=True, is_active=True).count()
            if closing_disabled != 0:
                error_inactive = ValidationError(
                    'Can not set a finding as inactive without adding all mandatory notes',
                    code='inactive_without_mandatory_notes')
                error_false_p = ValidationError(
                    'Can not set a finding as false positive without adding all mandatory notes',
                    code='false_p_without_mandatory_notes')
                if form['active'].value() is False:
                    form.add_error('active', error_inactive)
                if form['false_p'].value():
                    form.add_error('false_p', error_false_p)
                messages.add_message(
                    request,
                    messages.ERROR,
                    'Can not set a finding as inactive or false positive without adding all mandatory notes',
                    extra_tags='alert-danger')

        if use_jira:
            jform = JIRAFindingForm(request.POST,
                                    prefix='jiraform',
                                    push_all=push_all_jira_issues,
                                    jira_pkey=test.engagement.product.jira_pkey)

        if form.is_valid() and (jform is None or jform.is_valid()):
            if jform:
                logger.debug('jform.jira_issue: %s', jform.cleaned_data.get('jira_issue'))
                logger.debug('jform.push_to_jira: %s', jform.cleaned_data.get('push_to_jira'))

            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = timezone.now()
                new_finding.mitigated_by = request.user
                new_finding.is_Mitigated = True

            create_template = new_finding.is_template
            # always false now since this will be deprecated soon in favor of new Finding_Template model
            new_finding.is_template = False
            new_finding.save(dedupe_option=False, push_to_jira=False)

            for ep in form.cleaned_data['endpoints']:
                eps, created = Endpoint_Status.objects.get_or_create(finding=new_finding, endpoint=ep)
                ep.endpoint_status.add(eps)
                new_finding.endpoints.add(ep)
                new_finding.endpoint_status.add(eps)

            # Push to jira?
            push_to_jira = False
            jira_message = None
            if jform and jform.is_valid():
                # Push to Jira?
                push_to_jira = push_all_jira_issues or jform.cleaned_data.get('push_to_jira')

                # if the jira issue key was changed, update database
                new_jira_issue_key = jform.cleaned_data.get('jira_issue')
                if new_finding.has_jira_issue():
                    jira_issue = new_finding.jira_issue
                    # everything in DD around JIRA integration is based on the internal id
                    # of the issue in JIRA instead of on the public jira issue key.
                    # I have no idea why, but it means we have to retrieve the issue from JIRA
                    # to get the internal JIRA id. we can assume the issue exist,
                    # which is already checked in the validation of the jform
                    if not new_jira_issue_key:
                        finding_unlink_jira(request, new_finding)
                        jira_message = 'Link to JIRA issue removed successfully.'
                    elif new_jira_issue_key != new_finding.jira_issue.jira_key:
                        finding_unlink_jira(request, new_finding)
                        finding_link_jira(request, new_finding, new_jira_issue_key)
                        jira_message = 'Changed JIRA link successfully.'
                else:
                    logger.debug('finding has no jira issue yet')
                    if new_jira_issue_key:
                        logger.debug('finding has no jira issue yet, but jira issue specified in request. trying to link.')
                        finding_link_jira(request, new_finding, new_jira_issue_key)
                        jira_message = 'Linked a JIRA issue successfully.'

            new_finding.save(false_history=True, push_to_jira=push_to_jira)
            create_notification(event='other',
                                title='Addition of %s' % new_finding.title,
                                description='Finding "%s" was added by %s' % (new_finding.title, request.user),
                                url=request.build_absolute_uri(reverse('view_finding', args=(new_finding.id,))),
                                icon="exclamation-triangle")

            if 'request' in form.cleaned_data or 'response' in form.cleaned_data:
                burp_rr = BurpRawRequestResponse(
                    finding=new_finding,
                    burpRequestBase64=base64.b64encode(form.cleaned_data['request'].encode()),
                    burpResponseBase64=base64.b64encode(form.cleaned_data['response'].encode()),
                )
                burp_rr.clean()
                burp_rr.save()

            if create_template:
                templates = Finding_Template.objects.filter(title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(request,
                                         messages.ERROR,
                                         'A finding template was not created. A template with this title already exists.',
                                         extra_tags='alert-danger')
                else:
                    template = Finding_Template(title=new_finding.title,
                                                cwe=new_finding.cwe,
                                                severity=new_finding.severity,
                                                description=new_finding.description,
                                                mitigation=new_finding.mitigation,
                                                impact=new_finding.impact,
                                                references=new_finding.references,
                                                numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A finding template was also created.',
                                         extra_tags='alert-success')

            if '_Finished' in request.POST:
                return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
            else:
                return HttpResponseRedirect(reverse('add_findings', args=(test.id,)))
        else:
            if 'endpoints' in form.cleaned_data:
                form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
            else:
                form.fields['endpoints'].queryset = Endpoint.objects.none()
            form_error = True
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')
    else:
        if use_jira:
            push_all_jira_issues = test.engagement.product.jira_pkey.push_all_issues
            jform = JIRAFindingForm(push_all=push_all_jira_issues,
                                    prefix='jiraform',
                                    jira_pkey=test.engagement.product.jira_pkey)

    product_tab = Product_Tab(test.engagement.product.id, title="Add Finding", tab="engagements")
    product_tab.setEngagement(test.engagement)
    return render(request, 'dojo/add_findings.html', {
        'form': form,
        'product_tab': product_tab,
        'test': test,
        'temp': False,
        'tid': tid,
        'form_error': form_error,
        'jform': jform,
    })
def add_findings(request, tid):
    test = Test.objects.get(id=tid)
    form_error = False
    jform = None
    form = AddFindingForm(initial={'date': timezone.now().date()},
                          req_resp=None,
                          product=test.engagement.product)
    push_all_jira_issues = jira_helper.is_push_all_issues(test)
    use_jira = jira_helper.get_jira_project(test) is not None

    if request.method == 'POST':
        form = AddFindingForm(request.POST, req_resp=None, product=test.engagement.product)
        if (form['active'].value() is False or form['false_p'].value()) and form['duplicate'].value() is False:
            closing_disabled = Note_Type.objects.filter(is_mandatory=True, is_active=True).count()
            if closing_disabled != 0:
                error_inactive = ValidationError(
                    'Can not set a finding as inactive without adding all mandatory notes',
                    code='inactive_without_mandatory_notes')
                error_false_p = ValidationError(
                    'Can not set a finding as false positive without adding all mandatory notes',
                    code='false_p_without_mandatory_notes')
                if form['active'].value() is False:
                    form.add_error('active', error_inactive)
                if form['false_p'].value():
                    form.add_error('false_p', error_false_p)
                messages.add_message(
                    request,
                    messages.ERROR,
                    'Can not set a finding as inactive or false positive without adding all mandatory notes',
                    extra_tags='alert-danger')

        if use_jira:
            jform = JIRAFindingForm(request.POST,
                                    prefix='jiraform',
                                    push_all=push_all_jira_issues,
                                    jira_project=jira_helper.get_jira_project(test),
                                    finding_form=form)

        if form.is_valid() and (jform is None or jform.is_valid()):
            if jform:
                logger.debug('jform.jira_issue: %s', jform.cleaned_data.get('jira_issue'))
                logger.debug('jform.push_to_jira: %s', jform.cleaned_data.get('push_to_jira'))

            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(new_finding.severity)
            new_finding.tags = form.cleaned_data['tags']
            new_finding.save(dedupe_option=False, push_to_jira=False)

            # Save and add new endpoints
            finding_helper.add_endpoints(new_finding, form)

            # Push to jira?
            push_to_jira = False
            jira_message = None
            if jform and jform.is_valid():
                # can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false
                # push_to_jira = jira_helper.is_push_to_jira(new_finding, jform.cleaned_data.get('push_to_jira'))
                push_to_jira = push_all_jira_issues or jform.cleaned_data.get('push_to_jira')

                # if the jira issue key was changed, update database
                new_jira_issue_key = jform.cleaned_data.get('jira_issue')
                if new_finding.has_jira_issue:
                    jira_issue = new_finding.jira_issue
                    # everything in DD around JIRA integration is based on the internal id
                    # of the issue in JIRA instead of on the public jira issue key.
                    # I have no idea why, but it means we have to retrieve the issue from JIRA
                    # to get the internal JIRA id. we can assume the issue exist,
                    # which is already checked in the validation of the jform
                    if not new_jira_issue_key:
                        jira_helper.finding_unlink_jira(request, new_finding)
                        jira_message = 'Link to JIRA issue removed successfully.'
                    elif new_jira_issue_key != new_finding.jira_issue.jira_key:
                        jira_helper.finding_unlink_jira(request, new_finding)
                        jira_helper.finding_link_jira(request, new_finding, new_jira_issue_key)
                        jira_message = 'Changed JIRA link successfully.'
                else:
                    logger.debug('finding has no jira issue yet')
                    if new_jira_issue_key:
                        logger.debug('finding has no jira issue yet, but jira issue specified in request. trying to link.')
                        jira_helper.finding_link_jira(request, new_finding, new_jira_issue_key)
                        jira_message = 'Linked a JIRA issue successfully.'

            new_finding.save(false_history=True, push_to_jira=push_to_jira)
            create_notification(event='other',
                                title='Addition of %s' % new_finding.title,
                                finding=new_finding,
                                description='Finding "%s" was added by %s' % (new_finding.title, request.user),
                                url=request.build_absolute_uri(reverse('view_finding', args=(new_finding.id,))),
                                icon="exclamation-triangle")

            if 'request' in form.cleaned_data or 'response' in form.cleaned_data:
                burp_rr = BurpRawRequestResponse(
                    finding=new_finding,
                    burpRequestBase64=base64.b64encode(form.cleaned_data['request'].encode()),
                    burpResponseBase64=base64.b64encode(form.cleaned_data['response'].encode()),
                )
                burp_rr.clean()
                burp_rr.save()

            if '_Finished' in request.POST:
                return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
            else:
                return HttpResponseRedirect(reverse('add_findings', args=(test.id,)))
        else:
            form_error = True
            add_error_message_to_response('The form has errors, please correct them below.')
            add_field_errors_to_response(jform)
            add_field_errors_to_response(form)
    else:
        if use_jira:
            jform = JIRAFindingForm(push_all=jira_helper.is_push_all_issues(test),
                                    prefix='jiraform',
                                    jira_project=jira_helper.get_jira_project(test),
                                    finding_form=form)

    product_tab = Product_Tab(test.engagement.product.id, title="Add Finding", tab="engagements")
    product_tab.setEngagement(test.engagement)
    return render(request, 'dojo/add_findings.html', {
        'form': form,
        'product_tab': product_tab,
        'test': test,
        'temp': False,
        'tid': tid,
        'form_error': form_error,
        'jform': jform,
    })
def obj_create(self, bundle, **kwargs):
    bundle.obj = ImportScanObject(initial=kwargs)
    self.is_valid(bundle)
    if bundle.errors:
        raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors))
    bundle = self.full_hydrate(bundle)

    # We now have all the options we need and will just replicate the process in views.py
    tt, t_created = Test_Type.objects.get_or_create(name=bundle.data['scan_type'])
    # will save in development environment
    environment, env_created = Development_Environment.objects.get_or_create(name="Development")
    scan_date = datetime.strptime(bundle.data['scan_date'], '%Y-%m-%d')
    t = Test(engagement=bundle.obj.__getattr__('engagement_obj'),
             lead=bundle.obj.__getattr__('user_obj'),
             test_type=tt,
             target_start=scan_date,
             target_end=scan_date,
             environment=environment,
             percent_complete=100)
    try:
        t.full_clean()
    except ValidationError as e:
        print("Error Validating Test Object")
        print(e)

    t.save()
    t.tags = bundle.data['tags']

    try:
        parser = import_parser_factory(bundle.data['file'], t)
    except ValueError:
        raise NotFound("Parser ValueError")

    try:
        for item in parser.items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev

            if Finding.SEVERITIES[sev] > Finding.SEVERITIES[bundle.data['minimum_severity']]:
                continue

            item.test = t
            item.date = t.target_start
            item.reporter = bundle.request.user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = bundle.request.user
            item.active = bundle.data['active']
            item.verified = bundle.data['verified']
            item.save()

            if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(finding=item,
                                                     burpRequestBase64=req_resp["req"],
                                                     burpResponseBase64=req_resp["resp"])
                    burp_rr.clean()
                    burp_rr.save()

            if item.unsaved_request is not None and item.unsaved_response is not None:
                burp_rr = BurpRawRequestResponse(finding=item,
                                                 burpRequestBase64=item.unsaved_request,
                                                 burpResponseBase64=item.unsaved_response)
                burp_rr.clean()
                burp_rr.save()

            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                             host=endpoint.host,
                                                             path=endpoint.path,
                                                             query=endpoint.query,
                                                             fragment=endpoint.fragment,
                                                             product=t.engagement.product)
                item.endpoints.add(ep)

            if item.unsaved_tags is not None:
                item.tags = item.unsaved_tags
    except SyntaxError:
        raise NotFound("Parser SyntaxError")

    # Everything executed fine. We successfully imported the scan.
    res = TestResource()
    uri = res.get_resource_uri(t)
    raise ImmediateHttpResponse(HttpCreated(location=uri))
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified,
                            minimum_severity=None, endpoints_to_add=None, push_to_jira=None,
                            now=timezone.now()):
    logger.debug('endpoints_to_add: %s', endpoints_to_add)
    new_findings = []
    items = parsed_findings
    logger.debug('starting import of %i items.', len(items) if items else 0)
    i = 0
    for item in items:
        sev = item.severity
        if sev == 'Information' or sev == 'Informational':
            sev = 'Info'
        item.severity = sev
        item.numerical_severity = Finding.get_numerical_severity(sev)

        if minimum_severity and (Finding.SEVERITIES[sev] > Finding.SEVERITIES[minimum_severity]):
            continue

        item.test = test
        item.reporter = user if user else get_current_user()
        item.last_reviewed = now
        item.last_reviewed_by = user if user else get_current_user()

        # Only override the active/verified flags if the parser left them at
        # the default (True); a parser that explicitly set them to False wins.
        if item.active:
            item.active = active
        if item.verified:
            item.verified = verified

        item.created = now
        item.updated = now
        item.save(dedupe_option=False)

        if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0):
            for req_resp in item.unsaved_req_resp:
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=base64.b64encode(req_resp["req"].encode("utf-8")),
                    burpResponseBase64=base64.b64encode(req_resp["resp"].encode("utf-8")))
                burp_rr.clean()
                burp_rr.save()

        if (item.unsaved_request is not None and item.unsaved_response is not None):
            burp_rr = BurpRawRequestResponse(
                finding=item,
                burpRequestBase64=base64.b64encode(item.unsaved_request.encode()),
                burpResponseBase64=base64.b64encode(item.unsaved_response.encode()))
            burp_rr.clean()
            burp_rr.save()

        for endpoint in item.unsaved_endpoints:
            try:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=test.engagement.product)
            except (MultipleObjectsReturned):
                pass
            try:
                eps, created = Endpoint_Status.objects.get_or_create(finding=item, endpoint=ep)
            except (MultipleObjectsReturned):
                pass

            ep.endpoint_status.add(eps)
            item.endpoint_status.add(eps)
            item.endpoints.add(ep)

        if endpoints_to_add:
            for endpoint in endpoints_to_add:
                # TODO Not sure what happens here, we get an endpoint model and try to create it again?
                try:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                except (MultipleObjectsReturned):
                    pass
                try:
                    eps, created = Endpoint_Status.objects.get_or_create(finding=item, endpoint=ep)
                except (MultipleObjectsReturned):
                    pass

                ep.endpoint_status.add(eps)
                item.endpoints.add(ep)
                item.endpoint_status.add(eps)

        if item.unsaved_tags:
            item.tags = item.unsaved_tags

        new_findings.append(item)
        item.save(push_to_jira=push_to_jira)

    return new_findings
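# A side note on the `now=timezone.now()` default in process_parsed_findings()
# above: Python evaluates default arguments once, at definition time, so in a
# long-lived process every call that omits `now` reuses the same timestamp.
# Standard-library demonstration (stamp() is a hypothetical helper):
from datetime import datetime
import time


def stamp(now=datetime.now()):   # evaluated once, when `def` runs
    return now


first = stamp()
time.sleep(0.01)
second = stamp()
assert first == second   # both calls reuse the definition-time value
# The usual fix is a `now=None` default with `now = now or timezone.now()` in the body.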
def import_scan_results(request, eid=None, pid=None):
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0

    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(engagement=engagement).order_by('cred_id')

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            # Allows for a test to be imported with an engagement created on the fly
            if engagement is None:
                engagement = Engagement()
                product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime("%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()

            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement,
                     test_type=tt,
                     target_start=scan_date,
                     target_end=scan_date,
                     environment=environment,
                     percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only
                    # allow it if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()

                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()

            parser = import_parser_factory(file, t)

            try:
                for item in parser.items:
                    print("item blowup")
                    print(item)
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    if item.date == timezone.now().date():
                        item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    item.save(dedupe_option=False, false_history=True)

                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(finding=item,
                                                             burpRequestBase64=req_resp["req"],
                                                             burpResponseBase64=req_resp["resp"])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=item,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    item.save(false_history=True)

                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags

                    finding_count += 1

                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')

                create_notification(event='results_added',
                                    title=str(finding_count) + " findings for " + engagement.product.name,
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")

    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'product_tab': product_tab,
        'custom_breadcrumb': custom_breadcrumb,
        'title': title,
        'cred_form': cred_form,
    })
def save(self, push_to_jira=False):
    data = self.validated_data
    test = data['test']
    scan_type = data['scan_type']
    endpoint_to_add = data['endpoint_to_add']
    min_sev = data['minimum_severity']
    scan_date = data['scan_date']
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time, timezone.get_default_timezone())
    verified = data['verified']
    active = data['active']

    try:
        parser = import_parser_factory(data.get('file', None),
                                       test,
                                       active,
                                       verified,
                                       data['scan_type'])
    except ValueError:
        raise Exception("Parser ValueError")

    try:
        items = parser.items
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        reactivated_items = []
        unchanged_count = 0
        unchanged_items = []

        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                continue

            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description).all()
            else:
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev)).all()

            if findings:
                # existing finding found
                finding = findings[0]
                if finding.mitigated or finding.is_Mitigated:
                    finding.mitigated = None
                    finding.is_Mitigated = False
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified
                    finding.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=self.context['request'].user)
                    note.save()

                    endpoint_status = finding.endpoint_status.all()
                    for status in endpoint_status:
                        status.mitigated_by = None
                        status.mitigated_time = None
                        status.mitigated = False
                        status.last_modified = timezone.now()
                        status.save()

                    finding.notes.add(note)
                    reactivated_items.append(finding)
                    reactivated_count += 1
                else:
                    unchanged_items.append(finding)
                    unchanged_count += 1
            else:
                # no existing finding found
                item.test = test
                item.date = scan_date
                item.reporter = self.context['request'].user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = self.context['request'].user
                item.verified = verified
                item.active = active
                item.save(dedupe_option=False)
                finding_added_count += 1
                new_items.append(item)
                finding = item

                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(finding=finding,
                                                         burpRequestBase64=req_resp['req'],
                                                         burpResponseBase64=req_resp['resp'])
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(finding=finding,
                                                     burpRequestBase64=item.unsaved_request,
                                                     burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()

            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    finding.endpoints.add(ep)
                if endpoint_to_add:
                    finding.endpoints.add(endpoint_to_add)
                if item.unsaved_tags:
                    finding.tags = item.unsaved_tags

                finding.save(push_to_jira=push_to_jira)

        to_mitigate = set(original_items) - set(reactivated_items) - set(unchanged_items)
        mitigated_findings = []
        for finding in to_mitigate:
            if not finding.mitigated or not finding.is_Mitigated:
                finding.mitigated = scan_date_time
                finding.is_Mitigated = True
                finding.mitigated_by = self.context['request'].user
                finding.active = False

                endpoint_status = finding.endpoint_status.all()
                for status in endpoint_status:
                    status.mitigated_by = self.context['request'].user
                    status.mitigated_time = timezone.now()
                    status.mitigated = True
                    status.last_modified = timezone.now()
                    status.save()

                finding.save(push_to_jira=push_to_jira)
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=self.context['request'].user)
                note.save()
                finding.notes.add(note)
                mitigated_findings.append(finding)
                mitigated_count += 1

        untouched = set(unchanged_items) - set(to_mitigate)

        test.updated = max_safe([scan_date_time, test.updated])
        test.engagement.updated = max_safe([scan_date_time, test.engagement.updated])
        if test.engagement.engagement_type == 'CI/CD':
            test.target_end = max_safe([scan_date_time, test.target_end])
            test.engagement.target_end = max_safe([scan_date, test.engagement.target_end])
        test.save()
        test.engagement.save()

        print(len(new_items))
        print(reactivated_count)
        print(mitigated_count)
        print(unchanged_count - mitigated_count)

        updated_count = mitigated_count + reactivated_count + len(new_items)
        if updated_count > 0:
            # new_items = original_items
            title = 'Updated ' + str(updated_count) + " findings for " + str(test.engagement.product) + ': ' + str(test.engagement.name) + ': ' + str(test)
            create_notification(initiator=self.context['request'].user,
                                event='scan_added',
                                title=title,
                                findings_new=new_items,
                                findings_mitigated=mitigated_findings,
                                findings_reactivated=reactivated_items,
                                finding_count=updated_count,
                                test=test,
                                engagement=test.engagement,
                                product=test.engagement.product,
                                findings_untouched=untouched,
                                url=reverse('view_test', args=(test.id,)))
    except SyntaxError:
        raise Exception("Parser SyntaxError")

    return test
def save(self, push_to_jira=False):
    data = self.validated_data
    close_old_findings = data['close_old_findings']
    active = data['active']
    verified = data['verified']
    test_type, created = Test_Type.objects.get_or_create(name=data.get('test_type', data['scan_type']))
    endpoint_to_add = data['endpoint_to_add']
    environment, created = Development_Environment.objects.get_or_create(name='Development')
    scan_date = data['scan_date']
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time, timezone.get_default_timezone())

    version = ''
    if 'version' in data:
        version = data['version']

    test = Test(engagement=data['engagement'],
                lead=data['lead'],
                test_type=test_type,
                target_start=data['scan_date'],
                target_end=data['scan_date'],
                environment=environment,
                percent_complete=100,
                version=version)
    try:
        test.full_clean()
    except ValidationError:
        pass

    test.save()
    # return the id of the created test, can't find a better way because this is not a ModelSerializer...
    self.fields['test'] = serializers.IntegerField(read_only=True, default=test.id)

    test.engagement.updated = max_safe([scan_date_time, test.engagement.updated])
    if test.engagement.engagement_type == 'CI/CD':
        test.engagement.target_end = max_safe([scan_date, test.engagement.target_end])
    test.engagement.save()

    if 'tags' in data:
        test.tags = ' '.join(data['tags'])

    try:
        parser = import_parser_factory(data.get('file', None),
                                       test,
                                       active,
                                       verified,
                                       data['scan_type'])
    except ValueError:
        raise Exception('FileParser ValueError')

    new_findings = []
    skipped_hashcodes = []
    try:
        for item in parser.items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev

            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[data['minimum_severity']]):
                continue

            item.test = test
            item.date = test.target_start
            item.reporter = self.context['request'].user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = self.context['request'].user
            item.active = data['active']
            item.verified = data['verified']
            item.save(dedupe_option=False)

            if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(finding=item,
                                                     burpRequestBase64=req_resp["req"],
                                                     burpResponseBase64=req_resp["resp"])
                    burp_rr.clean()
                    burp_rr.save()

            if (item.unsaved_request is not None and item.unsaved_response is not None):
                burp_rr = BurpRawRequestResponse(finding=item,
                                                 burpRequestBase64=item.unsaved_request,
                                                 burpResponseBase64=item.unsaved_response)
                burp_rr.clean()
                burp_rr.save()

            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=test.engagement.product)
                item.endpoints.add(ep)
            if endpoint_to_add:
                item.endpoints.add(endpoint_to_add)

            if item.unsaved_tags is not None:
                item.tags = item.unsaved_tags

            item.save(push_to_jira=push_to_jira)
            new_findings.append(item)
    except SyntaxError:
        raise Exception('Parser SyntaxError')

    old_findings = []
    if close_old_findings:
        # Close old active findings that are not reported by this scan.
        new_hash_codes = test.finding_set.values('hash_code')
        if test.engagement.deduplication_on_engagement:
            old_findings = Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement=test.engagement,
                        test__test_type=test_type,
                        active=True)
        else:
            old_findings = Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement__product=test.engagement.product,
                        test__test_type=test_type,
                        active=True)

        for old_finding in old_findings:
            old_finding.active = False
            old_finding.mitigated = datetime.datetime.combine(test.target_start, timezone.now().time())
            if settings.USE_TZ:
                old_finding.mitigated = timezone.make_aware(old_finding.mitigated, timezone.get_default_timezone())
            old_finding.mitigated_by = self.context['request'].user
            old_finding.notes.create(author=self.context['request'].user,
                                     entry="This finding has been automatically closed"
                                           " as it is not present anymore in recent scans.")
            endpoint_status = old_finding.endpoint_status.all()
            for status in endpoint_status:
                status.mitigated_by = self.context['request'].user
                status.mitigated_time = timezone.now()
                status.mitigated = True
                status.last_modified = timezone.now()
                status.save()
            Tag.objects.add_tag(old_finding, 'stale')
            old_finding.save()

    title = 'Test created for ' + str(test.engagement.product) + ': ' + str(test.engagement.name) + ': ' + str(test)
    create_notification(event='test_added',
                        title=title,
                        test=test,
                        engagement=test.engagement,
                        product=test.engagement.product,
                        url=reverse('view_test', args=(test.id,)))

    updated_count = len(new_findings) + len(old_findings)
    if updated_count > 0:
        title = 'Created ' + str(updated_count) + " findings for " + str(test.engagement.product) + ': ' + str(test.engagement.name) + ': ' + str(test)
        create_notification(initiator=self.context['request'].user,
                            event='scan_added',
                            title=title,
                            findings_new=new_findings,
                            findings_mitigated=old_findings,
                            finding_count=updated_count,
                            test=test,
                            engagement=test.engagement,
                            product=test.engagement.product,
                            url=reverse('view_test', args=(test.id,)))
    return test
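# A hedged sketch of the close_old_findings branch above: previously active
# findings whose hash_code does not reappear in the new test are auto-closed.
# Plain dicts and sets stand in for the Finding queryset and hash_code field.
previous_active = {'hash1': 'SQLi on /login', 'hash2': 'XSS on /search'}
new_hash_codes = {'hash2', 'hash3'}   # hashes reported by this upload

old_findings = [title for hash_code, title in previous_active.items()
                if hash_code not in new_hash_codes]
assert old_findings == ['SQLi on /login']   # closed as no longer present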
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified,
                            minimum_severity=None, endpoints_to_add=None, push_to_jira=None,
                            group_by=None, now=timezone.now(), service=None):
    items = parsed_findings
    original_items = list(test.finding_set.all())
    new_items = []
    mitigated_count = 0
    finding_count = 0
    finding_added_count = 0
    reactivated_count = 0
    reactivated_items = []
    unchanged_count = 0
    unchanged_items = []

    logger.debug('starting reimport of %i items.', len(items) if items else 0)
    from dojo.importers.reimporter.utils import (
        get_deduplication_algorithm_from_conf,
        match_new_finding_to_existing_finding,
        update_endpoint_status)
    deduplication_algorithm = get_deduplication_algorithm_from_conf(scan_type)

    i = 0
    logger.debug('STEP 1: looping over findings from the reimported report and trying to match them to existing findings')
    deduplicationLogger.debug('Algorithm used for matching new findings to existing findings: %s', deduplication_algorithm)
    for item in items:
        # FIXME hack to remove when all parsers have unit tests for this attribute
        if item.severity.lower().startswith('info') and item.severity != 'Info':
            item.severity = 'Info'

        item.numerical_severity = Finding.get_numerical_severity(item.severity)

        if minimum_severity and (Finding.SEVERITIES[item.severity] > Finding.SEVERITIES[minimum_severity]):
            # finding's severity is below the configured threshold : ignoring the finding
            continue

        # existing findings may be from before we had component_name/version fields
        component_name = item.component_name if hasattr(item, 'component_name') else None
        component_version = item.component_version if hasattr(item, 'component_version') else None

        if not hasattr(item, 'test'):
            item.test = test

        item.service = service
        item.hash_code = item.compute_hash_code()
        deduplicationLogger.debug("item's hash_code: %s", item.hash_code)

        findings = match_new_finding_to_existing_finding(item, test, deduplication_algorithm, scan_type)
        deduplicationLogger.debug('found %i findings matching with current new finding', len(findings))

        if findings:
            # existing finding found
            finding = findings[0]
            if finding.false_p or finding.out_of_scope or finding.risk_accepted:
                logger.debug('%i: skipping existing finding (it is marked as false positive:%s and/or out of scope:%s or is a risk accepted:%s): %i:%s:%s:%s',
                             i, finding.false_p, finding.out_of_scope, finding.risk_accepted,
                             finding.id, finding, finding.component_name, finding.component_version)
            elif finding.mitigated or finding.is_mitigated:
                logger.debug('%i: reactivating: %i:%s:%s:%s', i, finding.id, finding,
                             finding.component_name, finding.component_version)
                finding.mitigated = None
                finding.is_mitigated = False
                finding.mitigated_by = None
                finding.active = True
                finding.verified = verified

                # existing findings may be from before we had component_name/version fields
                finding.component_name = finding.component_name if finding.component_name else component_name
                finding.component_version = finding.component_version if finding.component_version else component_version

                # don't dedupe before endpoints are added
                finding.save(dedupe_option=False)
                note = Notes(entry="Re-activated by %s re-upload." % scan_type, author=user)
                note.save()

                endpoint_status = finding.endpoint_status.all()
                for status in endpoint_status:
                    status.mitigated_by = None
                    status.mitigated_time = None
                    status.mitigated = False
                    status.last_modified = timezone.now()
                    status.save()

                finding.notes.add(note)
                reactivated_items.append(finding)
                reactivated_count += 1
            else:
                # existing findings may be from before we had component_name/version fields
                logger.debug('%i: updating existing finding: %i:%s:%s:%s', i, finding.id, finding,
                             finding.component_name, finding.component_version)
                if not finding.component_name or not finding.component_version:
                    finding.component_name = finding.component_name if finding.component_name else component_name
                    finding.component_version = finding.component_version if finding.component_version else component_version
                    finding.save(dedupe_option=False)

                unchanged_items.append(finding)
                unchanged_count += 1
                if finding.dynamic_finding:
                    logger.debug("Re-import found an existing dynamic finding for this new finding. Checking the status of endpoints")
                    update_endpoint_status(finding, item, user)
        else:
            # no existing finding found
            item.reporter = user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = user
            item.verified = verified
            item.active = active
            # Save it. Don't dedupe before endpoints are added.
            item.save(dedupe_option=False)
            logger.debug('%i: reimport created new finding as no existing finding match: %i:%s:%s:%s',
                         i, item.id, item, item.component_name, item.component_version)

            # only new items get auto grouped to avoid confusion around already existing items that are already grouped
            if settings.FEATURE_FINDING_GROUPS and group_by:
                finding_helper.add_finding_to_auto_group(item, group_by)

            finding_added_count += 1
            new_items.append(item)
            finding = item

            if hasattr(item, 'unsaved_req_resp'):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=base64.b64encode(req_resp["req"].encode("utf-8")),
                        burpResponseBase64=base64.b64encode(req_resp["resp"].encode("utf-8")))
                    burp_rr.clean()
                    burp_rr.save()

            if item.unsaved_request and item.unsaved_response:
                burp_rr = BurpRawRequestResponse(
                    finding=finding,
                    burpRequestBase64=base64.b64encode(item.unsaved_request.encode()),
                    burpResponseBase64=base64.b64encode(item.unsaved_response.encode()))
                burp_rr.clean()
                burp_rr.save()

        # for existing findings: make sure endpoints are present or created
        if finding:
            finding_count += 1
            for endpoint in item.unsaved_endpoints:
                try:
                    endpoint.clean()
                except ValidationError as e:
                    logger.warning("DefectDojo is storing broken endpoint because cleaning wasn't successful: "
                                   "{}".format(e))
                try:
                    ep, created = endpoint_get_or_create(
                        protocol=endpoint.protocol,
                        userinfo=endpoint.userinfo,
                        host=endpoint.host,
                        port=endpoint.port,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                except (MultipleObjectsReturned):
                    pass
                try:
                    eps, created = Endpoint_Status.objects.get_or_create(finding=finding, endpoint=ep)
                except (MultipleObjectsReturned):
                    pass

                ep.endpoint_status.add(eps)
                finding.endpoints.add(ep)
                finding.endpoint_status.add(eps)

            if endpoints_to_add:
                for endpoint in endpoints_to_add:
                    # TODO Not sure what happens here, we get an endpoint model and try to create it again?
                    try:
                        endpoint.clean()
                    except ValidationError as e:
                        logger.warning("DefectDojo is storing broken endpoint because cleaning wasn't successful: "
                                       "{}".format(e))
                    try:
                        ep, created = endpoint_get_or_create(
                            protocol=endpoint.protocol,
                            userinfo=endpoint.userinfo,
                            host=endpoint.host,
                            port=endpoint.port,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                    except (MultipleObjectsReturned):
                        pass
                    try:
                        eps, created = Endpoint_Status.objects.get_or_create(finding=finding, endpoint=ep)
                    except (MultipleObjectsReturned):
                        pass

                    ep.endpoint_status.add(eps)
                    finding.endpoints.add(ep)
                    finding.endpoint_status.add(eps)

            if item.unsaved_tags:
                finding.tags = item.unsaved_tags

            # existing findings may be from before we had component_name/version fields
            finding.component_name = finding.component_name if finding.component_name else component_name
            finding.component_version = finding.component_version if finding.component_version else component_version

            # finding = new finding or existing finding still in the upload report
            # to avoid pushing a finding group multiple times, we push those outside of the loop
            if settings.FEATURE_FINDING_GROUPS and finding.finding_group:
                finding.save()
            else:
                finding.save(push_to_jira=push_to_jira)

    to_mitigate = set(original_items) - set(reactivated_items) - set(unchanged_items)
    untouched = set(unchanged_items) - set(to_mitigate)

    if settings.FEATURE_FINDING_GROUPS and push_to_jira:
        for finding_group in set([finding.finding_group
                                  for finding in reactivated_items + unchanged_items + new_items
                                  if finding.finding_group is not None]):
            jira_helper.push_to_jira(finding_group)

    return new_items, reactivated_items, to_mitigate, untouched
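# A hedged summary of the reimport outcome buckets computed at the end of
# process_parsed_findings() above, reduced to set algebra over finding ids:
original = {1, 2, 3, 4}    # findings already on the test before reimport
reactivated = {2}          # matched and previously mitigated, now reopened
unchanged = {3, 4}         # matched and still open
new = {5}                  # no match found, created fresh

to_mitigate = original - reactivated - unchanged   # {1}: absent from the report
untouched = unchanged - to_mitigate                # {3, 4}: left as-is
assert to_mitigate == {1} and untouched == {3, 4}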
def import_scan_results(request, eid=None, pid=None): engagement = None form = ImportScanForm() cred_form = CredMappingForm() finding_count = 0 if eid: engagement = get_object_or_404(Engagement, id=eid) cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter( engagement=engagement).order_by('cred_id') if request.method == "POST": form = ImportScanForm(request.POST, request.FILES) cred_form = CredMappingForm(request.POST) cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter( engagement=engagement).order_by('cred_id') if form.is_valid(): # Allows for a test to be imported with an engagement created on the fly if engagement is None: engagement = Engagement() product = get_object_or_404(Product, id=pid) engagement.name = "AdHoc Import - " + strftime( "%a, %d %b %Y %X", timezone.now().timetuple()) engagement.threat_model = False engagement.api_test = False engagement.pen_test = False engagement.check_list = False engagement.target_start = timezone.now().date() engagement.target_end = timezone.now().date() engagement.product = product engagement.active = True engagement.status = 'In Progress' engagement.save() file = request.FILES['file'] scan_date = form.cleaned_data['scan_date'] min_sev = form.cleaned_data['minimum_severity'] active = form.cleaned_data['active'] verified = form.cleaned_data['verified'] scan_type = request.POST['scan_type'] if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES): raise Http404() tt, t_created = Test_Type.objects.get_or_create(name=scan_type) # will save in development environment environment, env_created = Development_Environment.objects.get_or_create( name="Development") t = Test(engagement=engagement, test_type=tt, target_start=scan_date, target_end=scan_date, environment=environment, percent_complete=100) t.lead = request.user t.full_clean() t.save() tags = request.POST.getlist('tags') ts = ", ".join(tags) t.tags = ts # Save the credential to the test if cred_form.is_valid(): if cred_form.cleaned_data['cred_user']: # Select the credential mapping object from the selected list and only allow if the credential is associated with the product cred_user = Cred_Mapping.objects.filter( pk=cred_form.cleaned_data['cred_user'].id, engagement=eid).first() new_f = cred_form.save(commit=False) new_f.test = t new_f.cred_id = cred_user.cred_id new_f.save() try: parser = import_parser_factory(file, t) except ValueError: raise Http404() try: for item in parser.items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]: continue item.test = t if item.date == timezone.now().date(): item.date = t.target_start item.reporter = request.user item.last_reviewed = timezone.now() item.last_reviewed_by = request.user item.active = active item.verified = verified item.save(dedupe_option=False) if hasattr(item, 'unsaved_req_resp') and len( item.unsaved_req_resp) > 0: for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"], ) burp_rr.clean() burp_rr.save() if item.unsaved_request is not None and item.unsaved_response is not None: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=item.unsaved_request, burpResponseBase64=item.unsaved_response, ) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, 
path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=t.engagement.product) item.endpoints.add(ep) item.save() if item.unsaved_tags is not None: item.tags = item.unsaved_tags # patched: create the corresponding JIRA issue for the newly imported finding add_issue(item, True) finding_count += 1 messages.add_message( request, messages.SUCCESS, scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'), extra_tags='alert-success') create_notification(event='results_added', title=str(finding_count) + " findings for " + engagement.product.name, finding_count=finding_count, test=t, engagement=engagement, url=request.build_absolute_uri( reverse('view_test', args=(t.id, )))) return HttpResponseRedirect(reverse('view_test', args=(t.id, ))) except SyntaxError: messages.add_message( request, messages.ERROR, 'There appears to be an error in the XML report, please check and try again.', extra_tags='alert-danger') prod_id = None custom_breadcrumb = None title = "Import Scan Results" if engagement: prod_id = engagement.product.id product_tab = Product_Tab(prod_id, title=title, tab="engagements") product_tab.setEngagement(engagement) else: prod_id = pid custom_breadcrumb = {"", ""} product_tab = Product_Tab(prod_id, title=title, tab="findings") return render( request, 'dojo/import_scan_results.html', { 'form': form, 'product_tab': product_tab, 'custom_breadcrumb': custom_breadcrumb, 'title': title, 'cred_form': cred_form, })
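Every variant of the import loop first folds 'Information'/'Informational' into 'Info' and then drops findings below the minimum severity. A self-contained sketch of that gate; the numeric ranks are an assumption (lower number meaning more severe), inferred from the direction of the > comparison in the code:

# Assumed rank table: lower value = more severe, matching the '>' skip test above.
SEVERITIES = {'Critical': 0, 'High': 1, 'Medium': 2, 'Low': 3, 'Info': 4}

def passes_threshold(severity, minimum_severity):
    # Normalize the informational aliases exactly as the import loops do.
    if severity in ('Information', 'Informational'):
        severity = 'Info'
    # A larger rank means less severe, so anything ranked above the floor is skipped.
    return SEVERITIES[severity] <= SEVERITIES[minimum_severity]

assert passes_threshold('High', 'Medium')            # High clears a Medium floor
assert not passes_threshold('Informational', 'Low')  # Info falls below a Low floor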
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified, minimum_severity=None, endpoints_to_add=None, push_to_jira=None, group_by=None, now=timezone.now(), service=None, scan_date=None, **kwargs): logger.debug('endpoints_to_add: %s', endpoints_to_add) new_findings = [] items = parsed_findings logger.debug('starting import of %i items.', len(items) if items else 0) i = 0 for item in items: # FIXME hack to remove when all parsers have unit tests for this attribute if item.severity.lower().startswith( 'info') and item.severity != 'Info': item.severity = 'Info' item.numerical_severity = Finding.get_numerical_severity( item.severity) if minimum_severity and (Finding.SEVERITIES[item.severity] > Finding.SEVERITIES[minimum_severity]): # finding's severity is below the configured threshold : ignoring the finding continue item.test = test item.reporter = user if user else get_current_user() item.last_reviewed = now item.last_reviewed_by = user if user else get_current_user() logger.debug( 'process_parsed_findings: active from report: %s, verified from report: %s', item.active, item.verified) # active, verified parameters = parameters from the gui or api call. # item.active, item.verified = values from the report / the parser # if either value of active (from the parser or from the api/gui) is false, final status is inactive # else final status is active # if either value of verified (from the parser or from the api/gui) is false, final status is not verified # else final status is verified # Note that: # - the API (active/verified parameters) values default to True if not specified # - the parser values default to true if not set by the parser (as per the default value in models.py) # - there is no "not specified" in the GUI (not ticked means not active/not verified) if item.active: item.active = active if item.verified: item.verified = verified # if scan_date was provided, override value from parser if scan_date: item.date = scan_date item.service = service item.save(dedupe_option=False) if settings.FEATURE_FINDING_GROUPS and group_by: finding_helper.add_finding_to_auto_group(item, group_by) if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( req_resp["req"].encode("utf-8")), burpResponseBase64=base64.b64encode( req_resp["resp"].encode("utf-8"))) burp_rr.clean() burp_rr.save() if (item.unsaved_request is not None and item.unsaved_response is not None): burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( item.unsaved_request.encode()), burpResponseBase64=base64.b64encode( item.unsaved_response.encode())) burp_rr.clean() burp_rr.save() if settings.ASYNC_FINDING_IMPORT: importer_utils.chunk_endpoints_and_disperse( item, test, item.unsaved_endpoints) else: importer_utils.add_endpoints_to_unsaved_finding( item, test, item.unsaved_endpoints, sync=True) if endpoints_to_add: if settings.ASYNC_FINDING_IMPORT: importer_utils.chunk_endpoints_and_disperse( item, test, endpoints_to_add) else: importer_utils.add_endpoints_to_unsaved_finding( item, test, endpoints_to_add, sync=True) if item.unsaved_tags: item.tags = item.unsaved_tags if item.unsaved_files: for unsaved_file in item.unsaved_files: data = base64.b64decode(unsaved_file.get('data')) title = unsaved_file.get('title', '<No title>') file_upload, file_upload_created = FileUpload.objects.get_or_create( title=title, ) file_upload.file.save(title,
ContentFile(data)) file_upload.save() item.files.add(file_upload) new_findings.append(item) # to avoid pushing a finding group multiple times, we push those outside of the loop if settings.FEATURE_FINDING_GROUPS and item.finding_group: item.save() else: item.save(push_to_jira=push_to_jira) if settings.FEATURE_FINDING_GROUPS and push_to_jira: for finding_group in set([ finding.finding_group for finding in new_findings if finding.finding_group is not None ]): jira_helper.push_to_jira(finding_group) sync = kwargs.get('sync', False) if not sync: return [ serializers.serialize('json', [ finding, ]) for finding in new_findings ] return new_findings
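When called without sync=True, the importer above hands back JSON strings rather than model instances, which keeps the return value safe to pass across an async task boundary. A sketch of the same contract using stdlib dataclasses in place of Django models (FindingStub and serialize_findings are illustrative names, not DefectDojo code):

import json
from dataclasses import dataclass, asdict
from typing import List

@dataclass
class FindingStub:
    # Stand-in for a Django Finding instance.
    id: int
    title: str
    severity: str

def serialize_findings(findings: List[FindingStub], sync: bool):
    # Mirrors the importer's contract: model objects for sync callers,
    # one JSON payload per finding (a single-element list) for async callers.
    if sync:
        return findings
    return [json.dumps([asdict(f)]) for f in findings]

new_findings = [FindingStub(1, 'XSS in search', 'High')]
print(serialize_findings(new_findings, sync=False)[0])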
def import_scan_results(request, eid=None, pid=None): engagement = None form = ImportScanForm() cred_form = CredMappingForm() finding_count = 0 enabled = False jform = None if eid: engagement = get_object_or_404(Engagement, id=eid) cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter( engagement=engagement).order_by('cred_id') if get_system_setting( 'enable_jira') and engagement.product.jira_pkey_set.first( ) is not None: enabled = engagement.product.jira_pkey_set.first().push_all_issues jform = JIRAFindingForm(enabled=enabled, prefix='jiraform') elif pid: product = get_object_or_404(Product, id=pid) if get_system_setting( 'enable_jira') and product.jira_pkey_set.first() is not None: enabled = product.jira_pkey_set.first().push_all_issues jform = JIRAFindingForm(enabled=enabled, prefix='jiraform') if request.method == "POST": form = ImportScanForm(request.POST, request.FILES) cred_form = CredMappingForm(request.POST) cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter( engagement=engagement).order_by('cred_id') if form.is_valid(): # Allows for a test to be imported with an engagement created on the fly if engagement is None: engagement = Engagement() # product = get_object_or_404(Product, id=pid) engagement.name = "AdHoc Import - " + strftime( "%a, %d %b %Y %X", timezone.now().timetuple()) engagement.threat_model = False engagement.api_test = False engagement.pen_test = False engagement.check_list = False engagement.target_start = timezone.now().date() engagement.target_end = timezone.now().date() engagement.product = product engagement.active = True engagement.status = 'In Progress' engagement.save() file = request.FILES.get('file') scan_date = form.cleaned_data['scan_date'] min_sev = form.cleaned_data['minimum_severity'] active = form.cleaned_data['active'] verified = form.cleaned_data['verified'] scan_type = request.POST['scan_type'] if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES): raise Http404() tt, t_created = Test_Type.objects.get_or_create(name=scan_type) # will save in development environment environment, env_created = Development_Environment.objects.get_or_create( name="Development") t = Test(engagement=engagement, test_type=tt, target_start=scan_date, target_end=scan_date, environment=environment, percent_complete=100) t.lead = request.user t.full_clean() t.save() tags = request.POST.getlist('tags') ts = ", ".join(tags) t.tags = ts # Save the credential to the test if cred_form.is_valid(): if cred_form.cleaned_data['cred_user']: # Select the credential mapping object from the selected list and only allow if the credential is associated with the product cred_user = Cred_Mapping.objects.filter( pk=cred_form.cleaned_data['cred_user'].id, engagement=eid).first() new_f = cred_form.save(commit=False) new_f.test = t new_f.cred_id = cred_user.cred_id new_f.save() try: parser = import_parser_factory(file, t, active, verified) except Exception as e: messages.add_message( request, messages.ERROR, "An error has occurred in the parser, please see error " "log for details.", extra_tags='alert-danger') parse_logger.exception(e) parse_logger.error("Error in parser: {}".format(str(e))) return HttpResponseRedirect( reverse('import_scan_results', args=(eid, ))) try: # Push to Jira? 
push_to_jira = False if enabled: push_to_jira = True elif 'jiraform-push_to_jira' in request.POST: jform = JIRAFindingForm(request.POST, prefix='jiraform', enabled=enabled) if jform.is_valid(): push_to_jira = jform.cleaned_data.get('push_to_jira') for item in parser.items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]: continue item.test = t if item.date == timezone.now().date(): item.date = t.target_start item.reporter = request.user item.last_reviewed = timezone.now() item.last_reviewed_by = request.user if not handles_active_verified_statuses( form.get_scan_type()): item.active = active item.verified = verified item.save(dedupe_option=False, false_history=True) if hasattr(item, 'unsaved_req_resp') and len( item.unsaved_req_resp) > 0: for req_resp in item.unsaved_req_resp: if form.get_scan_type() == "Arachni Scan": burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"], ) else: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( req_resp["req"].encode("utf-8")), burpResponseBase64=base64.b64encode( req_resp["resp"].encode("utf-8")), ) burp_rr.clean() burp_rr.save() if item.unsaved_request is not None and item.unsaved_response is not None: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( item.unsaved_request.encode()), burpResponseBase64=base64.b64encode( item.unsaved_response.encode()), ) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=t.engagement.product) item.endpoints.add(ep) for endpoint in form.cleaned_data['endpoints']: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=t.engagement.product) item.endpoints.add(ep) item.save(false_history=True, push_to_jira=push_to_jira) if item.unsaved_tags is not None: item.tags = item.unsaved_tags finding_count += 1 messages.add_message( request, messages.SUCCESS, scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'), extra_tags='alert-success') create_notification(event='results_added', initiator=request.user, title=str(finding_count) + " findings for " + engagement.product.name, finding_count=finding_count, test=t, engagement=engagement, url=reverse('view_test', args=(t.id, ))) return HttpResponseRedirect(reverse('view_test', args=(t.id, ))) except SyntaxError: messages.add_message( request, messages.ERROR, 'There appears to be an error in the XML report, please check and try again.', extra_tags='alert-danger') prod_id = None custom_breadcrumb = None title = "Import Scan Results" if engagement: prod_id = engagement.product.id product_tab = Product_Tab(prod_id, title=title, tab="engagements") product_tab.setEngagement(engagement) else: prod_id = pid custom_breadcrumb = {"", ""} product_tab = Product_Tab(prod_id, title=title, tab="findings") form.fields['endpoints'].queryset = Endpoint.objects.filter( product__id=product_tab.product.id) return render( request, 'dojo/import_scan_results.html', { 'form': form, 'product_tab': product_tab, 'custom_breadcrumb': custom_breadcrumb, 'title': title, 'cred_form': cred_form, 'jform': jform
})
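This version base64-encodes raw request/response text before storing it, except for Arachni output, which arrives already encoded. A stdlib round-trip sketch of the encoding applied to the BurpRawRequestResponse fields (field names from the code above; the sample HTTP text is invented):

import base64

raw_request = "GET /search?q=test HTTP/1.1\r\nHost: example.invalid\r\n\r\n"
raw_response = "HTTP/1.1 200 OK\r\n\r\n<html></html>"

# What the import loop stores on burpRequestBase64 / burpResponseBase64.
burp_request_b64 = base64.b64encode(raw_request.encode("utf-8"))
burp_response_b64 = base64.b64encode(raw_response.encode("utf-8"))

# The inverse, e.g. when rendering the stored pair back to a reviewer.
assert base64.b64decode(burp_request_b64).decode("utf-8") == raw_request
assert base64.b64decode(burp_response_b64).decode("utf-8") == raw_response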
def save(self): data = self.validated_data test_type, created = Test_Type.objects.get_or_create( name=data['scan_type']) environment, created = Development_Environment.objects.get_or_create( name='Development') test = Test(engagement=data['engagement'], lead=data['lead'], test_type=test_type, target_start=data['scan_date'], target_end=data['scan_date'], environment=environment, percent_complete=100) try: test.full_clean() except ValidationError: pass test.save() test.tags = data['tags'] try: parser = import_parser_factory( data['file'], test, data['scan_type'], ) except ValueError: raise Exception('FileParser ValueError') try: for item in parser.items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[data['minimum_severity']]): continue item.test = test item.date = test.target_start item.reporter = self.context['request'].user item.last_reviewed = timezone.now() item.last_reviewed_by = self.context['request'].user item.active = data['active'] item.verified = data['verified'] item.save() if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"]) burp_rr.clean() burp_rr.save() if (item.unsaved_request is not None and item.unsaved_response is not None): burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=item.unsaved_request, burpResponseBase64=item.unsaved_response) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) item.endpoints.add(ep) if item.unsaved_tags is not None: item.tags = item.unsaved_tags except SyntaxError: raise Exception('Parser SyntaxError') return test
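The serializer, like the views, relies on get_or_create so repeated imports reuse one Test_Type and one Development_Environment row instead of multiplying them. A dictionary-backed sketch of that idiom (an in-memory stand-in, not the Django ORM):

# In-memory stand-in for Model.objects.get_or_create(name=...).
_registry = {}

def get_or_create(name):
    # Returns (object, created), matching the ORM helper's two-tuple.
    if name in _registry:
        return _registry[name], False
    _registry[name] = {'name': name}
    return _registry[name], True

tt, created = get_or_create('Burp Scan')
assert created
tt2, created2 = get_or_create('Burp Scan')
assert tt is tt2 and not created2   # second import reuses the existing row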
def re_import_scan_results(request, tid): additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \ "mitigated. The process attempts to identify the differences, however manual verification " \ "is highly recommended." test = get_object_or_404(Test, id=tid) scan_type = test.test_type.name engagement = test.engagement form = ReImportScanForm() jform = None push_all_jira_issues = False # Decide if we need to present the Push to JIRA form if get_system_setting( 'enable_jira') and engagement.product.jira_pkey_set.first( ) is not None: push_all_jira_issues = engagement.product.jira_pkey_set.first( ).push_all_issues jform = JIRAImportScanForm(push_all=push_all_jira_issues, prefix='jiraform') form.initial['tags'] = [tag.name for tag in test.tags] if request.method == "POST": form = ReImportScanForm(request.POST, request.FILES) if form.is_valid(): scan_date = form.cleaned_data['scan_date'] scan_date_time = datetime.combine(scan_date, timezone.now().time()) if settings.USE_TZ: scan_date_time = timezone.make_aware( scan_date_time, timezone.get_default_timezone()) min_sev = form.cleaned_data['minimum_severity'] file = request.FILES.get('file', None) scan_type = test.test_type.name active = form.cleaned_data['active'] verified = form.cleaned_data['verified'] tags = request.POST.getlist('tags') ts = ", ".join(tags) test.tags = ts if file and is_scan_file_too_large(file): messages.add_message( request, messages.ERROR, "Report file is too large. Maximum supported size is {} MB" .format(settings.SCAN_FILE_MAX_SIZE), extra_tags='alert-danger') return HttpResponseRedirect( reverse('re_import_scan_results', args=(test.id, ))) try: parser = import_parser_factory(file, test, active, verified) except ValueError: raise Http404() except Exception as e: messages.add_message( request, messages.ERROR, "An error has occurred in the parser, please see error " "log for details.", extra_tags='alert-danger') parse_logger.exception(e) parse_logger.error("Error in parser: {}".format(str(e))) return HttpResponseRedirect( reverse('re_import_scan_results', args=(test.id, ))) try: items = parser.items original_items = test.finding_set.all().values_list("id", flat=True) new_items = [] mitigated_count = 0 finding_count = 0 finding_added_count = 0 reactivated_count = 0 # Push to Jira? push_to_jira = False if push_all_jira_issues: push_to_jira = True elif 'jiraform-push_to_jira' in request.POST: jform = JIRAImportScanForm(request.POST, prefix='jiraform', push_all=push_all_jira_issues) if jform.is_valid(): push_to_jira = jform.cleaned_data.get('push_to_jira') for item in items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev # existing findings may be from before we had component_name/version fields component_name = item.component_name if hasattr( item, 'component_name') else None component_version = item.component_version if hasattr( item, 'component_version') else None # If it doesn't clear minimum severity, move on if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]: continue # Try to find the existing finding # If it's Veracode or Arachni, then we consider the description for some # reason... 
from titlecase import titlecase item.title = titlecase(item.title) if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan': finding = Finding.objects.filter( title=item.title, test__id=test.id, severity=sev, numerical_severity=Finding.get_numerical_severity( sev), description=item.description) else: finding = Finding.objects.filter( title=item.title, test__id=test.id, severity=sev, numerical_severity=Finding.get_numerical_severity( sev)) if len(finding) == 1: finding = finding[0] if finding.mitigated or finding.is_Mitigated: # it was once fixed, but now back finding.mitigated = None finding.is_Mitigated = False finding.mitigated_by = None finding.active = True finding.verified = verified # existing findings may be from before we had component_name/version fields finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version finding.save() note = Notes( entry="Re-activated by %s re-upload." % scan_type, author=request.user) note.save() finding.notes.add(note) endpoint_status = finding.endpoint_status.all() for status in endpoint_status: status.mitigated_by = None status.mitigated_time = None status.mitigated = False status.last_modified = timezone.now() status.save() reactivated_count += 1 else: # existing findings may be from before we had component_name/version fields if not finding.component_name or not finding.component_version: finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version finding.save(dedupe_option=False, push_to_jira=False) new_items.append(finding.id) else: item.test = test if item.date == timezone.now().date(): item.date = test.target_start.date() item.reporter = request.user item.last_reviewed = timezone.now() item.last_reviewed_by = request.user item.verified = verified item.active = active # Save it item.save(dedupe_option=False) finding_added_count += 1 # Add it to the new items new_items.append(item.id) finding = item if hasattr(item, 'unsaved_req_resp') and len( item.unsaved_req_resp) > 0: for req_resp in item.unsaved_req_resp: if scan_type == "Arachni Scan": burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"], ) else: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( req_resp["req"].encode("utf-8")), burpResponseBase64=base64.b64encode( req_resp["resp"].encode("utf-8")), ) burp_rr.clean() burp_rr.save() if item.unsaved_request is not None and item.unsaved_response is not None: burp_rr = BurpRawRequestResponse( finding=finding, burpRequestBase64=base64.b64encode( item.unsaved_request.encode()), burpResponseBase64=base64.b64encode( item.unsaved_response.encode()), ) burp_rr.clean() burp_rr.save() if finding: finding_count += 1 for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) eps, created = Endpoint_Status.objects.get_or_create( finding=finding, endpoint=ep) ep.endpoint_status.add(eps) finding.endpoints.add(ep) finding.endpoint_status.add(eps) for endpoint in form.cleaned_data['endpoints']: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, 
query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) eps, created = Endpoint_Status.objects.get_or_create( finding=finding, endpoint=ep) ep.endpoint_status.add(eps) finding.endpoints.add(ep) finding.endpoint_status.add(eps) if item.unsaved_tags is not None: finding.tags = item.unsaved_tags # Save it. This may be the second time we save it in this function. finding.save(push_to_jira=push_to_jira) # calculate the difference to_mitigate = set(original_items) - set(new_items) for finding_id in to_mitigate: finding = Finding.objects.get(id=finding_id) if not finding.mitigated or not finding.is_Mitigated: finding.mitigated = scan_date_time finding.is_Mitigated = True finding.mitigated_by = request.user finding.active = False finding.save() note = Notes(entry="Mitigated by %s re-upload." % scan_type, author=request.user) note.save() finding.notes.add(note) mitigated_count += 1 endpoint_status = finding.endpoint_status.all() for status in endpoint_status: status.mitigated_by = request.user status.mitigated_time = timezone.now() status.mitigated = True status.last_modified = timezone.now() status.save() test.updated = max_safe([scan_date_time, test.updated]) test.engagement.updated = max_safe( [scan_date_time, test.engagement.updated]) test.save() test.engagement.save() messages.add_message( request, messages.SUCCESS, '%s processed, a total of ' % scan_type + message(finding_count, 'finding', 'processed'), extra_tags='alert-success') if finding_added_count > 0: messages.add_message( request, messages.SUCCESS, 'A total of ' + message(finding_added_count, 'finding', 'added') + ', that are new to scan.', extra_tags='alert-success') if reactivated_count > 0: messages.add_message( request, messages.SUCCESS, 'A total of ' + message(reactivated_count, 'finding', 'reactivated') + ', that are back in scan results.', extra_tags='alert-success') if mitigated_count > 0: messages.add_message( request, messages.SUCCESS, 'A total of ' + message(mitigated_count, 'finding', 'mitigated') + '. Please manually verify each one.', extra_tags='alert-success') create_notification(event='scan_added', title=str(finding_count) + " findings for " + test.engagement.product.name, finding_count=finding_count, test=test, engagement=test.engagement, url=reverse('view_test', args=(test.id, ))) return HttpResponseRedirect( reverse('view_test', args=(test.id, ))) except SyntaxError: messages.add_message( request, messages.ERROR, 'There appears to be an error in the XML report, please check and try again.', extra_tags='alert-danger') product_tab = Product_Tab(engagement.product.id, title="Re-upload a %s" % scan_type, tab="engagements") product_tab.setEngagement(engagement) form.fields['endpoints'].queryset = Endpoint.objects.filter( product__id=product_tab.product.id) return render( request, 'dojo/import_scan_results.html', { 'form': form, 'product_tab': product_tab, 'eid': engagement.id, 'additional_message': additional_message, 'jform': jform, })
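The mitigation timestamps above come from combining the user-supplied scan_date with the current time of day and, when USE_TZ is enabled, attaching a timezone. A stdlib-only sketch of that construction, using zoneinfo where the view uses Django's timezone helpers (the UTC choice here is an assumption; Django would use the configured default timezone):

from datetime import datetime, date
from zoneinfo import ZoneInfo

scan_date = date(2021, 6, 1)

# Combine the scan date with "now"'s time-of-day, as the view does...
scan_date_time = datetime.combine(scan_date, datetime.now().time())
# ...then make it timezone-aware, mirroring timezone.make_aware(...).
scan_date_time = scan_date_time.replace(tzinfo=ZoneInfo("UTC"))

print(scan_date_time.isoformat())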
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified, minimum_severity=None, endpoints_to_add=None, push_to_jira=None, group_by=None, now=timezone.now(), service=None, scan_date=None, **kwargs): items = parsed_findings original_items = list(test.finding_set.all()) new_items = [] mitigated_count = 0 finding_count = 0 finding_added_count = 0 reactivated_count = 0 reactivated_items = [] unchanged_count = 0 unchanged_items = [] logger.debug('starting reimport of %i items.', len(items) if items else 0) from dojo.importers.reimporter.utils import ( get_deduplication_algorithm_from_conf, match_new_finding_to_existing_finding, update_endpoint_status, reactivate_endpoint_status) deduplication_algorithm = get_deduplication_algorithm_from_conf( scan_type) i = 0 logger.debug( 'STEP 1: looping over findings from the reimported report and trying to match them to existing findings' ) deduplicationLogger.debug( 'Algorithm used for matching new findings to existing findings: %s', deduplication_algorithm) for item in items: # FIXME hack to remove when all parsers have unit tests for this attribute if item.severity.lower().startswith( 'info') and item.severity != 'Info': item.severity = 'Info' item.numerical_severity = Finding.get_numerical_severity( item.severity) if minimum_severity and (Finding.SEVERITIES[item.severity] > Finding.SEVERITIES[minimum_severity]): # finding's severity is below the configured threshold : ignoring the finding continue # existing findings may be from before we had component_name/version fields component_name = item.component_name if hasattr( item, 'component_name') else None component_version = item.component_version if hasattr( item, 'component_version') else None if not hasattr(item, 'test'): item.test = test item.service = service item.hash_code = item.compute_hash_code() deduplicationLogger.debug("item's hash_code: %s", item.hash_code) findings = match_new_finding_to_existing_finding( item, test, deduplication_algorithm, scan_type) deduplicationLogger.debug( 'found %i findings matching with current new finding', len(findings)) if findings: # existing finding found finding = findings[0] if finding.false_p or finding.out_of_scope or finding.risk_accepted: logger.debug( '%i: skipping existing finding (it is marked as false positive:%s and/or out of scope:%s or is a risk accepted:%s): %i:%s:%s:%s', i, finding.false_p, finding.out_of_scope, finding.risk_accepted, finding.id, finding, finding.component_name, finding.component_version) elif finding.mitigated or finding.is_mitigated: logger.debug('%i: reactivating: %i:%s:%s:%s', i, finding.id, finding, finding.component_name, finding.component_version) finding.mitigated = None finding.is_mitigated = False finding.mitigated_by = None finding.active = True finding.verified = verified # existing findings may be from before we had component_name/version fields finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version # don't dedupe before endpoints are added finding.save(dedupe_option=False) note = Notes(entry="Re-activated by %s re-upload." 
% scan_type, author=user) note.save() endpoint_statuses = finding.endpoint_status.all() # Determine if this can be run async if settings.ASYNC_FINDING_IMPORT: chunk_list = importer_utils.chunk_list( endpoint_statuses) # If there is only one chunk, then do not bother with async if len(chunk_list) < 2: reactivate_endpoint_status(endpoint_statuses, sync=True) logger.debug('IMPORT_SCAN: Split endpoints into ' + str(len(chunk_list)) + ' chunks of ' + str(chunk_list[0])) # First kick off all the workers for endpoint_status_list in chunk_list: reactivate_endpoint_status(endpoint_status_list, sync=False) else: reactivate_endpoint_status(endpoint_statuses, sync=True) finding.notes.add(note) reactivated_items.append(finding) reactivated_count += 1 else: # existing findings may be from before we had component_name/version fields logger.debug('%i: updating existing finding: %i:%s:%s:%s', i, finding.id, finding, finding.component_name, finding.component_version) if not finding.component_name or not finding.component_version: finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version finding.save(dedupe_option=False) unchanged_items.append(finding) unchanged_count += 1 if finding.dynamic_finding: logger.debug( "Re-import found an existing dynamic finding for this new finding. Checking the status of endpoints" ) update_endpoint_status(finding, item, user) else: # no existing finding found item.reporter = user item.last_reviewed = timezone.now() item.last_reviewed_by = user item.verified = verified item.active = active # if scan_date was provided, override value from parser if scan_date: item.date = scan_date # Save it. Don't dedupe before endpoints are added. 
item.save(dedupe_option=False) logger.debug( '%i: reimport created new finding as no existing finding match: %i:%s:%s:%s', i, item.id, item, item.component_name, item.component_version) # only new items get auto grouped to avoid confusion around already existing items that are already grouped if settings.FEATURE_FINDING_GROUPS and group_by: finding_helper.add_finding_to_auto_group(item, group_by) finding_added_count += 1 new_items.append(item) finding = item if hasattr(item, 'unsaved_req_resp'): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=finding, burpRequestBase64=base64.b64encode( req_resp["req"].encode("utf-8")), burpResponseBase64=base64.b64encode( req_resp["resp"].encode("utf-8"))) burp_rr.clean() burp_rr.save() if item.unsaved_request and item.unsaved_response: burp_rr = BurpRawRequestResponse( finding=finding, burpRequestBase64=base64.b64encode( item.unsaved_request.encode()), burpResponseBase64=base64.b64encode( item.unsaved_response.encode())) burp_rr.clean() burp_rr.save() # for existing findings: make sure endpoints are present or created if finding: finding_count += 1 if settings.ASYNC_FINDING_IMPORT: importer_utils.chunk_endpoints_and_disperse( finding, test, item.unsaved_endpoints) else: importer_utils.add_endpoints_to_unsaved_finding( finding, test, item.unsaved_endpoints, sync=True) if endpoints_to_add: if settings.ASYNC_FINDING_IMPORT: importer_utils.chunk_endpoints_and_disperse( finding, test, endpoints_to_add) else: importer_utils.add_endpoints_to_unsaved_finding( finding, test, endpoints_to_add, sync=True) if item.unsaved_tags: finding.tags = item.unsaved_tags if item.unsaved_files: for unsaved_file in item.unsaved_files: data = base64.b64decode(unsaved_file.get('data')) title = unsaved_file.get('title', '<No title>') file_upload, file_upload_created = FileUpload.objects.get_or_create( title=title, ) file_upload.file.save(title, ContentFile(data)) file_upload.save() finding.files.add(file_upload) # existing findings may be from before we had component_name/version fields finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version # finding = new finding or existing finding still in the upload report # to avoid pushing a finding group multiple times, we push those outside of the loop if settings.FEATURE_FINDING_GROUPS and finding.finding_group: finding.save() else: finding.save(push_to_jira=push_to_jira) to_mitigate = set(original_items) - set(reactivated_items) - set( unchanged_items) untouched = set(unchanged_items) - set(to_mitigate) if settings.FEATURE_FINDING_GROUPS and push_to_jira: for finding_group in set([ finding.finding_group for finding in reactivated_items + unchanged_items + new_items if finding.finding_group is not None ]): jira_helper.push_to_jira(finding_group) sync = kwargs.get('sync', False) if not sync: serialized_new_items = [ serializers.serialize('json', [ finding, ]) for finding in new_items ] serialized_reactivated_items = [ serializers.serialize('json', [ finding, ]) for finding in reactivated_items ] serialized_to_mitigate = [ serializers.serialize('json', [ finding, ]) for finding in to_mitigate ] serialized_untouched = [ serializers.serialize('json', [ finding, ]) for finding in untouched ] return serialized_new_items, serialized_reactivated_items, serialized_to_mitigate, serialized_untouched return new_items, reactivated_items, to_mitigate, untouched
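With ASYNC_FINDING_IMPORT enabled, this variant splits endpoint work into chunks and only fans out when more than one chunk results. A sketch of a chunker with that dispatch rule; the chunk size is an assumption, since importer_utils.chunk_list's actual size is not shown in this document:

def chunk_list(items, size=100):   # size assumed, not taken from the source
    # Split a list into consecutive slices of at most `size` elements.
    return [items[i:i + size] for i in range(0, len(items), size)]

def dispatch(statuses, process):
    chunks = chunk_list(statuses)
    if len(chunks) < 2:
        # A single chunk is not worth the task overhead: run synchronously.
        process(statuses, sync=True)
        return
    for chunk in chunks:           # otherwise kick off one worker per chunk
        process(chunk, sync=False)

dispatch(list(range(250)),
         lambda chunk, sync: print(len(chunk), 'sync' if sync else 'async'))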
def save(self): data = self.validated_data close_old_findings = data['close_old_findings'] active = data['active'] verified = data['verified'] test_type, created = Test_Type.objects.get_or_create( name=data.get('test_type', data['scan_type'])) environment, created = Development_Environment.objects.get_or_create( name='Development') test = Test( engagement=data['engagement'], lead=data['lead'], test_type=test_type, target_start=data['scan_date'], target_end=data['scan_date'], environment=environment, percent_complete=100) try: test.full_clean() except ValidationError: pass test.save() if 'tags' in data: test.tags = ' '.join(data['tags']) try: parser = import_parser_factory(data['file'], test, active, verified, data['scan_type'],) except ValueError: raise Exception('FileParser ValueError') skipped_hashcodes = [] try: for item in parser.items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[data['minimum_severity']]): continue item.test = test item.date = test.target_start item.reporter = self.context['request'].user item.last_reviewed = timezone.now() item.last_reviewed_by = self.context['request'].user item.active = data['active'] item.verified = data['verified'] item.save(dedupe_option=False) if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"]) burp_rr.clean() burp_rr.save() if (item.unsaved_request is not None and item.unsaved_response is not None): burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=item.unsaved_request, burpResponseBase64=item.unsaved_response) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) item.endpoints.add(ep) if item.unsaved_tags is not None: item.tags = item.unsaved_tags item.save() except SyntaxError: raise Exception('Parser SyntaxError') if close_old_findings: # Close old active findings that are not reported by this scan. new_hash_codes = test.finding_set.values('hash_code') old_findings = None if test.engagement.deduplication_on_engagement: old_findings = Finding.objects.exclude(test=test) \ .exclude(hash_code__in=new_hash_codes) \ .exclude(hash_code__in=skipped_hashcodes) \ .filter(test__engagement=test.engagement, test__test_type=test_type, active=True) else: old_findings = Finding.objects.exclude(test=test) \ .exclude(hash_code__in=new_hash_codes) \ .exclude(hash_code__in=skipped_hashcodes) \ .filter(test__engagement__product=test.engagement.product, test__test_type=test_type, active=True) for old_finding in old_findings: old_finding.active = False old_finding.mitigated = datetime.datetime.combine( test.target_start, timezone.now().time()) if settings.USE_TZ: old_finding.mitigated = timezone.make_aware( old_finding.mitigated, timezone.get_default_timezone()) old_finding.mitigated_by = self.context['request'].user old_finding.notes.create(author=self.context['request'].user, entry="This finding has been automatically closed" " as it is not present anymore in recent scans.") Tag.objects.add_tag(old_finding, 'stale') old_finding.save() title = 'An old finding has been closed for "{}".' 
\ .format(test.engagement.product.name) description = 'See <a href="{}">{}</a>' \ .format(reverse('view_finding', args=(old_finding.id, )), old_finding.title) create_notification(event='other', title=title, description=description, icon='bullseye', objowner=self.context['request'].user) return test
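close_old_findings selects active findings whose hash codes are absent from the new upload, scoped to the engagement or the whole product depending on deduplication_on_engagement. A pure-Python sketch of that selection, with dicts standing in for Finding rows (the ORM exclude/filter chain maps onto one comprehension):

def findings_to_close(all_findings, current_test_id, new_hash_codes,
                      skipped_hash_codes, scope_ids, scope_key):
    # scope_key is 'engagement_id' or 'product_id', mirroring the two querysets.
    return [
        f for f in all_findings
        if f['test_id'] != current_test_id           # skip the test just imported
        and f['hash_code'] not in new_hash_codes     # still reported -> stays open
        and f['hash_code'] not in skipped_hash_codes
        and f[scope_key] in scope_ids
        and f['active']
    ]

rows = [
    {'test_id': 1, 'hash_code': 'a', 'engagement_id': 7, 'active': True},
    {'test_id': 2, 'hash_code': 'b', 'engagement_id': 7, 'active': True},
]
# Hash 'a' is still reported, so only the finding with hash 'b' gets closed.
print(findings_to_close(rows, 1, {'a'}, set(), {7}, 'engagement_id'))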
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified, minimum_severity=None, endpoints_to_add=None, push_to_jira=None, now=timezone.now()): items = parsed_findings original_items = list(test.finding_set.all()) new_items = [] mitigated_count = 0 finding_count = 0 finding_added_count = 0 reactivated_count = 0 reactivated_items = [] unchanged_count = 0 unchanged_items = [] logger.debug('starting reimport of %i items.', len(items) if items else 0) from dojo.importers.reimporter.utils import get_deduplication_algorithm_from_conf, match_new_finding_to_existing_finding, update_endpoint_status deduplication_algorithm = get_deduplication_algorithm_from_conf( scan_type) i = 0 logger.debug( 'STEP 1: looping over findings from the reimported report and trying to match them to existing findings' ) deduplicationLogger.debug( 'Algorithm used for matching new findings to existing findings: %s', deduplication_algorithm) for item in items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev item.numerical_severity = Finding.get_numerical_severity(sev) if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[minimum_severity]): # finding's severity is below the configured threshold : ignoring the finding continue # existing findings may be from before we had component_name/version fields component_name = item.component_name if hasattr( item, 'component_name') else None component_version = item.component_version if hasattr( item, 'component_version') else None if not hasattr(item, 'test'): item.test = test item.hash_code = item.compute_hash_code() deduplicationLogger.debug("item's hash_code: %s", item.hash_code) findings = match_new_finding_to_existing_finding( item, test, deduplication_algorithm, scan_type) deduplicationLogger.debug( 'found %i findings matching with current new finding', len(findings)) if findings: # existing finding found finding = findings[0] if finding.false_p or finding.out_of_scope or finding.risk_accepted: logger.debug( '%i: skipping existing finding (it is marked as false positive:%s and/or out of scope:%s or is a risk accepted:%s): %i:%s:%s:%s', i, finding.false_p, finding.out_of_scope, finding.risk_accepted, finding.id, finding, finding.component_name, finding.component_version) elif finding.mitigated or finding.is_Mitigated: logger.debug('%i: reactivating: %i:%s:%s:%s', i, finding.id, finding, finding.component_name, finding.component_version) finding.mitigated = None finding.is_Mitigated = False finding.mitigated_by = None finding.active = True finding.verified = verified # existing findings may be from before we had component_name/version fields finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version # don't dedupe before endpoints are added finding.save(dedupe_option=False) note = Notes(entry="Re-activated by %s re-upload." 
% scan_type, author=user) note.save() endpoint_status = finding.endpoint_status.all() for status in endpoint_status: status.mitigated_by = None status.mitigated_time = None status.mitigated = False status.last_modified = timezone.now() status.save() finding.notes.add(note) reactivated_items.append(finding) reactivated_count += 1 else: # existing findings may be from before we had component_name/version fields logger.debug('%i: updating existing finding: %i:%s:%s:%s', i, finding.id, finding, finding.component_name, finding.component_version) if not finding.component_name or not finding.component_version: finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version finding.save(dedupe_option=False) unchanged_items.append(finding) unchanged_count += 1 if finding.dynamic_finding: logger.debug( "Re-import found an existing dynamic finding for this new finding. Checking the status of endpoints" ) update_endpoint_status(finding, item, user) else: # no existing finding found item.reporter = user item.last_reviewed = timezone.now() item.last_reviewed_by = user item.verified = verified item.active = active # Save it. Don't dedupe before endpoints are added. item.save(dedupe_option=False) logger.debug( '%i: reimport creating new finding as no existing finding match: %i:%s:%s:%s', i, item.id, item, item.component_name, item.component_version) finding_added_count += 1 new_items.append(item) finding = item if hasattr(item, 'unsaved_req_resp'): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=finding, burpRequestBase64=base64.b64encode( req_resp["req"].encode("utf-8")), burpResponseBase64=base64.b64encode( req_resp["resp"].encode("utf-8"))) burp_rr.clean() burp_rr.save() if item.unsaved_request and item.unsaved_response: burp_rr = BurpRawRequestResponse( finding=finding, burpRequestBase64=base64.b64encode( item.unsaved_request.encode()), burpResponseBase64=base64.b64encode( item.unsaved_response.encode())) burp_rr.clean() burp_rr.save() # for existing findings: make sure endpoints are present or created if finding: finding_count += 1 for endpoint in item.unsaved_endpoints: try: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) except (MultipleObjectsReturned): pass try: eps, created = Endpoint_Status.objects.get_or_create( finding=finding, endpoint=ep) except (MultipleObjectsReturned): pass ep.endpoint_status.add(eps) finding.endpoints.add(ep) finding.endpoint_status.add(eps) if endpoints_to_add: for endpoint in endpoints_to_add: # TODO Not sure what happens here, we get an endpoint model and try to create it again? 
try: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) except (MultipleObjectsReturned): pass try: eps, created = Endpoint_Status.objects.get_or_create( finding=finding, endpoint=ep) except (MultipleObjectsReturned): pass ep.endpoint_status.add(eps) finding.endpoints.add(ep) finding.endpoint_status.add(eps) if item.unsaved_tags: finding.tags = item.unsaved_tags # existing findings may be from before we had component_name/version fields finding.component_name = finding.component_name if finding.component_name else component_name finding.component_version = finding.component_version if finding.component_version else component_version finding.save(push_to_jira=push_to_jira) to_mitigate = set(original_items) - set(reactivated_items) - set( unchanged_items) untouched = set(unchanged_items) - set(to_mitigate) return new_items, reactivated_items, to_mitigate, untouched
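Matching here hinges on a per-finding hash_code computed before the deduplication lookup. The real compute_hash_code draws on a configurable field set that this document doesn't show, so the title-plus-severity choice below is an assumption; the sketch only illustrates the match-by-hash mechanic:

import hashlib

def compute_hash_code(title, severity):
    # Assumed field set; the configurable real algorithm may hash more fields.
    normalized = f"{title.strip().lower()}|{severity}"
    return hashlib.sha256(normalized.encode("utf-8")).hexdigest()

# Existing findings indexed by hash, as a dedupe lookup would see them.
existing = {compute_hash_code('SQL injection in /login', 'High'): 'finding #42'}

# An incoming finding differing only in case/whitespace hashes identically.
incoming = compute_hash_code('  SQL Injection in /login', 'High')
print(existing.get(incoming))   # -> finding #42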
def re_import_scan_results(request, tid): additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \ "mitigated. The process attempts to identify the differences, however manual verification " \ "is highly recommended." t = get_object_or_404(Test, id=tid) scan_type = t.test_type.name engagement = t.engagement form = ReImportScanForm() form.initial['tags'] = [tag.name for tag in t.tags] if request.method == "POST": form = ReImportScanForm(request.POST, request.FILES) if form.is_valid(): scan_date = form.cleaned_data['scan_date'] min_sev = form.cleaned_data['minimum_severity'] file = request.FILES['file'] scan_type = t.test_type.name active = form.cleaned_data['active'] verified = form.cleaned_data['verified'] tags = request.POST.getlist('tags') ts = ", ".join(tags) t.tags = ts try: parser = import_parser_factory(file, t) except ValueError: raise Http404() try: items = parser.items original_items = t.finding_set.all().values_list("id", flat=True) new_items = [] mitigated_count = 0 finding_count = 0 finding_added_count = 0 reactivated_count = 0 for item in items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]: continue if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan': find = Finding.objects.filter(title=item.title, test__id=t.id, severity=sev, numerical_severity=Finding.get_numerical_severity(sev), description=item.description ) else: find = Finding.objects.filter(title=item.title, test__id=t.id, severity=sev, numerical_severity=Finding.get_numerical_severity(sev), ) if len(find) == 1: find = find[0] if find.mitigated: # it was once fixed, but now back find.mitigated = None find.mitigated_by = None find.active = True find.verified = verified find.save() note = Notes(entry="Re-activated by %s re-upload." % scan_type, author=request.user) note.save() find.notes.add(note) reactivated_count += 1 new_items.append(find.id) else: item.test = t item.date = t.target_start item.reporter = request.user item.last_reviewed = timezone.now() item.last_reviewed_by = request.user item.verified = verified item.active = active item.save() finding_added_count += 1 new_items.append(item.id) find = item if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0: for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse(finding=find, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"], ) burp_rr.clean() burp_rr.save() if item.unsaved_request is not None and item.unsaved_response is not None: burp_rr = BurpRawRequestResponse(finding=find, burpRequestBase64=item.unsaved_request, burpResponseBase64=item.unsaved_response, ) burp_rr.clean() burp_rr.save() if find: finding_count += 1 for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=t.engagement.product) find.endpoints.add(ep) if item.unsaved_tags is not None: find.tags = item.unsaved_tags # calculate the difference to_mitigate = set(original_items) - set(new_items) for finding_id in to_mitigate: finding = Finding.objects.get(id=finding_id) finding.mitigated = datetime.combine(scan_date, timezone.now().time()) finding.mitigated_by = request.user finding.active = False finding.save() note = Notes(entry="Mitigated by %s re-upload." 
% scan_type, author=request.user) note.save() finding.notes.add(note) mitigated_count += 1 messages.add_message(request, messages.SUCCESS, '%s processed, a total of ' % scan_type + message(finding_count, 'finding', 'processed'), extra_tags='alert-success') if finding_added_count > 0: messages.add_message(request, messages.SUCCESS, 'A total of ' + message(finding_added_count, 'finding', 'added') + ', that are new to scan.', extra_tags='alert-success') if reactivated_count > 0: messages.add_message(request, messages.SUCCESS, 'A total of ' + message(reactivated_count, 'finding', 'reactivated') + ', that are back in scan results.', extra_tags='alert-success') if mitigated_count > 0: messages.add_message(request, messages.SUCCESS, 'A total of ' + message(mitigated_count, 'finding', 'mitigated') + '. Please manually verify each one.', extra_tags='alert-success') create_notification(event='results_added', title='Results added', finding_count=finding_count, test=t, engagement=engagement, url=request.build_absolute_uri(reverse('view_test', args=(t.id,)))) return HttpResponseRedirect(reverse('view_test', args=(t.id,))) except SyntaxError: messages.add_message(request, messages.ERROR, 'There appears to be an error in the XML report, please check and try again.', extra_tags='alert-danger') add_breadcrumb(parent=t, title="Re-upload a %s" % scan_type, top_level=False, request=request) return render(request, 'dojo/import_scan_results.html', {'form': form, 'eid': engagement.id, 'additional_message': additional_message, })
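All of these views build their banners with a message(count, noun, verb) helper that is defined elsewhere and never shown in this document. The following is therefore a hypothetical reconstruction, inferred only from the call sites above; the real helper may differ:

def message(count, noun, verb):
    # Hypothetical: pluralize the noun by count, then append the verb,
    # matching call sites like message(finding_count, 'finding', 'processed').
    return f"{count} {noun}{'s' if count != 1 else ''} {verb}"

print('Burp Scan processed, a total of ' + message(3, 'finding', 'processed'))
# -> Burp Scan processed, a total of 3 findings processed
print('A total of ' + message(1, 'finding', 'added') + ', that are new to scan.')
# -> A total of 1 finding added, that are new to scan.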
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified, minimum_severity=None, endpoints_to_add=None, push_to_jira=None, group_by=None, now=timezone.now()): logger.debug('endpoints_to_add: %s', endpoints_to_add) new_findings = [] items = parsed_findings logger.debug('starting import of %i items.', len(items) if items else 0) i = 0 for item in items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev item.numerical_severity = Finding.get_numerical_severity(sev) if minimum_severity and (Finding.SEVERITIES[sev] > Finding.SEVERITIES[minimum_severity]): continue item.test = test item.reporter = user if user else get_current_user() item.last_reviewed = now item.last_reviewed_by = user if user else get_current_user() # Only set active/verified flags if they were NOT set by default value(True) if item.active: item.active = active if item.verified: item.verified = verified item.created = now item.updated = now item.save(dedupe_option=False) if settings.FEATURE_FINDING_GROUPS and group_by: finding_helper.add_finding_to_auto_group(item, group_by) if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( req_resp["req"].encode("utf-8")), burpResponseBase64=base64.b64encode( req_resp["resp"].encode("utf-8"))) burp_rr.clean() burp_rr.save() if (item.unsaved_request is not None and item.unsaved_response is not None): burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=base64.b64encode( item.unsaved_request.encode()), burpResponseBase64=base64.b64encode( item.unsaved_response.encode())) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: try: endpoint.clean() except ValidationError as e: logger.warning( "DefectDojo is storing broken endpoint because cleaning wasn't successful: " "{}".format(e)) try: ep, created = endpoint_get_or_create( protocol=endpoint.protocol, userinfo=endpoint.userinfo, host=endpoint.host, port=endpoint.port, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) except (MultipleObjectsReturned): pass try: eps, created = Endpoint_Status.objects.get_or_create( finding=item, endpoint=ep) except (MultipleObjectsReturned): pass ep.endpoint_status.add(eps) item.endpoint_status.add(eps) item.endpoints.add(ep) if endpoints_to_add: for endpoint in endpoints_to_add: logger.debug('adding endpoint %s', endpoint) # TODO Not sure what happens here, we get an endpoint model and try to create it again?
try: endpoint.clean() except ValidationError as e: logger.warning( "DefectDojo is storing broken endpoint because cleaning wasn't successful: " "{}".format(e)) try: ep, created = endpoint_get_or_create( protocol=endpoint.protocol, userinfo=endpoint.userinfo, host=endpoint.host, port=endpoint.port, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) except (MultipleObjectsReturned): pass try: eps, created = Endpoint_Status.objects.get_or_create( finding=item, endpoint=ep) except (MultipleObjectsReturned): pass ep.endpoint_status.add(eps) item.endpoints.add(ep) item.endpoint_status.add(eps) if item.unsaved_tags: item.tags = item.unsaved_tags new_findings.append(item) # to avoid pushing a finding group multiple times, we push those outside of the loop if settings.FEATURE_FINDING_GROUPS and item.finding_group: item.save() else: item.save(push_to_jira=push_to_jira) if settings.FEATURE_FINDING_GROUPS and push_to_jira: for finding_group in set([ finding.finding_group for finding in new_findings if finding.finding_group is not None ]): jira_helper.push_to_jira(finding_group) return new_findings
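The except MultipleObjectsReturned: pass guards in the last two variants leave ep or eps unbound if the very first lookup raises, so the subsequent .add() calls would fail with a NameError. A safer shape is to fall back to the first matching row; the sketch below shows that pattern over an in-memory store (everything here is a stand-in, and the fallback is a suggested fix, not what the source does):

class MultipleObjectsReturned(Exception):
    pass

_endpoints = []   # in-memory stand-in for the Endpoint table

def endpoint_get_or_create(**fields):
    matches = [e for e in _endpoints if e == fields]
    if len(matches) > 1:
        raise MultipleObjectsReturned()
    if matches:
        return matches[0], False
    _endpoints.append(dict(fields))
    return fields, True

def safe_get_or_create(**fields):
    # Unlike 'except: pass', this always binds a result: on duplicates it
    # falls back to the first matching row instead of leaving ep unbound.
    try:
        return endpoint_get_or_create(**fields)
    except MultipleObjectsReturned:
        return next(e for e in _endpoints if e == fields), False

ep, created = safe_get_or_create(host='example.invalid', port=443)
print(ep, created)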
def obj_create(self, bundle, **kwargs): bundle.obj = ImportScanObject(initial=kwargs) self.is_valid(bundle) if bundle.errors: raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors)) bundle = self.full_hydrate(bundle) # We now have all the options we need and will just replicate the process in views.py tt, t_created = Test_Type.objects.get_or_create(name=bundle.data['scan_type']) # will save in development environment environment, env_created = Development_Environment.objects.get_or_create(name="Development") scan_date = datetime.strptime(bundle.data['scan_date'], '%Y-%m-%d') t = Test(engagement=bundle.obj.__getattr__('engagement_obj'), test_type=tt, target_start=scan_date, target_end=scan_date, environment=environment, percent_complete=100) t.full_clean() t.save() t.tags = bundle.data['tags'] try: parser = import_parser_factory(bundle.data['file'], t) except ValueError: raise NotFound("Parser ValueError") try: for item in parser.items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev if Finding.SEVERITIES[sev] > Finding.SEVERITIES[bundle.data['minimum_severity']]: continue item.test = t item.date = t.target_start item.reporter = bundle.request.user item.last_reviewed = timezone.now() item.last_reviewed_by = bundle.request.user item.active = bundle.data['active'] item.verified = bundle.data['verified'] item.save() if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0: for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse(finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"], ) burp_rr.clean() burp_rr.save() if item.unsaved_request is not None and item.unsaved_response is not None: burp_rr = BurpRawRequestResponse(finding=item, burpRequestBase64=item.unsaved_request, burpResponseBase64=item.unsaved_response, ) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=t.engagement.product) item.endpoints.add(ep) if item.unsaved_tags is not None: item.tags = item.unsaved_tags except SyntaxError: raise NotFound("Parser SyntaxError") # Everything executed fine. We successfully imported the scan. res = TestResource() uri = res.get_resource_uri(t) raise ImmediateHttpResponse(HttpCreated(location = uri))
def import_scan_results(request, eid):
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            scan_type = request.POST['scan_type']
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(name="Development")
            t = Test(engagement=engagement,
                     test_type=tt,
                     target_start=scan_date,
                     target_end=scan_date,
                     environment=environment,
                     percent_complete=100)
            t.full_clean()
            t.save()

            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=item,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                     host=endpoint.host,
                                                                     path=endpoint.path,
                                                                     query=endpoint.query,
                                                                     fragment=endpoint.fragment,
                                                                     product=t.engagement.product)
                        item.endpoints.add(ep)

                    finding_count += 1

                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=engagement, title="Import Scan Results", top_level=False, request=request)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form, 'eid': engagement.id})
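# Aside: the `any(scan_type in code for code in ...)` guard above relies on the
# Django choices convention of (value, label) tuples; `in` here is tuple
# membership, not substring matching. A sketch with hypothetical sample choices:
SCAN_TYPE_CHOICES = (('Burp Scan', 'Burp Scan'), ('Nessus Scan', 'Nessus Scan'))

def is_known_scan_type(scan_type):
    return any(scan_type in code for code in SCAN_TYPE_CHOICES)

assert is_known_scan_type('Burp Scan')
assert not is_known_scan_type('Made Up Scan')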
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()
    form.initial['tags'] = [tag.name for tag in t.tags]

    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            try:
                parser = import_parser_factory(file, t, active, verified)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0

                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # for these scanners, match on description as well
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      description=item.description)
                    else:
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev))

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = scan_date
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                if scan_type == "Arachni Scan":
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"],
                                        burpResponseBase64=req_resp["resp"])
                                else:
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"].encode("utf-8"),
                                        burpResponseBase64=req_resp["resp"].encode("utf-8"))
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=item.unsaved_request.encode("utf-8"),
                                                             burpResponseBase64=item.unsaved_response.encode("utf-8"))
                            burp_rr.clean()
                            burp_rr.save()

                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                         host=endpoint.host,
                                                                         path=endpoint.path,
                                                                         query=endpoint.query,
                                                                         fragment=endpoint.fragment,
                                                                         product=t.engagement.product)
                            find.endpoints.add(ep)
                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags
                    find.save()

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1

                messages.add_message(request,
                                     messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding', 'added') + ', that are new to scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding', 'reactivated') + ', that are back in scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding', 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')

                create_notification(event='results_added',
                                    title=str(finding_count) + " findings for " + engagement.product.name,
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=reverse('view_test', args=(t.id,)))
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id, title="Re-upload a %s" % scan_type, tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'product_tab': product_tab,
                   'eid': engagement.id,
                   'additional_message': additional_message})
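# Aside: the re-import reconciliation above reduces to a set difference; ids
# present on the test but absent from the fresh upload are the ones mitigated.
# Hypothetical ids for illustration:
original_items = [1, 2, 3, 4]   # finding ids already on the test
new_items = [2, 3, 5]           # ids matched or created by the re-upload
to_mitigate = set(original_items) - set(new_items)
assert to_mitigate == {1, 4}    # these receive mitigated status plus a note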
def save(self):
    data = self.validated_data
    skip_duplicates = data['skip_duplicates']
    close_old_findings = data['close_old_findings']
    test_type, created = Test_Type.objects.get_or_create(
        name=data.get('test_type', data['scan_type']))
    environment, created = Development_Environment.objects.get_or_create(
        name='Development')
    test = Test(
        engagement=data['engagement'],
        lead=data['lead'],
        test_type=test_type,
        target_start=data['scan_date'],
        target_end=data['scan_date'],
        environment=environment,
        percent_complete=100)
    try:
        test.full_clean()
    except ValidationError:
        pass

    test.save()
    test.tags = u' '.join(data['tags'])
    try:
        parser = import_parser_factory(data['file'], test, data['scan_type'])
    except ValueError:
        raise Exception('FileParser ValueError')

    skipped_hashcodes = []
    try:
        for item in parser.items:
            if skip_duplicates:
                hash_code = item.compute_hash_code()
                if Finding.objects.filter(Q(active=True) | Q(false_p=True) | Q(duplicate=True),
                                          test__engagement__product=test.engagement.product,
                                          hash_code=hash_code).exists():
                    skipped_hashcodes.append(hash_code)
                    continue

            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev

            if (Finding.SEVERITIES[sev] >
                    Finding.SEVERITIES[data['minimum_severity']]):
                continue

            item.test = test
            item.date = test.target_start
            item.reporter = self.context['request'].user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = self.context['request'].user
            item.active = data['active']
            item.verified = data['verified']
            item.save()

            if (hasattr(item, 'unsaved_req_resp') and
                    len(item.unsaved_req_resp) > 0):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=req_resp["req"],
                        burpResponseBase64=req_resp["resp"])
                    burp_rr.clean()
                    burp_rr.save()

            if (item.unsaved_request is not None and
                    item.unsaved_response is not None):
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=item.unsaved_request,
                    burpResponseBase64=item.unsaved_response)
                burp_rr.clean()
                burp_rr.save()

            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=test.engagement.product)
                item.endpoints.add(ep)

            # if item.unsaved_tags is not None:
            #     item.tags = item.unsaved_tags
    except SyntaxError:
        raise Exception('Parser SyntaxError')

    if close_old_findings:
        # Close old active findings that are not reported by this scan.
        new_hash_codes = test.finding_set.values('hash_code')
        for old_finding in Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement__product=test.engagement.product,
                        test__test_type=test_type,
                        active=True):
            old_finding.active = False
            old_finding.mitigated = datetime.datetime.combine(
                test.target_start, timezone.now().time())
            old_finding.mitigated_by = self.context['request'].user
            old_finding.notes.create(author=self.context['request'].user,
                                     entry="This finding has been automatically closed"
                                           " as it is not present anymore in recent scans.")
            old_finding.save()
            title = 'An old finding has been closed for "{}".' \
                .format(test.engagement.product.name)
            description = 'See <a href="{}">{}</a>' \
                .format(reverse('view_finding', args=(old_finding.id,)),
                        old_finding.title)
            create_notification(event='other',
                                title=title,
                                description=description,
                                icon='bullseye',
                                objowner=self.context['request'].user)

    return test
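# Aside: the close_old_findings bookkeeping in save() closes an old active
# finding only when its hash_code appears neither in the new upload nor among
# the duplicates that skip_duplicates deliberately left out of the test. A
# set-based sketch with hypothetical hash codes:
new_hash_codes = {'aa', 'bb'}
skipped_hashcodes = {'cc'}
old_active = {'aa': True, 'cc': True, 'dd': True}   # hash_code -> active flag
to_close = [h for h, active in old_active.items()
            if active and h not in new_hash_codes and h not in skipped_hashcodes]
assert to_close == ['dd']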