def obj_create(self, bundle, **kwargs):
    """Tastypie endpoint: re-import a scan file into an existing Test.

    Matches uploaded findings against the test's existing findings,
    re-activating previously mitigated ones and creating new ones; any
    original finding absent from the upload is marked mitigated.
    Always terminates by raising ImmediateHttpResponse (tastypie idiom).
    """
    bundle.obj = ImportScanObject(initial=kwargs)
    self.is_valid(bundle)
    if bundle.errors:
        raise ImmediateHttpResponse(
            response=self.error_response(bundle.request, bundle.errors))
    bundle = self.full_hydrate(bundle)

    # Pull the hydrated options off the stub object.
    test = bundle.obj.__getattr__('test_obj')
    scan_type = bundle.obj.__getattr__('scan_type')
    min_sev = bundle.obj.__getattr__('minimum_severity')
    scan_date = bundle.obj.__getattr__('scan_date')
    verified = bundle.obj.__getattr__('verified')
    active = bundle.obj.__getattr__('active')

    try:
        parser = import_parser_factory(bundle.data['file'], test)
    except ValueError:
        raise NotFound("Parser ValueError")

    try:
        items = parser.items
        # Snapshot of finding ids that existed before this upload; anything
        # not re-seen below ends up in to_mitigate.
        original_items = test.finding_set.all().values_list("id", flat=True)
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            # Skip findings below the requested severity floor.
            if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                continue

            # These two scanners need description to disambiguate matches;
            # other scanners match on title/severity alone.
            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                find = Finding.objects.filter(
                    title=item.title,
                    test__id=test.id,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description)
            else:
                find = Finding.objects.filter(
                    title=item.title,
                    test__id=test.id,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                )

            if len(find) == 1:
                # Exactly one existing match: keep it, reactivating if needed.
                find = find[0]
                if find.mitigated:
                    # it was once fixed, but now back
                    find.mitigated = None
                    find.mitigated_by = None
                    find.active = True
                    find.verified = verified
                    find.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=bundle.request.user)
                    note.save()
                    find.notes.add(note)
                    reactivated_count += 1
                new_items.append(find.id)
            else:
                # No single match: treat as a brand-new finding.
                item.test = test
                item.date = test.target_start
                item.reporter = bundle.request.user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = bundle.request.user
                item.verified = verified
                item.active = active
                item.save()
                finding_added_count += 1
                new_items.append(item.id)
                find = item
                # Attach any raw request/response pairs the parser collected.
                if hasattr(item, 'unsaved_req_resp') and len(
                        item.unsaved_req_resp) > 0:
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=find,
                            burpRequestBase64=req_resp["req"],
                            burpResponseBase64=req_resp["resp"],
                        )
                        burp_rr.clean()
                        burp_rr.save()
                if item.unsaved_request is not None and item.unsaved_response is not None:
                    burp_rr = BurpRawRequestResponse(
                        finding=find,
                        burpRequestBase64=item.unsaved_request,
                        burpResponseBase64=item.unsaved_response,
                    )
                    burp_rr.clean()
                    burp_rr.save()
            if find:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    find.endpoints.add(ep)
            if item.unsaved_tags is not None:
                find.tags = item.unsaved_tags
        # calculate the difference: original findings not seen in this upload
        # are considered fixed and get mitigated at scan_date + current time.
        to_mitigate = set(original_items) - set(new_items)
        for finding_id in to_mitigate:
            finding = Finding.objects.get(id=finding_id)
            finding.mitigated = datetime.combine(scan_date,
                                                 timezone.now().time())
            finding.mitigated_by = bundle.request.user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=bundle.request.user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise NotFound("Parser SyntaxError")

    # Everything executed fine. We successfully imported the scan.
    raise ImmediateHttpResponse(
        HttpCreated(location=bundle.obj.__getattr__('test')))
def import_scan_results(request, eid=None, pid=None):
    """Import a scan report as a new Test.

    With ``eid``, imports into that engagement; with only ``pid``, creates
    an ad-hoc engagement on the product on the fly. On success, redirects
    to the new test; on parser error, re-renders the form with a message.

    Fix: removed leftover Python 2 debug ``print`` statements from the
    finding-import loop.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            # Allows for a test to be imported with an engagement created on the fly
            if engagement is None:
                engagement = Engagement()
                product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime(
                    "%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject scan types that are not offered by the form.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(
                engagement=engagement,
                test_type=tt,
                target_start=scan_date,
                target_end=scan_date,
                environment=environment,
                percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()

                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()

            parser = import_parser_factory(file, t)
            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested severity floor.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    if item.date == timezone.now().date():
                        item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    # First save skips dedupe; endpoints are attached below,
                    # then the finding is saved again.
                    item.save(dedupe_option=False, false_history=True)
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=item,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    item.save(false_history=True)
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                create_notification(
                    event='results_added',
                    title=str(finding_count) + " findings for " + engagement.product.name,
                    finding_count=finding_count,
                    test=t,
                    engagement=engagement,
                    url=request.build_absolute_uri(
                        reverse('view_test', args=(t.id, ))))
                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        # NOTE(review): this is a set literal, not a dict — looks suspicious;
        # confirm against the template's expectation before changing.
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'product_tab': product_tab,
        'custom_breadcrumb': custom_breadcrumb,
        'title': title,
        'cred_form': cred_form,
    })
def obj_create(self, bundle, **kwargs):
    """Tastypie endpoint: import a scan file, creating a fresh Test.

    Validates and hydrates the bundle, creates the Test, runs the parser,
    and saves every finding at or above the requested minimum severity.
    Always terminates by raising ImmediateHttpResponse (tastypie idiom).
    """
    bundle.obj = ImportScanObject(initial=kwargs)
    self.is_valid(bundle)
    if bundle.errors:
        raise ImmediateHttpResponse(
            response=self.error_response(bundle.request, bundle.errors))
    bundle = self.full_hydrate(bundle)

    # We now have all the options we need and will just replicate the process in views.py
    tt, t_created = Test_Type.objects.get_or_create(
        name=bundle.data['scan_type'])
    # will save in development environment
    environment, env_created = Development_Environment.objects.get_or_create(
        name="Development")
    # API clients must send the scan date in YYYY/MM/DD form.
    scan_date = datetime.strptime(bundle.data['scan_date'], '%Y/%m/%d')

    t = Test(engagement=bundle.obj.__getattr__('engagement_obj'),
             test_type=tt,
             target_start=scan_date,
             target_end=scan_date,
             environment=environment,
             percent_complete=100)
    t.full_clean()
    t.save()
    t.tags = bundle.data['tags']

    try:
        parser = import_parser_factory(bundle.data['file'], t)
    except ValueError:
        raise NotFound("Parser ValueError")

    try:
        for item in parser.items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev

            # Skip findings below the requested severity floor.
            if Finding.SEVERITIES[sev] > Finding.SEVERITIES[
                    bundle.data['minimum_severity']]:
                continue

            item.test = t
            item.date = t.target_start
            item.reporter = bundle.request.user
            item.last_reviewed = datetime.now(tz=localtz)
            item.last_reviewed_by = bundle.request.user
            item.active = bundle.data['active']
            item.verified = bundle.data['verified']
            item.save()

            # Attach any raw request/response pairs the parser collected.
            if hasattr(
                    item,
                    'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=req_resp["req"],
                        burpResponseBase64=req_resp["resp"],
                    )
                    burp_rr.clean()
                    burp_rr.save()

            if item.unsaved_request is not None and item.unsaved_response is not None:
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=item.unsaved_request,
                    burpResponseBase64=item.unsaved_response,
                )
                burp_rr.clean()
                burp_rr.save()

            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=t.engagement.product)
                item.endpoints.add(ep)

            if item.unsaved_tags is not None:
                item.tags = item.unsaved_tags
    except SyntaxError:
        raise NotFound("Parser SyntaxError")

    # Everything executed fine. We successfully imported the scan.
    res = TestResource()
    uri = res.get_resource_uri(t)
    raise ImmediateHttpResponse(HttpCreated(location=uri))
def import_scan_results(request, eid=None, pid=None):
    """Import a scan report as a new Test (duplicate variant of the view).

    With ``eid``, imports into that engagement; with only ``pid``, creates
    an ad-hoc engagement on the product on the fly. On success, redirects
    to the new test; on parser error, re-renders the form with a message.

    Fix: removed leftover Python 2 debug ``print`` statements from the
    finding-import loop.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            # Allows for a test to be imported with an engagement created on the fly
            if engagement is None:
                engagement = Engagement()
                product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime(
                    "%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject scan types that are not offered by the form.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement,
                     test_type=tt,
                     target_start=scan_date,
                     target_end=scan_date,
                     environment=environment,
                     percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()

                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()

            parser = import_parser_factory(file, t)
            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested severity floor.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    if item.date == timezone.now().date():
                        item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    # First save skips dedupe; endpoints are attached below,
                    # then the finding is saved again.
                    item.save(dedupe_option=False, false_history=True)
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=item,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    item.save(false_history=True)
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' +
                    message(finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                create_notification(event='results_added',
                                    title=str(finding_count) + " findings for " + engagement.product.name,
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=request.build_absolute_uri(
                                        reverse('view_test', args=(t.id, ))))
                return HttpResponseRedirect(reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        # NOTE(review): this is a set literal, not a dict — looks suspicious;
        # confirm against the template's expectation before changing.
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'custom_breadcrumb': custom_breadcrumb,
            'title': title,
            'cred_form': cred_form,
        })
def test_acunetix_one_finding(self):
    """Parse the single-finding Acunetix sample and expect exactly one finding.

    Fix: open the fixture with a ``with`` block so the file handle is
    closed even if the parser raises, instead of a manual ``close()``.
    """
    with open('dojo/unittests/scans/acunetix/one_finding.xml') as testfile:
        parser = import_parser_factory(testfile, Test(), False, False,
                                       'Acunetix Scan')
        findings = parser.get_findings(testfile, Test())
    self.assertEqual(1, len(findings))
def re_import_scan_results(request, tid):
    """Re-upload a scan report into an existing Test.

    Findings in the upload that match existing ones are kept (and
    re-activated if previously mitigated); unmatched upload findings are
    created; existing findings absent from the upload are mitigated.
    """
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()
            try:
                items = parser.items
                # Snapshot of finding ids that existed before this upload;
                # anything not re-seen below ends up in to_mitigate.
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'

                    # Skip findings below the requested severity floor.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # These two scanners need description to disambiguate
                    # matches; other scanners match on title/severity alone.
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                            description=item.description)
                    else:
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                        )

                    if len(find) == 1:
                        # Exactly one existing match: keep it, reactivating
                        # if it had been mitigated.
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." % scan_type,
                                author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        # No single match: treat as a brand-new finding.
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = datetime.now(tz=localtz)
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item
                        # Attach any raw request/response pairs collected.
                        if hasattr(item, 'unsaved_req_resp') and len(
                                item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                burp_rr = BurpRawRequestResponse(
                                    finding=find,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                )
                                burp_rr.clean()
                                burp_rr.save()
                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(
                                finding=find,
                                burpRequestBase64=item.unsaved_request,
                                burpResponseBase64=item.unsaved_response,
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=t.engagement.product)
                            find.endpoints.add(ep)
                    if item.unsaved_tags is not None:
                        find.tags = item.unsaved_tags
                # calculate the difference: original findings not re-seen in
                # this upload are mitigated at scan_date + current time.
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(
                        scan_date,
                        datetime.now(tz=localtz).time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(finding_added_count, 'finding',
                                                'added') + ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(reactivated_count, 'finding',
                                                'reactivated') + ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(mitigated_count, 'finding',
                                                'mitigated') + '. Please manually verify each one.',
                        extra_tags='alert-success')
                return HttpResponseRedirect(reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    add_breadcrumb(parent=t,
                   title="Re-upload a %s" % scan_type,
                   top_level=False,
                   request=request)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'eid': engagement.id,
            'additional_message': additional_message,
        })
def save(self):
    """Serializer save: import a scan file, creating a fresh Test.

    Optionally skips findings whose hash_code already exists
    (``skip_duplicates``) and closes old active findings of the same test
    type that the new scan no longer reports (``close_old_findings``).
    Returns the created Test.
    """
    data = self.validated_data
    skip_duplicates = data['skip_duplicates']
    close_old_findings = data['close_old_findings']
    active = data['active']
    verified = data['verified']
    test_type, created = Test_Type.objects.get_or_create(
        name=data.get('test_type', data['scan_type']))
    environment, created = Development_Environment.objects.get_or_create(
        name='Development')
    test = Test(
        engagement=data['engagement'],
        lead=data['lead'],
        test_type=test_type,
        target_start=data['scan_date'],
        target_end=data['scan_date'],
        environment=environment,
        percent_complete=100)
    try:
        test.full_clean()
    except ValidationError:
        # Best-effort validation; the save below proceeds regardless.
        pass

    test.save()
    test.tags = u' '.join(data['tags'])
    try:
        parser = import_parser_factory(data['file'],
                                       test,
                                       active,
                                       verified,
                                       data['scan_type'],)
    except ValueError:
        raise Exception('FileParser ValueError')

    skipped_hashcodes = []
    try:
        for item in parser.items:
            if skip_duplicates:
                # Skip the finding if an active/false-positive/duplicate
                # finding with the same hash already exists; scope of the
                # lookup depends on deduplication_on_engagement.
                hash_code = item.compute_hash_code()
                if ((test.engagement.deduplication_on_engagement and
                        Finding.objects.filter(
                            Q(active=True) | Q(false_p=True) | Q(duplicate=True),
                            test__engagement=test.engagement,
                            hash_code=hash_code).exists()) or
                        (not test.engagement.deduplication_on_engagement and
                            Finding.objects.filter(
                                Q(active=True) | Q(false_p=True) | Q(duplicate=True),
                                test__engagement__product=test.engagement.product,
                                hash_code=hash_code).exists())):
                    # Remember skipped hashes so close_old_findings below
                    # does not close their still-present originals.
                    skipped_hashcodes.append(hash_code)
                    continue

            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev

            # Skip findings below the requested severity floor.
            if (Finding.SEVERITIES[sev] >
                    Finding.SEVERITIES[data['minimum_severity']]):
                continue

            item.test = test
            item.date = test.target_start
            item.reporter = self.context['request'].user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = self.context['request'].user
            item.active = data['active']
            item.verified = data['verified']
            item.save()

            # Attach any raw request/response pairs the parser collected.
            if (hasattr(item, 'unsaved_req_resp') and
                    len(item.unsaved_req_resp) > 0):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=req_resp["req"],
                        burpResponseBase64=req_resp["resp"])
                    burp_rr.clean()
                    burp_rr.save()

            if (item.unsaved_request is not None and
                    item.unsaved_response is not None):
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=item.unsaved_request,
                    burpResponseBase64=item.unsaved_response)
                burp_rr.clean()
                burp_rr.save()

            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=test.engagement.product)
                item.endpoints.add(ep)

            # if item.unsaved_tags is not None:
            #     item.tags = item.unsaved_tags
    except SyntaxError:
        raise Exception('Parser SyntaxError')

    if close_old_findings:
        # Close old active findings that are not reported by this scan.
        new_hash_codes = test.finding_set.values('hash_code')
        old_findings = None
        if test.engagement.deduplication_on_engagement:
            old_findings = Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement=test.engagement,
                        test__test_type=test_type,
                        active=True)
        else:
            old_findings = Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement__product=test.engagement.product,
                        test__test_type=test_type,
                        active=True)
        for old_finding in old_findings:
            old_finding.active = False
            old_finding.mitigated = datetime.datetime.combine(
                test.target_start,
                timezone.now().time())
            old_finding.mitigated_by = self.context['request'].user
            old_finding.notes.create(author=self.context['request'].user,
                                     entry="This finding has been automatically closed"
                                           " as it is not present anymore in recent scans.")
            Tag.objects.add_tag(old_finding, 'stale')
            old_finding.save()
            # Notify per closed finding.
            title = 'An old finding has been closed for "{}".' \
                    .format(test.engagement.product.name)
            description = 'See <a href="{}">{}</a>' \
                    .format(reverse('view_finding', args=(old_finding.id, )),
                            old_finding.title)
            create_notification(event='other',
                                title=title,
                                description=description,
                                icon='bullseye',
                                objowner=self.context['request'].user)

    return test
def import_scan_results(request, eid):
    """Import a scan report into an engagement as a new Test (older variant).

    On success, redirects to the new test; on parser error, re-renders the
    form with an error message.
    """
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            scan_type = request.POST['scan_type']
            # Reject scan types that are not offered by the form.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement,
                     test_type=tt,
                     target_start=scan_date,
                     target_end=scan_date,
                     environment=environment,
                     percent_complete=100)
            t.full_clean()
            t.save()

            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    # Skip findings below the requested severity floor.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()

                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)

                    finding_count += 1

                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(
                                         finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=engagement,
                   title="Import Scan Results",
                   top_level=False,
                   request=request)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   })
def save(self):
    """Serializer save: re-import a scan file into an existing Test.

    Matches uploaded findings against the test's existing findings,
    re-activating previously mitigated ones and creating new ones; any
    original finding absent from the upload is marked mitigated.
    Returns the (unchanged) Test.

    Fix: the new-finding branch appended ``item.id`` (an int) to
    ``new_items`` while the matched branch appends the Finding object and
    ``original_items`` holds Finding objects — the set difference below
    compares Finding instances, so append the object itself for
    consistency.
    """
    data = self.validated_data
    test = data['test']
    scan_type = data['scan_type']
    min_sev = data['minimum_severity']
    scan_date = data['scan_date']
    verified = data['verified']
    active = data['active']

    try:
        parser = import_parser_factory(data['file'],
                                       test,
                                       data['scan_type'],)
    except ValueError:
        raise Exception("Parser ValueError")

    try:
        items = parser.items
        # Snapshot of findings that existed before this upload; anything
        # not re-seen below ends up in to_mitigate.
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            # Skip findings below the requested severity floor.
            if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                continue

            # These two scanners need description to disambiguate matches;
            # other scanners match on title/severity alone.
            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description).all()
            else:
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev)).all()

            if findings:
                # Existing match: keep it, reactivating if it had been
                # mitigated.
                finding = findings[0]
                if finding.mitigated:
                    finding.mitigated = None
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified
                    finding.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=self.context['request'].user)
                    note.save()
                    finding.notes.add(note)
                    reactivated_count += 1
                new_items.append(finding)
            else:
                # No match: treat as a brand-new finding.
                item.test = test
                item.date = test.target_start
                item.reporter = self.context['request'].user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = self.context['request'].user
                item.verified = verified
                item.active = active
                item.save()
                finding_added_count += 1
                # FIX: append the Finding object (was item.id) so new_items
                # is homogeneous with original_items for the set difference.
                new_items.append(item)
                finding = item
                # Attach any raw request/response pairs collected.
                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=req_resp['req'],
                            burpResponseBase64=req_resp['resp'])
                        burp_rr.clean()
                        burp_rr.save()
                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=item.unsaved_request,
                        burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()
            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    finding.endpoints.add(ep)
            # if item.unsaved_tags:
            #     finding.tags = item.unsaved_tags

        # Original findings not re-seen in this upload are mitigated at
        # scan_date + current time.
        to_mitigate = set(original_items) - set(new_items)
        for finding in to_mitigate:
            finding.mitigated = datetime.datetime.combine(
                scan_date,
                timezone.now().time())
            finding.mitigated_by = self.context['request'].user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=self.context['request'].user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise Exception("Parser SyntaxError")

    return test
def import_scan_results(request, eid):
    """Django view: import a scan report into a new Test on an Engagement.

    GET renders the import form; POST validates the form, creates a Test
    (with optional credential mapping and tags), parses the uploaded file,
    and saves each finding at or above the requested minimum severity.
    """
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    cred_form = CredMappingForm()
    # Only offer credentials already mapped to this engagement.
    cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
        engagement=engagement).order_by('cred_id')
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject scan types not declared in the form's choices.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement, test_type=tt,
                     target_start=scan_date, target_end=scan_date,
                     environment=environment, percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            #Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    #Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()
            try:
                for item in parser.items:
                    sev = item.severity
                    # Normalize severity aliases to 'Info'.
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip anything below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    item.save()
                    # Persist any raw request/response pairs from the parser.
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=item,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')
    add_breadcrumb(parent=engagement, title="Import Scan Results",
                   top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'eid': engagement.id,
        'cred_form': cred_form,
    })
def save(self):
    """Import scan results into a newly created Test (DRF serializer save).

    Supports skipping duplicates by hash_code and, optionally, closing old
    active findings of the same test type that are absent from this scan.
    Returns the created Test. Raises Exception on parser errors.
    """
    data = self.validated_data
    skip_duplicates = data['skip_duplicates']
    close_old_findings = data['close_old_findings']
    test_type, created = Test_Type.objects.get_or_create(
        name=data.get('test_type', data['scan_type']))
    environment, created = Development_Environment.objects.get_or_create(
        name='Development')
    test = Test(
        engagement=data['engagement'],
        lead=data['lead'],
        test_type=test_type,
        target_start=data['scan_date'],
        target_end=data['scan_date'],
        environment=environment,
        percent_complete=100)
    try:
        test.full_clean()
    except ValidationError:
        # Validation failures are deliberately ignored; the test is saved anyway.
        pass
    test.save()
    test.tags = u' '.join(data['tags'])
    try:
        parser = import_parser_factory(data['file'], test, data['scan_type'],)
    except ValueError:
        raise Exception('FileParser ValueError')
    # hash_codes skipped as duplicates; excluded from the close-old pass below.
    skipped_hashcodes = []
    try:
        for item in parser.items:
            if skip_duplicates:
                hash_code = item.compute_hash_code()
                # A finding with the same hash in this product that is
                # active, false-positive, or already a duplicate counts
                # as a duplicate and is skipped.
                if Finding.objects.filter(
                        Q(active=True) | Q(false_p=True) | Q(duplicate=True),
                        test__engagement__product=test.engagement.product,
                        hash_code=hash_code).exists():
                    skipped_hashcodes.append(hash_code)
                    continue
            sev = item.severity
            # Normalize severity aliases to 'Info'.
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev
            # Skip findings below the requested minimum severity.
            if (Finding.SEVERITIES[sev] >
                    Finding.SEVERITIES[data['minimum_severity']]):
                continue
            item.test = test
            item.date = test.target_start
            item.reporter = self.context['request'].user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = self.context['request'].user
            item.active = data['active']
            item.verified = data['verified']
            item.save()
            # Persist any raw request/response pairs from the parser.
            if (hasattr(item, 'unsaved_req_resp') and
                    len(item.unsaved_req_resp) > 0):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=req_resp["req"],
                        burpResponseBase64=req_resp["resp"])
                    burp_rr.clean()
                    burp_rr.save()
            if (item.unsaved_request is not None and
                    item.unsaved_response is not None):
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=item.unsaved_request,
                    burpResponseBase64=item.unsaved_response)
                burp_rr.clean()
                burp_rr.save()
            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=test.engagement.product)
                item.endpoints.add(ep)
            # if item.unsaved_tags is not None:
            #     item.tags = item.unsaved_tags
    except SyntaxError:
        raise Exception('Parser SyntaxError')
    if close_old_findings:
        # Close old active findings that are not reported by this scan.
        new_hash_codes = test.finding_set.values('hash_code')
        for old_finding in Finding.objects.exclude(test=test) \
                .exclude(hash_code__in=new_hash_codes) \
                .exclude(hash_code__in=skipped_hashcodes) \
                .filter(test__engagement__product=test.engagement.product,
                        test__test_type=test_type,
                        active=True):
            old_finding.active = False
            old_finding.mitigated = datetime.datetime.combine(
                test.target_start, timezone.now().time())
            old_finding.mitigated_by = self.context['request'].user
            old_finding.notes.create(
                author=self.context['request'].user,
                entry="This finding has been automatically closed"
                      " as it is not present anymore in recent scans.")
            old_finding.save()
            title = 'An old finding has been closed for "{}".' \
                .format(test.engagement.product.name)
            description = 'See <a href="{}">{}</a>' \
                .format(reverse('view_finding', args=(old_finding.id, )),
                        old_finding.title)
            create_notification(event='other',
                                title=title,
                                description=description,
                                icon='bullseye',
                                objowner=self.context['request'].user)
    return test
def import_scan_results(request, eid=None, pid=None):
    """Django view: import a scan into an Engagement (eid) or Product (pid).

    When only a product is given, an ad-hoc engagement is created on the
    fly. Handles authorization, optional JIRA push, credential mapping,
    file-size limits, endpoint statuses, and success/error messaging.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    jform = None
    user = request.user
    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        engagement_or_product = engagement
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
    elif pid:
        product = get_object_or_404(Product, id=pid)
        engagement_or_product = product
    elif not user.is_staff:
        raise PermissionDenied
    if not user_is_authorized(user, 'staff', engagement_or_product):
        raise PermissionDenied
    push_all_jira_issues = jira_helper.is_push_all_issues(
        engagement_or_product)
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        # Only build the JIRA sub-form when a JIRA project is configured.
        if jira_helper.get_jira_project(engagement_or_product):
            jform = JIRAImportScanForm(request.POST,
                                       push_all=push_all_jira_issues,
                                       prefix='jiraform')
            logger.debug('jform valid: %s', jform.is_valid())
            logger.debug('jform errors: %s', jform.errors)
        if form.is_valid() and (jform is None or jform.is_valid()):
            # Allows for a test to be imported with an engagement created on the fly
            if engagement is None:
                engagement = Engagement()
                # product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime(
                    "%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()
            file = request.FILES.get('file', None)
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject scan types not declared in the form's choices.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            if file and is_scan_file_too_large(file):
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Report file is too large. Maximum supported size is {} MB"
                    .format(settings.SCAN_FILE_MAX_SIZE),
                    extra_tags='alert-danger')
                return HttpResponseRedirect(
                    reverse('import_scan_results', args=(engagement, )))
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement, test_type=tt,
                     target_start=scan_date, target_end=scan_date,
                     environment=environment, percent_complete=100)
            t.lead = user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()
            try:
                parser = import_parser_factory(file, t, active, verified)
            except Exception as e:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "An error has occurred in the parser, please see error "
                    "log for details.",
                    extra_tags='alert-danger')
                parse_logger.exception(e)
                parse_logger.error("Error in parser: {}".format(str(e)))
                return HttpResponseRedirect(
                    reverse('import_scan_results', args=(engagement.id, )))
            try:
                # can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false
                # push_to_jira = jira_helper.is_push_to_jira(new_finding, jform.cleaned_data.get('push_to_jira'))
                push_to_jira = push_all_jira_issues or (
                    jform and jform.cleaned_data.get('push_to_jira'))
                for item in parser.items:
                    # print("item blowup")
                    # print(item)
                    sev = item.severity
                    # Normalize severity aliases to 'Info'.
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    item.reporter = user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = user
                    # Some parsers manage active/verified themselves; only
                    # override from the form when the parser does not.
                    if not handles_active_verified_statuses(
                            form.get_scan_type()):
                        item.active = active
                        item.verified = verified
                    # First save defers dedupe; final save below re-runs it.
                    item.save(dedupe_option=False, false_history=True)
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            # Arachni payloads are already base64-encoded.
                            if form.get_scan_type() == "Arachni Scan":
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                )
                            else:
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=base64.b64encode(
                                        req_resp["req"].encode("utf-8")),
                                    burpResponseBase64=base64.b64encode(
                                        req_resp["resp"].encode("utf-8")),
                                )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=base64.b64encode(
                                item.unsaved_request.encode()),
                            burpResponseBase64=base64.b64encode(
                                item.unsaved_response.encode()),
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    # Endpoints reported by the parser.
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        eps, created = Endpoint_Status.objects.get_or_create(
                            finding=item,
                            endpoint=ep)
                        ep.endpoint_status.add(eps)
                        item.endpoints.add(ep)
                        item.endpoint_status.add(eps)
                    # Endpoints supplied manually on the form.
                    for endpoint in form.cleaned_data['endpoints']:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        eps, created = Endpoint_Status.objects.get_or_create(
                            finding=item,
                            endpoint=ep)
                        ep.endpoint_status.add(eps)
                        item.endpoints.add(ep)
                        item.endpoint_status.add(eps)
                    item.save(false_history=True, push_to_jira=push_to_jira)
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                create_notification(initiator=user, event='scan_added',
                                    title=str(finding_count) + " findings for " + engagement.product.name,
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=reverse('view_test', args=(t.id, )))
                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')
    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    if jira_helper.get_jira_project(engagement_or_product):
        jform = JIRAImportScanForm(push_all=push_all_jira_issues,
                                   prefix='jiraform')
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'engagement_or_product': engagement_or_product,
            'custom_breadcrumb': custom_breadcrumb,
            'title': title,
            'cred_form': cred_form,
            'jform': jform
        })
def import_scan_results(request, eid):
    """Django view: import a scan report into a new Test on an Engagement.

    Early, minimal variant: no credential mapping, no tags, and severity
    normalization only maps 'Information' (not 'Informational') to 'Info'.
    """
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            scan_type = request.POST['scan_type']
            # Reject scan types not declared in the form's choices.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement, test_type=tt,
                     target_start=scan_date, target_end=scan_date,
                     environment=environment, percent_complete=100)
            t.full_clean()
            t.save()
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()
            try:
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')
    add_breadcrumb(parent=engagement, title="Import Scan Results",
                   top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'eid': engagement.id,
    })
def obj_create(self, bundle, **kwargs):
    """Tastypie resource hook: re-import scan results into an existing test.

    Validates and hydrates the bundle, matches parsed findings against the
    test's current findings (re-activating mitigated matches, creating new
    ones), mitigates findings no longer reported, and finally raises
    ImmediateHttpResponse(HttpCreated) on success — tastypie's way of
    short-circuiting with a 201.
    """
    bundle.obj = ImportScanObject(initial=kwargs)
    self.is_valid(bundle)
    if bundle.errors:
        raise ImmediateHttpResponse(
            response=self.error_response(bundle.request, bundle.errors))
    bundle = self.full_hydrate(bundle)
    test = bundle.obj.__getattr__('test_obj')
    scan_type = bundle.obj.__getattr__('scan_type')
    min_sev = bundle.obj.__getattr__('minimum_severity')
    scan_date = bundle.obj.__getattr__('scan_date')
    verified = bundle.obj.__getattr__('verified')
    active = bundle.obj.__getattr__('active')
    try:
        parser = import_parser_factory(bundle.data['file'], test)
    except ValueError:
        raise NotFound("Parser ValueError")
    try:
        items = parser.items
        # Ids of pre-import findings; anything not re-seen gets mitigated.
        original_items = test.finding_set.all().values_list("id", flat=True)
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        for item in items:
            sev = item.severity
            # Normalize severity aliases to 'Info'.
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            # Skip findings below the requested minimum severity.
            if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                continue
            # Veracode/Arachni titles are not unique enough; compare
            # descriptions too.
            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                find = Finding.objects.filter(
                    title=item.title,
                    test__id=test.id,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description
                    )
            else:
                find = Finding.objects.filter(
                    title=item.title,
                    test__id=test.id,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    )
            # Only treat an unambiguous single match as "existing".
            if len(find) == 1:
                find = find[0]
                if find.mitigated:
                    # it was once fixed, but now back
                    find.mitigated = None
                    find.mitigated_by = None
                    find.active = True
                    find.verified = verified
                    find.save()
                    note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                 author=bundle.request.user)
                    note.save()
                    find.notes.add(note)
                    reactivated_count += 1
                # Matched findings (active or re-activated) are excluded
                # from the mitigation pass below.
                new_items.append(find.id)
            else:
                # Zero (or ambiguous) matches: save the item as new.
                item.test = test
                item.date = test.target_start
                item.reporter = bundle.request.user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = bundle.request.user
                item.verified = verified
                item.active = active
                item.save()
                finding_added_count += 1
                new_items.append(item.id)
                find = item
                if hasattr(item, 'unsaved_req_resp') and len(
                        item.unsaved_req_resp) > 0:
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=find,
                            burpRequestBase64=req_resp["req"],
                            burpResponseBase64=req_resp["resp"],
                            )
                        burp_rr.clean()
                        burp_rr.save()
                if item.unsaved_request is not None and item.unsaved_response is not None:
                    burp_rr = BurpRawRequestResponse(
                        finding=find,
                        burpRequestBase64=item.unsaved_request,
                        burpResponseBase64=item.unsaved_response,
                        )
                    burp_rr.clean()
                    burp_rr.save()
            if find:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    find.endpoints.add(ep)
                if item.unsaved_tags is not None:
                    find.tags = item.unsaved_tags
        # calculate the difference
        to_mitigate = set(original_items) - set(new_items)
        for finding_id in to_mitigate:
            finding = Finding.objects.get(id=finding_id)
            finding.mitigated = datetime.combine(scan_date,
                                                 timezone.now().time())
            finding.mitigated_by = bundle.request.user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=bundle.request.user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise NotFound("Parser SyntaxError")
    # Everything executed fine. We successfully imported the scan.
    raise ImmediateHttpResponse(
        HttpCreated(location=bundle.obj.__getattr__('test')))
def save(self): data = self.validated_data test_type, created = Test_Type.objects.get_or_create( name=data['scan_type']) environment, created = Development_Environment.objects.get_or_create( name='Development') test = Test(engagement=data['engagement'], lead=data['lead'], test_type=test_type, target_start=data['scan_date'], target_end=data['scan_date'], environment=environment, percent_complete=100) try: test.full_clean() except ValidationError: pass test.save() try: parser = import_parser_factory( data['file'], test, data['scan_type'], ) except ValueError: raise Exception('FileParser ValueError') try: for item in parser.items: sev = item.severity if sev == 'Information' or sev == 'Informational': sev = 'Info' item.severity = sev if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[data['minimum_severity']]): continue item.test = test item.date = test.target_start item.reporter = self.context['request'].user item.last_reviewed = timezone.now() item.last_reviewed_by = self.context['request'].user item.active = data['active'] item.verified = data['verified'] item.save() if (hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0): for req_resp in item.unsaved_req_resp: burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=req_resp["req"], burpResponseBase64=req_resp["resp"]) burp_rr.clean() burp_rr.save() if (item.unsaved_request is not None and item.unsaved_response is not None): burp_rr = BurpRawRequestResponse( finding=item, burpRequestBase64=item.unsaved_request, burpResponseBase64=item.unsaved_response) burp_rr.clean() burp_rr.save() for endpoint in item.unsaved_endpoints: ep, created = Endpoint.objects.get_or_create( protocol=endpoint.protocol, host=endpoint.host, path=endpoint.path, query=endpoint.query, fragment=endpoint.fragment, product=test.engagement.product) item.endpoints.add(ep) # if item.unsaved_tags is not None: # item.tags = item.unsaved_tags except SyntaxError: raise Exception('Parser SyntaxError') return test
def obj_create(self, bundle, **kwargs):
    """Tastypie resource hook: import a scan report as a new Test.

    Validates and hydrates the bundle, creates the Test, parses the upload,
    saves each finding at or above the requested minimum severity, then
    raises ImmediateHttpResponse(HttpCreated) — tastypie's way of
    short-circuiting with a 201 pointing at the new test resource.
    """
    bundle.obj = ImportScanObject(initial=kwargs)
    self.is_valid(bundle)
    if bundle.errors:
        raise ImmediateHttpResponse(
            response=self.error_response(bundle.request, bundle.errors))
    bundle = self.full_hydrate(bundle)

    # We now have all the options we need and will just replicate the process in views.py
    tt, t_created = Test_Type.objects.get_or_create(name=bundle.data['scan_type'])
    # will save in development environment
    environment, env_created = Development_Environment.objects.get_or_create(name="Development")
    scan_date = datetime.strptime(bundle.data['scan_date'], '%Y-%m-%d')

    t = Test(engagement=bundle.obj.__getattr__('engagement_obj'),
             lead=bundle.obj.__getattr__('user_obj'),
             test_type=tt,
             target_start=scan_date,
             target_end=scan_date,
             environment=environment,
             percent_complete=100)
    try:
        t.full_clean()
    except ValidationError as e:
        # FIX: previously this printed the ValidationError *class* itself,
        # which hid the actual validation failure; print the caught
        # exception instance instead so the details are visible.
        print("Error Validating Test Object")
        print(e)
    t.save()
    t.tags = bundle.data['tags']

    try:
        parser = import_parser_factory(bundle.data['file'], t)
    except ValueError:
        raise NotFound("Parser ValueError")

    try:
        for item in parser.items:
            sev = item.severity
            # Normalize severity aliases to 'Info'.
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            item.severity = sev
            # Skip findings below the requested minimum severity.
            if Finding.SEVERITIES[sev] > Finding.SEVERITIES[bundle.data['minimum_severity']]:
                continue
            item.test = t
            item.date = t.target_start
            item.reporter = bundle.request.user
            item.last_reviewed = timezone.now()
            item.last_reviewed_by = bundle.request.user
            item.active = bundle.data['active']
            item.verified = bundle.data['verified']
            item.save()
            # Persist any raw request/response pairs from the parser.
            if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=req_resp["req"],
                        burpResponseBase64=req_resp["resp"],
                        )
                    burp_rr.clean()
                    burp_rr.save()
            if item.unsaved_request is not None and item.unsaved_response is not None:
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=item.unsaved_request,
                    burpResponseBase64=item.unsaved_response,
                    )
                burp_rr.clean()
                burp_rr.save()
            for endpoint in item.unsaved_endpoints:
                ep, created = Endpoint.objects.get_or_create(
                    protocol=endpoint.protocol,
                    host=endpoint.host,
                    path=endpoint.path,
                    query=endpoint.query,
                    fragment=endpoint.fragment,
                    product=t.engagement.product)
                item.endpoints.add(ep)
            if item.unsaved_tags is not None:
                item.tags = item.unsaved_tags
    except SyntaxError:
        raise NotFound("Parser SyntaxError")

    # Everything executed fine. We successfully imported the scan.
    res = TestResource()
    uri = res.get_resource_uri(t)
    raise ImmediateHttpResponse(HttpCreated(location=uri))
def save(self):
    """Re-import scan results into an existing Test (DRF serializer save).

    Matches parsed findings against the test's existing findings:
    re-activates previously mitigated matches, creates new findings for
    unmatched items, and mitigates findings no longer reported.
    Returns the Test. Raises Exception on parser errors.
    """
    data = self.validated_data
    test = data['test']
    scan_type = data['scan_type']
    min_sev = data['minimum_severity']
    scan_date = data['scan_date']
    verified = data['verified']
    active = data['active']
    try:
        parser = import_parser_factory(
            data['file'],
            test,
            data['scan_type'],
        )
    except ValueError:
        raise Exception("Parser ValueError")
    try:
        items = parser.items
        # Snapshot of pre-import findings; anything not re-seen gets mitigated.
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        for item in items:
            sev = item.severity
            # Normalize severity aliases to 'Info'.
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'
            # Skip findings below the requested minimum severity.
            if (Finding.SEVERITIES[sev] >
                    Finding.SEVERITIES[min_sev]):
                continue
            # Veracode/Arachni titles are not unique enough; compare
            # descriptions too.
            if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    description=item.description).all()
            else:
                findings = Finding.objects.filter(
                    title=item.title,
                    test=test,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(
                        sev)).all()
            if findings:
                # Existing finding matched: re-open it if it was mitigated.
                finding = findings[0]
                if finding.mitigated:
                    finding.mitigated = None
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified
                    finding.save()
                    note = Notes(
                        entry="Re-activated by %s re-upload." % scan_type,
                        author=self.context['request'].user)
                    note.save()
                    finding.notes.add(note)
                    reactivated_count += 1
                # Appended outside the mitigated check so still-active
                # matches are also excluded from the mitigation pass below.
                new_items.append(finding)
            else:
                # No match: persist the parsed item as a new finding.
                item.test = test
                item.date = test.target_start
                item.reporter = self.context['request'].user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = self.context['request'].user
                item.verified = verified
                item.active = active
                item.save()
                finding_added_count += 1
                # NOTE: appends the id (int) here but a Finding object in the
                # branch above; harmless for the set difference because brand
                # new findings are never in original_items anyway.
                new_items.append(item.id)
                finding = item
                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=req_resp['req'],
                            burpResponseBase64=req_resp['resp'])
                        burp_rr.clean()
                        burp_rr.save()
                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=item.unsaved_request,
                        burpResponseBase64=item.unsaved_response)
                    burp_rr.clean()
                    burp_rr.save()
            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    ep, created = Endpoint.objects.get_or_create(
                        protocol=endpoint.protocol,
                        host=endpoint.host,
                        path=endpoint.path,
                        query=endpoint.query,
                        fragment=endpoint.fragment,
                        product=test.engagement.product)
                    finding.endpoints.add(ep)
                # if item.unsaved_tags:
                #     finding.tags = item.unsaved_tags
        # Findings present before the import but not matched by this scan
        # are considered fixed and get mitigated.
        to_mitigate = set(original_items) - set(new_items)
        for finding in to_mitigate:
            finding.mitigated = datetime.datetime.combine(
                scan_date, timezone.now().time())
            finding.mitigated_by = self.context['request'].user
            finding.active = False
            finding.save()
            note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                         author=self.context['request'].user)
            note.save()
            finding.notes.add(note)
            mitigated_count += 1
    except SyntaxError:
        raise Exception("Parser SyntaxError")
    return test
def re_import_scan_results(request, tid):
    """Django view: re-upload a scan into an existing Test.

    Matches findings by title/severity (plus description for Veracode and
    Arachni), re-activates mitigated matches, adds new findings, mitigates
    findings absent from the new report, and posts per-category success
    messages plus a notification.
    """
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()
    # Pre-populate the tag widget with the test's current tags.
    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()
            try:
                items = parser.items
                # Ids of pre-import findings; anything not re-seen gets
                # mitigated below.
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    # Normalize severity aliases to 'Info'.
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    # Veracode/Arachni titles are not unique enough; compare
                    # descriptions too.
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev),
                            description=item.description
                            )
                    else:
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev),
                            )
                    # Only treat an unambiguous single match as "existing".
                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        # Matched findings (active or re-activated) are
                        # excluded from the mitigation pass below.
                        new_items.append(find.id)
                    else:
                        # Zero (or ambiguous) matches: save the item as new.
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item
                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                burp_rr = BurpRawRequestResponse(
                                    finding=find,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                    )
                                burp_rr.clean()
                                burp_rr.save()
                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(
                                finding=find,
                                burpRequestBase64=item.unsaved_request,
                                burpResponseBase64=item.unsaved_response,
                                )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=t.engagement.product)
                            find.endpoints.add(ep)
                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags
                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date,
                                                         timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(request, messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request, messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding', 'added') + ', that are new to scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request, messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding', 'reactivated') + ', that are back in scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request, messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding', 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')
                create_notification(event='results_added',
                                    title='Results added',
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request, messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')
    add_breadcrumb(parent=t, title="Re-upload a %s" % scan_type,
                   top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   'additional_message': additional_message,
                   })
def test_anchore_one_finding(self): testfile = open("dojo/unittests/scans/anchore/one_vuln.json") parser = import_parser_factory(testfile, Test(), False, False, 'Anchore Engine Scan') findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings))