def import_scan_results(request, eid):
    """Import a scan report file into an engagement.

    Creates a Test of the selected scan type (in the "Development"
    environment), optionally attaches a credential mapping, then parses
    the uploaded report and stores each finding together with its Burp
    request/response blobs, endpoints and tags.

    :param request: HttpRequest; POST carries ImportScanForm and
        CredMappingForm data plus the uploaded report file.
    :param eid: primary key of the target Engagement (404 if missing).
    :return: redirect to the new test on success, otherwise re-renders
        the import form.
    :raises Http404: unknown scan type or unparsable report file.
    """
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    cred_form = CredMappingForm()
    # Only offer credentials already mapped to this engagement.
    cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
        engagement=engagement).order_by('cred_id')

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject scan types that are not among the supported choices.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(
                engagement=engagement,
                test_type=tt,
                target_start=scan_date,
                target_end=scan_date,
                environment=environment,
                percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list
                    # and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    # BUGFIX: .first() returns None when the selected credential
                    # is not mapped to this engagement; previously this raised
                    # AttributeError on cred_user.cred_id instead of skipping.
                    if cred_user is not None:
                        new_f = cred_form.save(commit=False)
                        new_f.test = t
                        new_f.cred_id = cred_user.cred_id
                        new_f.save()

            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                for item in parser.items:
                    # Normalize both "informational" spellings to 'Info'.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    item.save()

                    # Persist raw Burp request/response pairs, if the parser
                    # collected any for this finding.
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=item,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()

                    # Attach (deduplicated per product) endpoints to the finding.
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    add_breadcrumb(
        parent=engagement,
        title="Import Scan Results",
        top_level=False,
        request=request)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'eid': engagement.id,
        'cred_form': cred_form,
    })
def re_import_scan_results(request, tid):
    """Re-import a scan report into an existing Test.

    Findings in the new report are matched against the test's existing
    findings (by title/severity — plus description for Veracode and
    Arachni scans); matched-but-mitigated findings are re-activated,
    unmatched ones are created, and existing findings absent from the
    report are marked as mitigated.

    :param request: HttpRequest; POST carries the ReImportScanForm data.
    :param tid: primary key of the Test being re-imported (404 if missing).
    :return: redirect to the test on success, otherwise re-renders the form.
    :raises Http404: when the report cannot be matched to a parser.
    """
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()
    form.initial['tags'] = [tag.name for tag in t.tags]

    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t, active, verified)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0

                for item in items:
                    # Normalize both "informational" spellings to 'Info'.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # Veracode/Arachni matching also considers the description.
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev),
                            description=item.description)
                    else:
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev))

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." % scan_type,
                                author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        # No unique match: treat the parsed item as a new finding.
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                    # Raw Burp request/response pairs; Arachni payloads are
                    # already base64, everything else is encoded here.
                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            if scan_type == "Arachni Scan":
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                )
                            else:
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=req_resp["req"].encode("utf-8"),
                                    burpResponseBase64=req_resp["resp"].encode("utf-8"),
                                )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=find,
                            burpRequestBase64=item.unsaved_request.encode("utf-8"),
                            burpResponseBase64=item.unsaved_response.encode("utf-8"),
                        )
                        burp_rr.clean()
                        burp_rr.save()

                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=t.engagement.product)
                            find.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        find.tags = item.unsaved_tags
                    find.save()

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(
                        scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(
                        entry="Mitigated by %s re-upload." % scan_type,
                        author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(finding_added_count, 'finding', 'added') +
                        ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(reactivated_count, 'finding', 'reactivated') +
                        ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(mitigated_count, 'finding', 'mitigated') +
                        '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(
                    event='results_added',
                    title=str(finding_count) + " findings for " + engagement.product.name,
                    finding_count=finding_count,
                    test=t,
                    engagement=engagement,
                    url=reverse('view_test', args=(t.id,)))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id,
                              title="Re-upload a %s" % scan_type,
                              tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'product_tab': product_tab,
        'eid': engagement.id,
        'additional_message': additional_message,
    })
def re_import_scan_results(request, tid):
    """Re-import a scan report into an existing Test (JIRA-aware variant).

    Matches report findings against the test's existing findings (by
    titlecased title/severity — plus description for Veracode and
    Arachni); matched-but-mitigated findings are re-activated (including
    their endpoint statuses), unmatched findings are created, and
    existing findings absent from the report are mitigated. Optionally
    pushes findings to JIRA when the product's JIRA config requests it.

    :param request: HttpRequest; POST carries ReImportScanForm (and
        optional JIRAImportScanForm, prefix ``jiraform``) data.
    :param tid: primary key of the Test being re-imported (404 if missing).
    :return: redirect to the test on success, otherwise re-renders the form.
    :raises Http404: when the report cannot be matched to a parser.
    """
    # PERF FIX: hoisted out of the per-finding loop below — previously the
    # import statement was re-executed for every parsed item.
    from titlecase import titlecase

    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    test = get_object_or_404(Test, id=tid)
    scan_type = test.test_type.name
    engagement = test.engagement
    form = ReImportScanForm()
    jform = None
    push_all_jira_issues = False
    # Decide if we need to present the Push to JIRA form
    if get_system_setting(
            'enable_jira') and engagement.product.jira_pkey_set.first(
            ) is not None:
        push_all_jira_issues = engagement.product.jira_pkey_set.first(
        ).push_all_issues
        jform = JIRAImportScanForm(push_all=push_all_jira_issues,
                                   prefix='jiraform')
    form.initial['tags'] = [tag.name for tag in test.tags]

    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            # Anchor mitigation timestamps on the user-supplied scan date,
            # made timezone-aware when the project runs with USE_TZ.
            scan_date_time = datetime.combine(scan_date, timezone.now().time())
            if settings.USE_TZ:
                scan_date_time = timezone.make_aware(
                    scan_date_time, timezone.get_default_timezone())
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES.get('file', None)
            scan_type = test.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            test.tags = ts
            if file and is_scan_file_too_large(file):
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Report file is too large. Maximum supported size is {} MB".format(
                        settings.SCAN_FILE_MAX_SIZE),
                    extra_tags='alert-danger')
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))
            try:
                parser = import_parser_factory(file, test, active, verified)
            except ValueError:
                raise Http404()
            except Exception as e:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "An error has occurred in the parser, please see error "
                    "log for details.",
                    extra_tags='alert-danger')
                parse_logger.exception(e)
                parse_logger.error("Error in parser: {}".format(str(e)))
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))

            try:
                items = parser.items
                original_items = test.finding_set.all().values_list(
                    "id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                # Push to Jira?
                push_to_jira = False
                if push_all_jira_issues:
                    push_to_jira = True
                elif 'jiraform-push_to_jira' in request.POST:
                    jform = JIRAImportScanForm(request.POST,
                                               prefix='jiraform',
                                               push_all=push_all_jira_issues)
                    if jform.is_valid():
                        push_to_jira = jform.cleaned_data.get('push_to_jira')

                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev

                    # existing findings may be from before we had component_name/version fields
                    component_name = item.component_name if hasattr(
                        item, 'component_name') else None
                    component_version = item.component_version if hasattr(
                        item, 'component_version') else None

                    # If it doesn't clear minimum severity, move on
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # Try to find the existing finding
                    # If it's Veracode or Arachni, then we consider the description for some
                    # reason...
                    item.title = titlecase(item.title)
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                            description=item.description)
                    else:
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev))

                    if len(finding) == 1:
                        finding = finding[0]
                        if finding.mitigated or finding.is_Mitigated:
                            # it was once fixed, but now back
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.mitigated_by = None
                            finding.active = True
                            finding.verified = verified
                            # existing findings may be from before we had component_name/version fields
                            finding.component_name = finding.component_name if finding.component_name else component_name
                            finding.component_version = finding.component_version if finding.component_version else component_version
                            finding.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." % scan_type,
                                author=request.user)
                            note.save()
                            finding.notes.add(note)
                            # Clear the mitigated state on the finding's
                            # endpoint statuses as well.
                            endpoint_status = finding.endpoint_status.all()
                            for status in endpoint_status:
                                status.mitigated_by = None
                                status.mitigated_time = None
                                status.mitigated = False
                                status.last_modified = timezone.now()
                                status.save()
                            reactivated_count += 1
                        else:
                            # existing findings may be from before we had component_name/version fields
                            if not finding.component_name or not finding.component_version:
                                finding.component_name = finding.component_name if finding.component_name else component_name
                                finding.component_version = finding.component_version if finding.component_version else component_version
                                finding.save(dedupe_option=False,
                                             push_to_jira=False)
                        new_items.append(finding.id)
                    else:
                        item.test = test
                        if item.date == timezone.now().date():
                            item.date = test.target_start.date()
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        # Save it
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        # Add it to the new items
                        new_items.append(item.id)
                        finding = item

                    # Raw Burp request/response pairs; Arachni payloads are
                    # already base64, everything else is encoded here.
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            if scan_type == "Arachni Scan":
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                )
                            else:
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=base64.b64encode(
                                        req_resp["req"].encode("utf-8")),
                                    burpResponseBase64=base64.b64encode(
                                        req_resp["resp"].encode("utf-8")),
                                )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=base64.b64encode(
                                item.unsaved_request.encode()),
                            burpResponseBase64=base64.b64encode(
                                item.unsaved_response.encode()),
                        )
                        burp_rr.clean()
                        burp_rr.save()

                    if finding:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding,
                                endpoint=ep)
                            ep.endpoint_status.add(eps)
                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        # Endpoints selected on the form apply to every finding.
                        for endpoint in form.cleaned_data['endpoints']:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding,
                                endpoint=ep)
                            ep.endpoint_status.add(eps)
                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        if item.unsaved_tags is not None:
                            finding.tags = item.unsaved_tags

                        # Save it. This may be the second time we save it in this function.
                        finding.save(push_to_jira=push_to_jira)

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    if not finding.mitigated or not finding.is_Mitigated:
                        finding.mitigated = scan_date_time
                        finding.is_Mitigated = True
                        finding.mitigated_by = request.user
                        finding.active = False
                        finding.save()
                        note = Notes(
                            entry="Mitigated by %s re-upload." % scan_type,
                            author=request.user)
                        note.save()
                        finding.notes.add(note)
                        mitigated_count += 1

                        endpoint_status = finding.endpoint_status.all()
                        for status in endpoint_status:
                            status.mitigated_by = request.user
                            status.mitigated_time = timezone.now()
                            status.mitigated = True
                            status.last_modified = timezone.now()
                            status.save()

                # Never move the updated timestamps backwards.
                test.updated = max_safe([scan_date_time, test.updated])
                test.engagement.updated = max_safe(
                    [scan_date_time, test.engagement.updated])
                test.save()
                test.engagement.save()

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(finding_added_count, 'finding',
                                                'added') + ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(reactivated_count, 'finding',
                                                'reactivated') +
                        ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(mitigated_count, 'finding',
                                                'mitigated') +
                        '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(
                    event='scan_added',
                    title=str(finding_count) + " findings for " + test.engagement.product.name,
                    finding_count=finding_count,
                    test=test,
                    engagement=test.engagement,
                    url=reverse('view_test', args=(test.id, )))

                return HttpResponseRedirect(
                    reverse('view_test', args=(test.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id,
                              title="Re-upload a %s" % scan_type,
                              tab="engagements")
    product_tab.setEngagement(engagement)
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'eid': engagement.id,
            'additional_message': additional_message,
            'jform': jform,
        })
def re_import_scan_results(request, tid):
    """Re-import a scan report into an existing Test (legacy variant).

    Matches report findings against existing findings (by title/severity,
    plus description for Veracode and Arachni), re-activates matched
    mitigated findings, creates unmatched ones, and mitigates existing
    findings absent from the report.

    :param request: HttpRequest; POST carries the ReImportScanForm data.
    :param tid: primary key of the Test being re-imported (404 if missing).
    :return: redirect to the test on success, otherwise re-renders the form.
    :raises Http404: when the report cannot be matched to a parser.
    """
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated. The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()
    form.initial['tags'] = [tag.name for tag in t.tags]

    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    # BUGFIX: write the normalized severity back onto the item,
                    # consistent with the other import/re-import views in this
                    # file. Previously the item was filtered and matched on the
                    # normalized value but saved with the raw
                    # 'Information'/'Informational' severity.
                    item.severity = sev
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # Veracode/Arachni matching also considers the description.
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev),
                            description=item.description)
                    else:
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(sev))

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." % scan_type,
                                author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=find,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=find,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()

                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=t.engagement.product)
                            find.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        find.tags = item.unsaved_tags

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(
                        scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(
                        entry="Mitigated by %s re-upload." % scan_type,
                        author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(finding_added_count, 'finding', 'added') +
                        ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(reactivated_count, 'finding', 'reactivated') +
                        ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' + message(mitigated_count, 'finding', 'mitigated') +
                        '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(
                    event='results_added',
                    title='Results added',
                    finding_count=finding_count,
                    test=t,
                    engagement=engagement,
                    url=request.build_absolute_uri(
                        reverse('view_test', args=(t.id,))))
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    add_breadcrumb(
        parent=t,
        title="Re-upload a %s" % scan_type,
        top_level=False,
        request=request)
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'eid': engagement.id,
        'additional_message': additional_message,
    })
def import_scan_results(request, eid=None, pid=None):
    """Import a scan report into an engagement or product (JIRA-aware variant).

    When called with a product id only, an ad-hoc engagement is created
    on the fly. Creates a Test of the selected scan type, optionally
    attaches a credential mapping, parses the report and stores findings
    (Burp request/response blobs, endpoints, tags), optionally pushing
    them to JIRA.

    :param request: HttpRequest; POST carries ImportScanForm,
        CredMappingForm and optional JIRAFindingForm (prefix ``jiraform``).
    :param eid: primary key of the target Engagement, or None.
    :param pid: primary key of the target Product (ad-hoc import), or None.
    :return: redirect to the new test on success, otherwise re-renders
        the import form.
    :raises Http404: unknown scan type.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    enabled = False
    jform = None

    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if get_system_setting(
                'enable_jira') and engagement.product.jira_pkey_set.first(
                ) is not None:
            enabled = engagement.product.jira_pkey_set.first().push_all_issues
            jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')
    elif pid:
        product = get_object_or_404(Product, id=pid)
        if get_system_setting(
                'enable_jira') and product.jira_pkey_set.first() is not None:
            enabled = product.jira_pkey_set.first().push_all_issues
            jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            # Allows for a test to be imported with an engagement created on the fly
            if engagement is None:
                engagement = Engagement()
                # product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime(
                    "%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()
            file = request.FILES.get('file')
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject scan types that are not among the supported choices.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()

            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(
                engagement=engagement,
                test_type=tt,
                target_start=scan_date,
                target_end=scan_date,
                environment=environment,
                percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and
                    # only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    # BUGFIX: .first() returns None when the selected credential
                    # is not mapped to this engagement; previously this raised
                    # AttributeError on cred_user.cred_id instead of skipping.
                    if cred_user is not None:
                        new_f = cred_form.save(commit=False)
                        new_f.test = t
                        new_f.cred_id = cred_user.cred_id
                        new_f.save()

            try:
                parser = import_parser_factory(file, t, active, verified)
            except Exception as e:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "An error has occurred in the parser, please see error "
                    "log for details.",
                    extra_tags='alert-danger')
                parse_logger.exception(e)
                parse_logger.error("Error in parser: {}".format(str(e)))
                return HttpResponseRedirect(
                    reverse('import_scan_results', args=(eid, )))

            try:
                # Push to Jira?
                push_to_jira = False
                if enabled:
                    push_to_jira = True
                elif 'jiraform-push_to_jira' in request.POST:
                    jform = JIRAFindingForm(request.POST,
                                            prefix='jiraform',
                                            enabled=enabled)
                    if jform.is_valid():
                        push_to_jira = jform.cleaned_data.get('push_to_jira')

                # BUGFIX: removed leftover debug print statements
                # (print("item blowup") / print(item)) that ran for every
                # parsed finding in this production view.
                for item in parser.items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    item.test = t
                    if item.date == timezone.now().date():
                        item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    # Some parsers manage active/verified themselves; only
                    # apply the form values when this one does not.
                    if not handles_active_verified_statuses(
                            form.get_scan_type()):
                        item.active = active
                        item.verified = verified
                    item.save(dedupe_option=False, false_history=True)

                    # Raw Burp request/response pairs; Arachni payloads are
                    # already base64, everything else is encoded here.
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            if form.get_scan_type() == "Arachni Scan":
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                )
                            else:
                                burp_rr = BurpRawRequestResponse(
                                    finding=item,
                                    burpRequestBase64=base64.b64encode(
                                        req_resp["req"].encode("utf-8")),
                                    burpResponseBase64=base64.b64encode(
                                        req_resp["resp"].encode("utf-8")),
                                )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=base64.b64encode(
                                item.unsaved_request.encode()),
                            burpResponseBase64=base64.b64encode(
                                item.unsaved_response.encode()),
                        )
                        burp_rr.clean()
                        burp_rr.save()

                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    # Endpoints selected on the form apply to every finding.
                    for endpoint in form.cleaned_data['endpoints']:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)

                    item.save(false_history=True, push_to_jira=push_to_jira)

                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags

                    finding_count += 1

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')

                create_notification(
                    event='results_added',
                    initiator=request.user,
                    title=str(finding_count) + " findings for " + engagement.product.name,
                    finding_count=finding_count,
                    test=t,
                    engagement=engagement,
                    url=reverse('view_test', args=(t.id, )))

                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'custom_breadcrumb': custom_breadcrumb,
            'title': title,
            'cred_form': cred_form,
            'jform': jform
        })
def import_scan_results(request, eid=None, pid=None):
    """Django view: import a scan report file as a new Test.

    Accepts either an existing engagement id (``eid``) or a product id
    (``pid``); with only ``pid``, an ad-hoc engagement is created on the
    fly.  On a valid POST, creates a Test for the selected scan type,
    parses the uploaded report, persists each finding above the chosen
    minimum severity (plus request/response pairs, endpoints, and tags),
    creates a JIRA issue per finding via ``add_issue``, and redirects to
    the new test.  On GET or form errors, renders the import page.

    Raises Http404 for an unknown scan type or an unparseable file type.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        # Only offer credentials already mapped to this engagement.
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            # Allows for a test to be imported with an engagement created
            # on the fly (pid-only call path).
            if engagement is None:
                engagement = Engagement()
                product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime(
                    "%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject any scan type not present in the supported choices.
            if not any(scan_type in code
                       for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(engagement=engagement, test_type=tt,
                     target_start=scan_date, target_end=scan_date,
                     environment=environment, percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list
                    # and only allow if the credential is associated with the product.
                    # NOTE(review): .first() may return None here — confirm the
                    # form guarantees an engagement-scoped credential.
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                # No parser available for this file/scan type.
                raise Http404()
            try:
                for item in parser.items:
                    # Normalize severity aliases to the canonical 'Info'.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    # Replace a parser-default date of "today" with the scan date.
                    if item.date == timezone.now().date():
                        item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    # Initial save with deduplication disabled.
                    item.save(dedupe_option=False)
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=item,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    # Attach (or create) each endpoint reported by the parser.
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    # Second save after related objects are attached.
                    item.save()
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    # patched: JIRA issue creation for the imported finding.
                    add_issue(item, True)
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                create_notification(
                    event='results_added',
                    title=str(finding_count) + " findings for " + engagement.product.name,
                    finding_count=finding_count,
                    test=t,
                    engagement=engagement,
                    url=request.build_absolute_uri(
                        reverse('view_test', args=(t.id, ))))
                return HttpResponseRedirect(reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')
    # GET request or invalid form: render the import page with the proper tab.
    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'custom_breadcrumb': custom_breadcrumb,
            'title': title,
            'cred_form': cred_form,
        })
def import_scan_results(request, eid):
    """Django view: import a scan report file into an existing engagement.

    On a valid POST, creates a Test for the selected scan type, parses the
    uploaded report, persists each finding above the chosen minimum
    severity (plus request/response pairs, endpoints, and tags), sends a
    'results_added' notification, and redirects to the new test.  On GET
    or form errors, renders the import page.

    Raises Http404 for an unknown scan type or an unparseable file type.
    """
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            # NOTE(review): scan_date is read here but never applied to the
            # Test (created below without target dates) — confirm intended.
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            scan_type = request.POST['scan_type']
            # Reject any scan type not present in the supported choices.
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(name="Development")
            t = Test(engagement=engagement, test_type=tt)
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                # No parser available for this file/scan type.
                raise Http404()
            try:
                for item in parser.items:
                    # Normalize severity aliases to the canonical 'Info'.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    item.date = timezone.now()
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.save()
                    # Persist any raw request/response pairs collected by the parser.
                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(finding=item,
                                                             burpRequestBase64=req_resp["req"],
                                                             burpResponseBase64=req_resp["resp"],
                                                             )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=item,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response,
                                                         )
                        burp_rr.clean()
                        burp_rr.save()
                    # Attach (or create) each endpoint reported by the parser.
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                     host=endpoint.host,
                                                                     fqdn=endpoint.fqdn,
                                                                     port=endpoint.port,
                                                                     path=endpoint.path,
                                                                     query=endpoint.query,
                                                                     fragment=endpoint.fragment,
                                                                     product=t.engagement.product)
                        item.endpoints.add(ep)
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')
                create_notification(event='results_added',
                                    title='Results added',
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')
    # GET request or invalid form: render the import page.
    add_breadcrumb(parent=engagement, title="Import Scan Results", top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   })
def import_scan_results(request, eid):
    """Django view: import a scan report file into an existing engagement.

    On a valid POST, creates a Test dated by the submitted scan date,
    parses the uploaded report, persists each finding above the chosen
    minimum severity (plus raw request/response pairs and endpoints), and
    redirects to the new test.  On GET or form errors, renders the import
    page.

    Raises Http404 for an unknown scan type or an unparseable file type.
    """
    engagement = get_object_or_404(Engagement, id=eid)
    finding_count = 0
    form = ImportScanForm()
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            scan_type = request.POST['scan_type']
            # Reject any scan type not present in the supported choices.
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(name="Development")
            t = Test(engagement=engagement, test_type=tt, target_start=scan_date,
                     target_end=scan_date, environment=environment, percent_complete=100)
            t.full_clean()
            t.save()
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                # No parser available for this file/scan type.
                raise Http404()
            try:
                for item in parser.items:
                    # Normalize severity aliases to the canonical 'Info'.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = request.user
                    item.save()
                    # Persist a raw request/response pair if the parser supplied one.
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=item,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response,
                                                         )
                        burp_rr.clean()
                        burp_rr.save()
                    # Attach (or create) each endpoint reported by the parser.
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                     host=endpoint.host,
                                                                     path=endpoint.path,
                                                                     query=endpoint.query,
                                                                     fragment=endpoint.fragment,
                                                                     product=t.engagement.product)
                        item.endpoints.add(ep)
                    finding_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     scan_type + ' processed, a total of ' + message(finding_count, 'finding', 'processed'),
                                     extra_tags='alert-success')
                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')
    # GET request or invalid form: render the import page.
    add_breadcrumb(parent=engagement, title="Import Scan Results", top_level=False, request=request)
    return render(request, 'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   })
def import_scan_results(request, eid=None, pid=None):
    """Django view: import a scan report file as a new Test.

    Accepts either an existing engagement id (``eid``) or a product id
    (``pid``); with only ``pid``, an ad-hoc engagement is created on the
    fly.  On a valid POST, creates a Test for the selected scan type,
    parses the uploaded report, persists each finding above the chosen
    minimum severity (plus request/response pairs, endpoints, and tags),
    sends a 'results_added' notification, and redirects to the new test.
    On GET or form errors, renders the import page.

    Raises Http404 for an unknown scan type or an unparseable file type.

    Fixes vs. previous revision:
    - removed Python-2-only debug ``print`` statements (SyntaxError on
      Python 3, and debug output does not belong in a production view);
    - wrapped ``import_parser_factory`` in try/except ValueError ->
      Http404, matching the other revisions of this view, so an
      unsupported file type 404s instead of raising a server error;
    - renamed local ``file`` to ``scan_file`` to stop shadowing the builtin.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        # Only offer credentials already mapped to this engagement.
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(engagement=engagement).order_by('cred_id')
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if form.is_valid():
            # Allows for a test to be imported with an engagement created
            # on the fly (pid-only call path).
            if engagement is None:
                engagement = Engagement()
                product = get_object_or_404(Product, id=pid)
                engagement.name = "AdHoc Import - " + strftime("%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.save()
            scan_file = request.FILES['file']
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            # Reject any scan type not present in the supported choices.
            if not any(scan_type in code for code in ImportScanForm.SCAN_TYPE_CHOICES):
                raise Http404()
            tt, t_created = Test_Type.objects.get_or_create(name=scan_type)
            # will save in development environment
            environment, env_created = Development_Environment.objects.get_or_create(
                name="Development")
            t = Test(
                engagement=engagement,
                test_type=tt,
                target_start=scan_date,
                target_end=scan_date,
                environment=environment,
                percent_complete=100)
            t.lead = request.user
            t.full_clean()
            t.save()
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list
                    # and only allow if the credential is associated with the product.
                    # NOTE(review): .first() may return None here — confirm the
                    # form guarantees an engagement-scoped credential.
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    new_f = cred_form.save(commit=False)
                    new_f.test = t
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()
            try:
                parser = import_parser_factory(scan_file, t)
            except ValueError:
                # No parser available for this file/scan type.
                raise Http404()
            try:
                for item in parser.items:
                    # Normalize severity aliases to the canonical 'Info'.
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                    item.severity = sev
                    # Skip findings below the requested minimum severity.
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue
                    item.test = t
                    # Replace a parser-default date of "today" with the scan date.
                    if item.date == timezone.now().date():
                        item.date = t.target_start
                    item.reporter = request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = request.user
                    item.active = active
                    item.verified = verified
                    # Initial save with deduplication disabled.
                    item.save(dedupe_option=False, false_history=True)
                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=item,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=item,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                    # Attach (or create) each endpoint reported by the parser.
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=t.engagement.product)
                        item.endpoints.add(ep)
                    # Second save after related objects are attached.
                    item.save(false_history=True)
                    if item.unsaved_tags is not None:
                        item.tags = item.unsaved_tags
                    finding_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    scan_type + ' processed, a total of ' + message(
                        finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                create_notification(
                    event='results_added',
                    title=str(finding_count) + " findings for " + engagement.product.name,
                    finding_count=finding_count,
                    test=t,
                    engagement=engagement,
                    url=request.build_absolute_uri(
                        reverse('view_test', args=(t.id, ))))
                return HttpResponseRedirect(
                    reverse('view_test', args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')
    # GET request or invalid form: render the import page with the proper tab.
    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    return render(request, 'dojo/import_scan_results.html', {
        'form': form,
        'product_tab': product_tab,
        'custom_breadcrumb': custom_breadcrumb,
        'title': title,
        'cred_form': cred_form,
    })