Example No. 1
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t, active, verified)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                        item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      description=item.description
                                                      )
                    else:
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      )

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = scan_date
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                if scan_type == "Arachni Scan":
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"],
                                        burpResponseBase64=req_resp["resp"],
                                    )
                                else:
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"].encode("utf-8"),
                                        burpResponseBase64=req_resp["resp"].encode("utf-8"),
                                    )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=item.unsaved_request.encode("utf-8"),
                                                             burpResponseBase64=item.unsaved_response.encode("utf-8"),
                                                             )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                         host=endpoint.host,
                                                                         path=endpoint.path,
                                                                         query=endpoint.query,
                                                                         fragment=endpoint.fragment,
                                                                         product=t.engagement.product)
                            find.endpoints.add(ep)

                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags

                    find.save()
                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding',
                                                                                       'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding',
                                                                 'added') + ', that are new to scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding',
                                                                 'reactivated') + ', that are back in scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding',
                                                                 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')

                create_notification(event='results_added', title=str(finding_count) + " findings for " + engagement.product.name, finding_count=finding_count, test=t, engagement=engagement, url=reverse('view_test', args=(t.id,)))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id, title="Re-upload a %s" % scan_type, tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'product_tab': product_tab,
                   'eid': engagement.id,
                   'additional_message': additional_message,
                   })
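The success messages above rely on a message() helper that is not shown in the example. Below is a minimal sketch of such a pluralizing helper, inferred from calls like message(finding_count, 'finding', 'processed'); the real implementation in the codebase may differ.

def message(count, noun, verb):
    # Hypothetical helper, inferred from the calls above; not taken from the source.
    plural = '' if count == 1 else 's'
    return '{} {}{} {}'.format(count, noun, plural, verb)

# message(3, 'finding', 'added') -> '3 findings added'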
Example No. 2
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    test = get_object_or_404(Test, id=tid)
    # by default we keep a trace of the scan_type used to create the test
    # if it's not here, we use the "name" of the test type
    # this feature exists to provide custom label for tests for some parsers
    if test.scan_type:
        scan_type = test.scan_type
    else:
        scan_type = test.test_type.name
    engagement = test.engagement
    form = ReImportScanForm(test=test)
    jform = None
    jira_project = jira_helper.get_jira_project(test)
    push_all_jira_issues = jira_helper.is_push_all_issues(test)

    # Decide if we need to present the Push to JIRA form
    if get_system_setting('enable_jira') and jira_project:
        jform = JIRAImportScanForm(push_all=push_all_jira_issues,
                                   prefix='jiraform')

    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES, test=test)
        if jira_project:
            jform = JIRAImportScanForm(request.POST,
                                       push_all=push_all_jira_issues,
                                       prefix='jiraform')
        if form.is_valid() and (jform is None or jform.is_valid()):
            scan_date = form.cleaned_data['scan_date']

            minimum_severity = form.cleaned_data['minimum_severity']
            scan = request.FILES.get('file', None)
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = form.cleaned_data['tags']
            version = form.cleaned_data.get('version', None)
            branch_tag = form.cleaned_data.get('branch_tag', None)
            build_id = form.cleaned_data.get('build_id', None)
            commit_hash = form.cleaned_data.get('commit_hash', None)
            api_scan_configuration = form.cleaned_data.get(
                'api_scan_configuration', None)
            service = form.cleaned_data.get('service', None)

            endpoints_to_add = None  # not available on reimport UI

            close_old_findings = form.cleaned_data.get('close_old_findings',
                                                       True)

            group_by = form.cleaned_data.get('group_by', None)

            # Tags are replaced, same behaviour as with django-tagging
            test.tags = tags
            test.version = version
            if scan and is_scan_file_too_large(scan):
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Report file is too large. Maximum supported size is {} MB"
                    .format(settings.SCAN_FILE_MAX_SIZE),
                    extra_tags='alert-danger')
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))

            push_to_jira = push_all_jira_issues or (
                jform and jform.cleaned_data.get('push_to_jira'))
            error = False
            finding_count, new_finding_count, closed_finding_count, reactivated_finding_count, untouched_finding_count = 0, 0, 0, 0, 0
            reimporter = ReImporter()
            try:
                test, finding_count, new_finding_count, closed_finding_count, reactivated_finding_count, untouched_finding_count, _ = \
                    reimporter.reimport_scan(scan, scan_type, test, active=active, verified=verified,
                                                tags=None, minimum_severity=minimum_severity,
                                                endpoints_to_add=endpoints_to_add, scan_date=scan_date,
                                                version=version, branch_tag=branch_tag, build_id=build_id,
                                                commit_hash=commit_hash, push_to_jira=push_to_jira,
                                                close_old_findings=close_old_findings, group_by=group_by,
                                                api_scan_configuration=api_scan_configuration, service=service)
            except Exception as e:
                logger.exception(e)
                add_error_message_to_response(
                    'An exception error occurred during the report import:%s' %
                    str(e))
                error = True

            if not error:
                message = construct_imported_message(
                    scan_type,
                    finding_count,
                    new_finding_count=new_finding_count,
                    closed_finding_count=closed_finding_count,
                    reactivated_finding_count=reactivated_finding_count,
                    untouched_finding_count=untouched_finding_count)
                add_success_message_to_response(message)

            return HttpResponseRedirect(reverse('view_test', args=(test.id, )))

    product_tab = Product_Tab(engagement.product.id,
                              title="Re-upload a %s" % scan_type,
                              tab="engagements")
    product_tab.setEngagement(engagement)
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    form.initial['api_scan_configuration'] = test.api_scan_configuration
    form.fields[
        'api_scan_configuration'].queryset = Product_API_Scan_Configuration.objects.filter(
            product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'eid': engagement.id,
            'additional_message': additional_message,
            'jform': jform,
            'scan_types': get_scan_types_sorted(),
        })
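This refactored variant redirects to a named route re_import_scan_results when the upload is too large, so it assumes a URL pattern along these lines. The path string and import path below are assumptions; only the route name and the tid argument come from the code above.

# urls.py sketch (illustrative only)
from django.urls import re_path

from dojo.test import views  # import path is an assumption

urlpatterns = [
    re_path(r'^test/(?P<tid>\d+)/re_import_scan_results$',
            views.re_import_scan_results,
            name='re_import_scan_results'),
]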
Example No. 3
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                        # keep the item's severity in sync with the normalized value used for lookups
                        item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      description=item.description
                                                      )
                    else:
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      )

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                burp_rr = BurpRawRequestResponse(finding=find,
                                                                 burpRequestBase64=req_resp["req"],
                                                                 burpResponseBase64=req_resp["resp"],
                                                                 )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=item.unsaved_request,
                                                             burpResponseBase64=item.unsaved_response,
                                                             )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                         host=endpoint.host,
                                                                         path=endpoint.path,
                                                                         query=endpoint.query,
                                                                         fragment=endpoint.fragment,
                                                                         product=t.engagement.product)
                            find.endpoints.add(ep)

                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding',
                                                                                       'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding',
                                                                 'added') + ', that are new to scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding',
                                                                 'reactivated') + ', that are back in scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding',
                                                                 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')

                create_notification(event='results_added', title='Results added', finding_count=finding_count, test=t, engagement=engagement, url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=t, title="Re-upload a %s" % scan_type, top_level=False, request=request)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   'additional_message': additional_message,
                   })
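Both of the older variants skip findings that do not clear the selected minimum severity via Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]. Below is a minimal sketch of that filter, assuming a mapping in which a larger number means a less severe finding; the exact values are an assumption, only the ordering is implied by the skip condition.

# Sketch of the minimum-severity filter; numeric values are assumptions.
SEVERITIES = {'Critical': 0, 'High': 1, 'Medium': 2, 'Low': 3, 'Info': 4}

def clears_minimum(severity, minimum_severity):
    # Keep a finding only if it is at least as severe as the chosen minimum.
    return SEVERITIES[severity] <= SEVERITIES[minimum_severity]

# clears_minimum('High', 'Medium') -> True; clears_minimum('Low', 'Medium') -> False (skipped)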
Example No. 4
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    test = get_object_or_404(Test, id=tid)
    scan_type = test.test_type.name
    engagement = test.engagement
    form = ReImportScanForm()
    jform = None
    push_all_jira_issues = False

    # Decide if we need to present the Push to JIRA form
    if get_system_setting(
            'enable_jira') and engagement.product.jira_pkey_set.first(
            ) is not None:
        push_all_jira_issues = engagement.product.jira_pkey_set.first(
        ).push_all_issues
        jform = JIRAImportScanForm(push_all=push_all_jira_issues,
                                   prefix='jiraform')

    form.initial['tags'] = [tag.name for tag in test.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']

            scan_date_time = datetime.combine(scan_date, timezone.now().time())
            if settings.USE_TZ:
                scan_date_time = timezone.make_aware(
                    scan_date_time, timezone.get_default_timezone())

            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES.get('file', None)
            scan_type = test.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            test.tags = ts
            if file and is_scan_file_too_large(file):
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Report file is too large. Maximum supported size is {} MB"
                    .format(settings.SCAN_FILE_MAX_SIZE),
                    extra_tags='alert-danger')
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))

            try:
                parser = import_parser_factory(file, test, active, verified)
            except ValueError:
                raise Http404()
            except Exception as e:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "An error has occurred in the parser, please see error "
                    "log for details.",
                    extra_tags='alert-danger')
                parse_logger.exception(e)
                parse_logger.error("Error in parser: {}".format(str(e)))
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))

            try:
                items = parser.items
                original_items = test.finding_set.all().values_list("id",
                                                                    flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                # Push to Jira?

                push_to_jira = False
                if push_all_jira_issues:
                    push_to_jira = True
                elif 'jiraform-push_to_jira' in request.POST:
                    jform = JIRAImportScanForm(request.POST,
                                               prefix='jiraform',
                                               push_all=push_all_jira_issues)
                    if jform.is_valid():
                        push_to_jira = jform.cleaned_data.get('push_to_jira')
                for item in items:

                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                        item.severity = sev

                    # existing findings may be from before we had component_name/version fields
                    component_name = item.component_name if hasattr(
                        item, 'component_name') else None
                    component_version = item.component_version if hasattr(
                        item, 'component_version') else None

                    # If it doesn't clear minimum severity, move on
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # Try to find the existing finding
                    # If it's Veracode or Arachni, then we consider the description for some
                    # reason...
                    from titlecase import titlecase
                    item.title = titlecase(item.title)
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                            description=item.description)

                    else:
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev))

                    if len(finding) == 1:
                        finding = finding[0]
                        if finding.mitigated or finding.is_Mitigated:
                            # it was once fixed, but now back
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.mitigated_by = None
                            finding.active = True
                            finding.verified = verified

                            # existing findings may be from before we had component_name/version fields
                            finding.component_name = finding.component_name if finding.component_name else component_name
                            finding.component_version = finding.component_version if finding.component_version else component_version

                            finding.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." %
                                scan_type,
                                author=request.user)
                            note.save()
                            finding.notes.add(note)

                            endpoint_status = finding.endpoint_status.all()
                            for status in endpoint_status:
                                status.mitigated_by = None
                                status.mitigated_time = None
                                status.mitigated = False
                                status.last_modified = timezone.now()
                                status.save()

                            reactivated_count += 1
                        else:
                            # existing findings may be from before we had component_name/version fields
                            if not finding.component_name or not finding.component_version:
                                finding.component_name = finding.component_name if finding.component_name else component_name
                                finding.component_version = finding.component_version if finding.component_version else component_version
                                finding.save(dedupe_option=False,
                                             push_to_jira=False)

                        new_items.append(finding.id)
                    else:
                        item.test = test
                        if item.date == timezone.now().date():
                            item.date = test.target_start.date()
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active

                        # Save it
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        # Add it to the new items
                        new_items.append(item.id)
                        finding = item

                        if hasattr(item, 'unsaved_req_resp') and len(
                                item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                if scan_type == "Arachni Scan":
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"],
                                        burpResponseBase64=req_resp["resp"],
                                    )
                                else:
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=base64.b64encode(
                                            req_resp["req"].encode("utf-8")),
                                        burpResponseBase64=base64.b64encode(
                                            req_resp["resp"].encode("utf-8")),
                                    )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=base64.b64encode(
                                    item.unsaved_request.encode()),
                                burpResponseBase64=base64.b64encode(
                                    item.unsaved_response.encode()),
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if finding:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding, endpoint=ep)
                            ep.endpoint_status.add(eps)

                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        for endpoint in form.cleaned_data['endpoints']:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding, endpoint=ep)
                            ep.endpoint_status.add(eps)

                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        if item.unsaved_tags is not None:
                            finding.tags = item.unsaved_tags

                    # Save it. This may be the second time we save it in this function.
                    finding.save(push_to_jira=push_to_jira)
                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    if not finding.mitigated or not finding.is_Mitigated:
                        finding.mitigated = scan_date_time
                        finding.is_Mitigated = True
                        finding.mitigated_by = request.user
                        finding.active = False
                        finding.save()
                        note = Notes(entry="Mitigated by %s re-upload." %
                                     scan_type,
                                     author=request.user)
                        note.save()
                        finding.notes.add(note)
                        mitigated_count += 1

                        endpoint_status = finding.endpoint_status.all()
                        for status in endpoint_status:
                            status.mitigated_by = request.user
                            status.mitigated_time = timezone.now()
                            status.mitigated = True
                            status.last_modified = timezone.now()
                            status.save()

                test.updated = max_safe([scan_date_time, test.updated])
                test.engagement.updated = max_safe(
                    [scan_date_time, test.engagement.updated])

                test.save()
                test.engagement.save()

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type +
                    message(finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(finding_added_count, 'finding', 'added') +
                        ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(reactivated_count, 'finding', 'reactivated') +
                        ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(mitigated_count, 'finding', 'mitigated') +
                        '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(event='scan_added',
                                    title=str(finding_count) +
                                    " findings for " +
                                    test.engagement.product.name,
                                    finding_count=finding_count,
                                    test=test,
                                    engagement=test.engagement,
                                    url=reverse('view_test', args=(test.id, )))

                return HttpResponseRedirect(
                    reverse('view_test', args=(test.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id,
                              title="Re-upload a %s" % scan_type,
                              tab="engagements")
    product_tab.setEngagement(engagement)
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'eid': engagement.id,
            'additional_message': additional_message,
            'jform': jform,
        })
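The two newer variants guard the upload with is_scan_file_too_large(file) and report settings.SCAN_FILE_MAX_SIZE in MB. Below is a minimal sketch of such a guard, assuming the setting is a limit expressed in megabytes; the real helper may differ.

from django.conf import settings


def is_scan_file_too_large(uploaded_file):
    # UploadedFile.size is in bytes; compare against the limit converted to bytes.
    return uploaded_file.size > settings.SCAN_FILE_MAX_SIZE * 1024 * 1024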