Example #1
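    # Import mapping hook: wraps a non-empty column value in a Note (authored by the
    # first User in the database) and attaches it to the finding.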
    def map_column_value(self, finding, column_value):
        if column_value != "":
            user = User.objects.all().first()
            note = Notes(entry=column_value, author=user)
            note.save()
            finding.reporter_id = user.id
            finding.save()
            finding.notes.add(note)
Example #2
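# Engagement view: offers only active, verified, non-duplicate findings that are not
# already covered by a risk acceptance, then saves the uploaded acceptance together
# with an optional Note.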
def upload_risk(request, eid):
    eng = Engagement.objects.get(id=eid)
    # exclude the findings already accepted
    exclude_findings = [
        finding.id for ra in eng.risk_acceptance.all()
        for finding in ra.accepted_findings.all()
    ]
    eng_findings = Finding.objects.filter(active=True, verified=True, duplicate=False, test__in=eng.test_set.all()) \
        .exclude(id__in=exclude_findings).order_by('title')

    if request.method == 'POST':
        form = UploadRiskForm(request.POST, request.FILES)
        if form.is_valid():
            findings = form.cleaned_data['accepted_findings']
            for finding in findings:
                finding.active = False
                finding.save()
            risk = form.save(commit=False)
            risk.reporter = form.cleaned_data['reporter']
            risk.expiration_date = form.cleaned_data['expiration_date']
            risk.accepted_by = form.cleaned_data['accepted_by']
            risk.compensating_control = form.cleaned_data['compensating_control']
            risk.path = form.cleaned_data['path']
            risk.save()  # have to save before findings can be added
            risk.accepted_findings.set(findings)  # direct M2M assignment was removed in Django 2.0
            if form.cleaned_data['notes']:
                notes = Notes(
                    entry=form.cleaned_data['notes'],
                    author=request.user,
                    date=timezone.now())
                notes.save()
                risk.notes.add(notes)

            risk.save()  # saving notes and findings
            eng.risk_acceptance.add(risk)
            eng.save()
            messages.add_message(
                request,
                messages.SUCCESS,
                'Risk exception saved.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_engagement', args=(eid, )))
    else:
        form = UploadRiskForm(initial={'reporter': request.user})

    form.fields["accepted_findings"].queryset = eng_findings
    product_tab = Product_Tab(eng.product.id, title="Upload Risk Exception", tab="engagements")
    product_tab.setEngagement(eng)

    return render(request, 'dojo/up_risk.html', {
                  'eng': eng,
                  'product_tab': product_tab,
                  'form': form
                  })
Example #3
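# JIRA webhook receiver: mirrors the JIRA issue's resolution state onto the linked
# finding and converts JIRA comments into Notes on the finding.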
def webhook(request):
    # Webhook shouldn't be active if jira isn't enabled
    if not get_system_setting('enable_jira'):
        raise PermissionDenied
    elif not get_system_setting('enable_jira_web_hook'):
        raise PermissionDenied

    if request.method == 'POST':
        parsed = json.loads(request.body)
        if 'issue' in parsed.keys():
            jid = parsed['issue']['id']
            jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
            if jissue.finding is not None:
                finding = jissue.finding
                resolved = True
                if parsed['issue']['fields']['resolution'] is None:
                    resolved = False
                if finding.active == resolved:
                    if finding.active:
                        now = timezone.now()
                        finding.active = False
                        finding.mitigated = now
                        finding.endpoints.clear()
                    else:
                        finding.active = True
                        finding.mitigated = None
                    finding.save()
            """
            if jissue.engagement is not None:
                eng = jissue.engagement
                if parsed['issue']['fields']['resolution'] != None:
                    eng.active = False
                    eng.status = 'Completed'
                    eng.save()
           """
        else:
            comment_text = parsed['comment']['body']
            commentor = parsed['comment']['updateAuthor']['displayName']
            jid = parsed['comment']['self'].split('/')[7]
            jissue = JIRA_Issue.objects.get(jira_id=jid)
            finding = jissue.finding
            new_note = Notes()
            new_note.entry = '(%s): %s' % (commentor, comment_text)
            new_note.author, created = User.objects.get_or_create(username='******')
            new_note.save()
            finding.notes.add(new_note)
            finding.save()
    return HttpResponse('')
Example #4
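# Variant of the upload_risk view in Example #2: same flow, but without the
# expiration/accepted-by/compensating-control fields, using breadcrumbs instead of a
# product tab and a localtz-based timestamp instead of timezone.now().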
def upload_risk(request, eid):
    eng = Engagement.objects.get(id=eid)
    # exclude the findings already accepted
    exclude_findings = [finding.id for ra in eng.risk_acceptance.all()
                        for finding in ra.accepted_findings.all()]
    eng_findings = Finding.objects.filter(test__in=eng.test_set.all()) \
        .exclude(id__in=exclude_findings).order_by('title')

    if request.method == 'POST':
        form = UploadRiskForm(request.POST, request.FILES)
        if form.is_valid():
            findings = form.cleaned_data['accepted_findings']
            for finding in findings:
                finding.active = False
                finding.save()
            risk = form.save(commit=False)
            risk.reporter = form.cleaned_data['reporter']
            risk.path = form.cleaned_data['path']
            risk.save()  # have to save before findings can be added
            risk.accepted_findings.set(findings)  # direct M2M assignment was removed in Django 2.0
            if form.cleaned_data['notes']:
                notes = Notes(entry=form.cleaned_data['notes'],
                              author=request.user,
                              date=localtz.localize(datetime.today()))
                notes.save()
                risk.notes.add(notes)

            risk.save()  # saving notes and findings
            eng.risk_acceptance.add(risk)
            eng.save()
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Risk acceptance saved.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_engagement', args=(eid,)))
    else:
        form = UploadRiskForm(initial={'reporter': request.user})

    form.fields["accepted_findings"].queryset = eng_findings
    add_breadcrumb(parent=eng, title="Upload Risk Acceptance", top_level=False, request=request)
    return render(request, 'dojo/up_risk.html',
                  {'eng': eng, 'form': form})
Example #5
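    # DRF-style action: POST validates the payload, creates a Note, and attaches it
    # to the finding; GET returns the finding's existing notes.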
    def notes(self, request, pk=None):
        finding = get_object_or_404(Finding.objects, id=pk)
        if request.method == 'POST':
            new_note = serializers.AddNewNoteOptionSerializer(
                data=request.data)
            if new_note.is_valid():
                entry = new_note.validated_data['entry']
                private = new_note.validated_data['private']
            else:
                return Response(new_note.errors,
                                status=status.HTTP_400_BAD_REQUEST)

            author = request.user
            note = Notes(entry=entry, author=author, private=private)
            note.save()
            finding.notes.add(note)

            serialized_note = serializers.NoteSerializer({
                "author": author,
                "entry": entry,
                "private": private
            })
            return Response(serialized_note.data, status=status.HTTP_200_OK)
        notes = finding.notes.all()

        serialized_notes = []
        if notes:
            serialized_notes = serializers.FindingToNotesSerializer({
                "finding_id": finding,
                "notes": notes
            })
            return Response(serialized_notes.data, status=status.HTTP_200_OK)

        return Response(serialized_notes, status=status.HTTP_200_OK)
Example #6
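# Captures the reason for a review request as a Note, flags the finding as under
# review, and notifies the selected reviewers.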
def request_finding_review(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    user = get_object_or_404(Dojo_User, id=request.user.id)
    # in order to review a finding, we need to capture why a review is needed
    # we can do this with a Note
    if request.method == 'POST':
        form = ReviewFindingForm(request.POST)

        if form.is_valid():
            now = timezone.now()
            new_note = Notes()

            new_note.entry = "Review Request: " + form.cleaned_data['entry']
            new_note.author = request.user
            new_note.date = now
            new_note.save()
            finding.notes.add(new_note)
            finding.active = False
            finding.verified = False
            finding.under_review = True
            finding.review_requested_by = user
            finding.last_reviewed = now
            finding.last_reviewed_by = request.user

            users = form.cleaned_data['reviewers']
            finding.reviewers.set(users)
            finding.save()

            create_notification(event='review_requested',
                                title='Finding review requested',
                                description='User %s has requested that you please review the finding "%s" for accuracy:\n\n%s'
                                            % (user, finding.title, new_note),
                                icon='check',
                                url=request.build_absolute_uri(reverse("view_finding", args=(finding.id,))))

            messages.add_message(
                request,
                messages.SUCCESS,
                'Finding marked for review and reviewers notified.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_finding', args=(finding.id, )))

    else:
        form = ReviewFindingForm()

    add_breadcrumb(parent=finding,
                   title="Review Finding",
                   top_level=False,
                   request=request)
    return render(request, 'dojo/review_finding.html', {
        'finding': finding,
        'user': user,
        'form': form
    })
Example #7
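# Counterpart to the review request above: only the requester or an assigned
# reviewer may clear the review, and the resolution is again captured as a Note.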
def clear_finding_review(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    user = get_object_or_404(Dojo_User, id=request.user.id)
    # in order to clear a review for a finding, we need to capture why and how it was reviewed
    # we can do this with a Note

    if user != finding.review_requested_by and user not in finding.reviewers.all():
        return HttpResponseForbidden()

    if request.method == 'POST':
        form = ClearFindingReviewForm(request.POST, instance=finding)

        if form.is_valid():
            now = datetime.now(tz=localtz)
            new_note = Notes()
            new_note.entry = "Review Cleared: " + form.cleaned_data['entry']
            new_note.author = request.user
            new_note.date = now
            new_note.save()

            finding = form.save(commit=False)

            finding.under_review = False
            finding.last_reviewed = now
            finding.last_reviewed_by = request.user

            finding.reviewers.set([])
            finding.save()

            finding.notes.add(new_note)

            messages.add_message(
                request,
                messages.SUCCESS,
                'Finding review has been updated successfully.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_finding', args=(finding.id, )))

    else:
        form = ClearFindingReviewForm(instance=finding)

    add_breadcrumb(parent=finding,
                   title="Clear Finding Review",
                   top_level=False,
                   request=request)
    return render(request, 'dojo/clear_finding_review.html', {
        'finding': finding,
        'user': user,
        'form': form
    })
Example #8
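    # Reimport helper: closes findings that are absent from the new scan report,
    # mitigates their endpoint statuses, and records the change as a Note.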
    def close_old_findings(self, test, to_mitigate, scan_date_time, user, push_to_jira=None):
        logger.debug('IMPORT_SCAN: Closing findings no longer present in scan report')
        mitigated_findings = []
        for finding in to_mitigate:
            if not finding.mitigated or not finding.is_mitigated:
                logger.debug('mitigating finding: %i:%s', finding.id, finding)
                finding.mitigated = scan_date_time
                finding.is_mitigated = True
                finding.mitigated_by = user
                finding.active = False

                endpoint_status = finding.endpoint_status.all()
                for status in endpoint_status:
                    status.mitigated_by = user
                    status.mitigated_time = timezone.now()
                    status.mitigated = True
                    status.last_modified = timezone.now()
                    status.save()

                # to avoid pushing a finding group multiple times, we push those outside of the loop
                if settings.FEATURE_FINDING_GROUPS and finding.finding_group:
                    # don't try to dedupe findings that we are closing
                    finding.save(dedupe_option=False)
                else:
                    finding.save(push_to_jira=push_to_jira, dedupe_option=False)

                note = Notes(entry="Mitigated by %s re-upload." % test.test_type,
                             author=user)
                note.save()
                finding.notes.add(note)
                mitigated_findings.append(finding)

        if settings.FEATURE_FINDING_GROUPS and push_to_jira:
            for finding_group in set([finding.finding_group for finding in to_mitigate if finding.finding_group is not None]):
                jira_helper.push_to_jira(finding_group)

        return mitigated_findings
Example #9
def request_finding_review(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    user = get_object_or_404(Dojo_User, id=request.user.id)
    # in order to review a finding, we need to capture why a review is needed
    # we can do this with a Note
    if request.method == 'POST':
        form = ReviewFindingForm(request.POST)

        if form.is_valid():
            now = datetime.now(tz=localtz)
            new_note = Notes()

            new_note.entry = "Review Request: " + form.cleaned_data['entry']
            new_note.author = request.user
            new_note.date = now
            new_note.save()
            finding.notes.add(new_note)
            finding.active = False
            finding.verified = False
            finding.under_review = True
            finding.review_requested_by = user
            finding.last_reviewed = now
            finding.last_reviewed_by = request.user

            users = form.cleaned_data['reviewers']
            finding.reviewers.set(users)
            finding.save()

            send_review_email(request, user, finding, users, new_note)

            messages.add_message(
                request,
                messages.SUCCESS,
                'Finding marked for review and reviewers notified.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_finding', args=(finding.id, )))

    else:
        form = ReviewFindingForm()

    add_breadcrumb(parent=finding,
                   title="Review Finding",
                   top_level=False,
                   request=request)
    return render(request, 'dojo/review_finding.html', {
        'finding': finding,
        'user': user,
        'form': form
    })
Example #10
def webhook(request):
    # Webhook shouldn't be active if jira isn't enabled
    if not get_system_setting('enable_jira'):
        raise PermissionDenied
    elif not get_system_setting('enable_jira_web_hook'):
        raise PermissionDenied

    if request.method == 'POST':
        parsed = json.loads(request.body)
        if 'issue' in parsed.keys():
            jid = parsed['issue']['id']
            jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
            if jissue.finding is not None:
                finding = jissue.finding
                resolved = True
                if parsed['issue']['fields']['resolution'] is None:
                    resolved = False
                if finding.active == resolved:
                    if finding.active:
                        now = timezone.now()
                        finding.active = False
                        finding.mitigated = now
                        finding.endpoints.clear()
                    else:
                        finding.active = True
                        finding.mitigated = None
                    finding.save()
            """
            if jissue.engagement is not None:
                eng = jissue.engagement
                if parsed['issue']['fields']['resolution'] != None:
                    eng.active = False
                    eng.status = 'Completed'
                    eng.save()
           """
        else:
            comment_text = parsed['comment']['body']
            commentor = parsed['comment']['updateAuthor']['displayName']
            jid = parsed['comment']['self'].split('/')[7]
            jissue = JIRA_Issue.objects.get(jira_id=jid)
            finding = jissue.finding
            new_note = Notes()
            new_note.entry = '(%s): %s' % (commentor, comment_text)
            new_note.author, created = User.objects.get_or_create(
                username='******')
            new_note.save()
            finding.notes.add(new_note)
            finding.save()
    return HttpResponse('')
Example #11
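    # Management command: polls JIRA for resolved issues linked to active, verified
    # findings and flags each for defect review with a Note and an alert.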
    def handle(self, *args, **options):

        findings = Finding.objects.exclude(jira_issue__isnull=True)
        findings = findings.filter(verified=True, active=True)
        findings = findings.prefetch_related('jira_issue')
        # finding = Finding.objects.get(id=1)
        for finding in findings:
            #    try:
            JIRAError.log_to_tempfile = False
            jira = jira_helper.get_jira_connection(finding)
            j_issue = finding.jira_issue
            issue = jira.issue(j_issue.jira_id)

            # Issue Cloned
            print(issue.fields.issuelinks[0])

            print("Jira Issue: " + str(issue))
            print("Resolution: " + str(issue.fields.resolution))

            if issue.fields.resolution is not None \
                    and not finding.under_defect_review:
                # print issue.fields.__dict__
                print("Jira Issue: " + str(issue) + " changed status")

                # Create Jira Note
                now = timezone.now()
                new_note = Notes()
                new_note.entry = "Please Review Jira Request: " + str(
                    issue) + ". Review status has changed to " + str(
                    issue.fields.resolution) + "."
                new_note.author = User.objects.get(username='******')
                new_note.date = now
                new_note.save()
                finding.notes.add(new_note)
                finding.under_defect_review = True
                dojo_user = Dojo_User.objects.get(username='******')
                finding.defect_review_requested_by = dojo_user

                # Create alert to notify user
                jira_helper.log_jira_message("Jira issue status change, please review.",
                                             finding)
                finding.save()
            else:
                print("No update necessary")
Example #12
def request_finding_review(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    user = get_object_or_404(Dojo_User, id=request.user.id)
    # in order to review a finding, we need to capture why a review is needed
    # we can do this with a Note
    if request.method == 'POST':
        form = ReviewFindingForm(request.POST)

        if form.is_valid():
            now = timezone.now()
            new_note = Notes()

            new_note.entry = "Review Request: " + form.cleaned_data['entry']
            new_note.author = request.user
            new_note.date = now
            new_note.save()
            finding.notes.add(new_note)
            finding.active = False
            finding.verified = False
            finding.under_review = True
            finding.review_requested_by = user
            finding.last_reviewed = now
            finding.last_reviewed_by = request.user

            users = form.cleaned_data['reviewers']
            finding.reviewers.set(users)
            finding.save()

            create_notification(event='review_requested',
                                title='Finding review requested',
                                description='User %s has requested that you please review the finding "%s" for accuracy:\n\n%s'
                                            % (user, finding.title, new_note),
                                icon='check',
                                url=request.build_absolute_uri(reverse("view_finding", args=(finding.id,))))

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding marked for review and reviewers notified.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_finding', args=(finding.id,)))

    else:
        form = ReviewFindingForm()

    add_breadcrumb(parent=finding, title="Review Finding", top_level=False, request=request)
    return render(request, 'dojo/review_finding.html',
                  {'finding': finding,
                   'user': user, 'form': form})
Example #13
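# Deletes the JIRA_Issue link for a finding and records the unlink as a Note,
# explicitly skipping any push back to JIRA.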
def finding_unlink_jira(request, finding):
    logger.debug('removing linked jira issue %s for finding %i',
                 finding.jira_issue.jira_key, finding.id)
    finding.jira_issue.delete()
    finding.save(push_to_jira=False,
                 dedupe_option=False,
                 issue_updater_option=False)

    jira_issue_url = get_jira_url(finding)

    new_note = Notes()
    new_note.entry = 'unlinked JIRA issue %s from finding' % (jira_issue_url)
    new_note.author = request.user
    new_note.save()
    finding.notes.add(new_note)
    return True
Example #14
def clear_finding_review(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    user = get_object_or_404(Dojo_User, id=request.user.id)
    # in order to clear a review for a finding, we need to capture why and how it was reviewed
    # we can do this with a Note

    if user != finding.review_requested_by and user not in finding.reviewers.all():
        return HttpResponseForbidden()

    if request.method == 'POST':
        form = ClearFindingReviewForm(request.POST, instance=finding)

        if form.is_valid():
            now = timezone.now()
            new_note = Notes()
            new_note.entry = "Review Cleared: " + form.cleaned_data['entry']
            new_note.author = request.user
            new_note.date = now
            new_note.save()

            finding = form.save(commit=False)

            finding.under_review = False
            finding.last_reviewed = now
            finding.last_reviewed_by = request.user

            finding.reviewers.set([])
            finding.save()

            finding.notes.add(new_note)

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding review has been updated successfully.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_finding', args=(finding.id,)))

    else:
        form = ClearFindingReviewForm(instance=finding)

    add_breadcrumb(parent=finding, title="Clear Finding Review", top_level=False, request=request)
    return render(request, 'dojo/clear_finding_review.html',
                  {'finding': finding,
                   'user': user, 'form': form})
Example #15
    def handle(self, *args, **options):

        findings = Finding.objects.exclude(jira_issue__isnull=True)
        findings = findings.filter(verified=True, active=True)
        # finding = Finding.objects.get(id=1)
        for finding in findings:
            #    try:
            JIRAError.log_to_tempfile = False
            jira = get_jira_connection(finding)
            j_issue = JIRA_Issue.objects.get(finding=finding)
            issue = jira.issue(j_issue.jira_id)

            # Issue Cloned
            print(issue.fields.issuelinks[0])

            print("Jira Issue: " + str(issue))
            print("Resolution: " + str(issue.fields.resolution))

            if issue.fields.resolution is not None \
                    and not finding.under_defect_review:
                # print issue.fields.__dict__
                print("Jira Issue: " + str(issue) + " changed status")

                # Create Jira Note
                now = timezone.now()
                new_note = Notes()
                new_note.entry = "Please Review Jira Request: " + str(
                    issue) + ". Review status has changed to " + str(
                    issue.fields.resolution) + "."
                new_note.author = User.objects.get(username='******')
                new_note.date = now
                new_note.save()
                finding.notes.add(new_note)
                finding.under_defect_review = True
                dojo_user = Dojo_User.objects.get(username='******')
                finding.defect_review_requested_by = dojo_user

                # Create alert to notify user
                log_jira_message("Jira issue status change, please review.",
                                 finding)
                finding.save()
            else:
                print "No update necessary"
Example #16
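# Links an existing JIRA issue to a finding and records the link as a Note; see the
# in-code comment about why the link timestamps use the current local time.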
def finding_link_jira(request, finding, new_jira_issue_key):
    logger.debug('linking existing jira issue %s for finding %i',
                 new_jira_issue_key, finding.id)

    existing_jira_issue = jira_get_issue(get_jira_project(finding),
                                         new_jira_issue_key)

    jira_project = get_jira_project(finding)

    if not existing_jira_issue:
        raise ValueError('JIRA issue not found or cannot be retrieved: ' +
                         new_jira_issue_key)

    jira_issue = JIRA_Issue(jira_id=existing_jira_issue.id,
                            jira_key=existing_jira_issue.key,
                            finding=finding,
                            jira_project=jira_project)

    jira_issue.jira_key = new_jira_issue_key
    # JIRA timestamps are in ISO format: 'updated': '2020-07-17T09:49:51.447+0200'
    # seems to be a pain to parse these in python < 3.7, so for now just record the current
    # time as the timestamp the jira link was created / updated in DD
    jira_issue.jira_creation = timezone.now()
    jira_issue.jira_change = timezone.now()

    jira_issue.save()

    finding.save(push_to_jira=False,
                 dedupe_option=False,
                 issue_updater_option=False)

    jira_issue_url = get_jira_url(finding)

    new_note = Notes()
    new_note.entry = 'linked JIRA issue %s to finding' % (jira_issue_url)
    new_note.author = request.user
    new_note.save()
    finding.notes.add(new_note)
    return True
Example #17
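    # Reimport core: matches parsed findings to existing ones with the configured
    # deduplication algorithm, then reactivates, updates, or creates findings.
    # Caveat kept from the original: the now=timezone.now() default below is
    # evaluated once at import time, a classic Python default-argument pitfall.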
    def process_parsed_findings(self,
                                test,
                                parsed_findings,
                                scan_type,
                                user,
                                active,
                                verified,
                                minimum_severity=None,
                                endpoints_to_add=None,
                                push_to_jira=None,
                                group_by=None,
                                now=timezone.now(),
                                service=None):

        items = parsed_findings
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        reactivated_items = []
        unchanged_count = 0
        unchanged_items = []

        logger.debug('starting reimport of %i items.',
                     len(items) if items else 0)
        from dojo.importers.reimporter.utils import (
            get_deduplication_algorithm_from_conf,
            match_new_finding_to_existing_finding, update_endpoint_status)
        deduplication_algorithm = get_deduplication_algorithm_from_conf(
            scan_type)

        i = 0
        logger.debug(
            'STEP 1: looping over findings from the reimported report and trying to match them to existing findings'
        )
        deduplicationLogger.debug(
            'Algorithm used for matching new findings to existing findings: %s',
            deduplication_algorithm)
        for item in items:
            # FIXME hack to remove when all parsers have unit tests for this attribute
            if item.severity.lower().startswith(
                    'info') and item.severity != 'Info':
                item.severity = 'Info'

            item.numerical_severity = Finding.get_numerical_severity(
                item.severity)

            if minimum_severity and (Finding.SEVERITIES[item.severity] >
                                     Finding.SEVERITIES[minimum_severity]):
                # finding's severity is below the configured threshold : ignoring the finding
                continue

            # existing findings may be from before we had component_name/version fields
            component_name = item.component_name if hasattr(
                item, 'component_name') else None
            component_version = item.component_version if hasattr(
                item, 'component_version') else None

            if not hasattr(item, 'test'):
                item.test = test

            item.service = service

            item.hash_code = item.compute_hash_code()
            deduplicationLogger.debug("item's hash_code: %s", item.hash_code)

            findings = match_new_finding_to_existing_finding(
                item, test, deduplication_algorithm, scan_type)

            deduplicationLogger.debug(
                'found %i findings matching with current new finding',
                len(findings))

            if findings:
                # existing finding found
                finding = findings[0]
                if finding.false_p or finding.out_of_scope or finding.risk_accepted:
                    logger.debug(
                        '%i: skipping existing finding (it is marked as false positive:%s and/or out of scope:%s or is a risk accepted:%s): %i:%s:%s:%s',
                        i, finding.false_p, finding.out_of_scope,
                        finding.risk_accepted, finding.id, finding,
                        finding.component_name, finding.component_version)
                elif finding.mitigated or finding.is_mitigated:
                    logger.debug('%i: reactivating: %i:%s:%s:%s', i,
                                 finding.id, finding, finding.component_name,
                                 finding.component_version)
                    finding.mitigated = None
                    finding.is_mitigated = False
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified

                    # existing findings may be from before we had component_name/version fields
                    finding.component_name = finding.component_name if finding.component_name else component_name
                    finding.component_version = finding.component_version if finding.component_version else component_version

                    # don't dedupe before endpoints are added
                    finding.save(dedupe_option=False)
                    note = Notes(entry="Re-activated by %s re-upload." %
                                 scan_type,
                                 author=user)
                    note.save()
                    endpoint_status = finding.endpoint_status.all()
                    for status in endpoint_status:
                        status.mitigated_by = None
                        status.mitigated_time = None
                        status.mitigated = False
                        status.last_modified = timezone.now()
                        status.save()
                    finding.notes.add(note)
                    reactivated_items.append(finding)
                    reactivated_count += 1
                else:
                    # existing findings may be from before we had component_name/version fields
                    logger.debug('%i: updating existing finding: %i:%s:%s:%s',
                                 i, finding.id, finding,
                                 finding.component_name,
                                 finding.component_version)
                    if not finding.component_name or not finding.component_version:
                        finding.component_name = finding.component_name if finding.component_name else component_name
                        finding.component_version = finding.component_version if finding.component_version else component_version
                        finding.save(dedupe_option=False)

                    unchanged_items.append(finding)
                    unchanged_count += 1
                if finding.dynamic_finding:
                    logger.debug(
                        "Re-import found an existing dynamic finding for this new finding. Checking the status of endpoints"
                    )
                    update_endpoint_status(finding, item, user)
            else:
                # no existing finding found
                item.reporter = user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = user
                item.verified = verified
                item.active = active
                # Save it. Don't dedupe before endpoints are added.
                item.save(dedupe_option=False)
                logger.debug(
                    '%i: reimport created new finding as no existing finding match: %i:%s:%s:%s',
                    i, item.id, item, item.component_name,
                    item.component_version)

                # only new items get auto grouped to avoid confusion around already existing items that are already grouped
                if settings.FEATURE_FINDING_GROUPS and group_by:
                    finding_helper.add_finding_to_auto_group(item, group_by)

                finding_added_count += 1
                new_items.append(item)
                finding = item

                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=base64.b64encode(
                                req_resp["req"].encode("utf-8")),
                            burpResponseBase64=base64.b64encode(
                                req_resp["resp"].encode("utf-8")))
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=base64.b64encode(
                            item.unsaved_request.encode()),
                        burpResponseBase64=base64.b64encode(
                            item.unsaved_response.encode()))
                    burp_rr.clean()
                    burp_rr.save()

            # for existing findings: make sure endpoints are present or created
            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    try:
                        endpoint.clean()
                    except ValidationError as e:
                        logger.warning(
                            "DefectDojo is storing broken endpoint because cleaning wasn't successful: "
                            "{}".format(e))

                    try:
                        ep, created = endpoint_get_or_create(
                            protocol=endpoint.protocol,
                            userinfo=endpoint.userinfo,
                            host=endpoint.host,
                            port=endpoint.port,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                    except MultipleObjectsReturned:
                        pass

                    try:
                        eps, created = Endpoint_Status.objects.get_or_create(
                            finding=finding, endpoint=ep)
                    except MultipleObjectsReturned:
                        pass

                    ep.endpoint_status.add(eps)
                    finding.endpoints.add(ep)
                    finding.endpoint_status.add(eps)

                if endpoints_to_add:
                    for endpoint in endpoints_to_add:
                        # TODO Not sure what happens here, we get an endpoint model and try to create it again?
                        try:
                            endpoint.clean()
                        except ValidationError as e:
                            logger.warning(
                                "DefectDojo is storing broken endpoint because cleaning wasn't successful: "
                                "{}".format(e))

                        try:
                            ep, created = endpoint_get_or_create(
                                protocol=endpoint.protocol,
                                userinfo=endpoint.userinfo,
                                host=endpoint.host,
                                port=endpoint.port,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                        except MultipleObjectsReturned:
                            pass
                        try:
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding, endpoint=ep)
                        except MultipleObjectsReturned:
                            pass

                        ep.endpoint_status.add(eps)
                        finding.endpoints.add(ep)
                        finding.endpoint_status.add(eps)

                if item.unsaved_tags:
                    finding.tags = item.unsaved_tags

                # existing findings may be from before we had component_name/version fields
                finding.component_name = finding.component_name if finding.component_name else component_name
                finding.component_version = finding.component_version if finding.component_version else component_version

                # finding = new finding or existing finding still in the upload report
                # to avoid pushing a finding group multiple times, we push those outside of the loop
                if settings.FEATURE_FINDING_GROUPS and finding.finding_group:
                    finding.save()
                else:
                    finding.save(push_to_jira=push_to_jira)

        to_mitigate = set(original_items) - set(reactivated_items) - set(
            unchanged_items)
        untouched = set(unchanged_items) - set(to_mitigate)

        if settings.FEATURE_FINDING_GROUPS and push_to_jira:
            for finding_group in set([
                    finding.finding_group for finding in reactivated_items +
                    unchanged_items + new_items
                    if finding.finding_group is not None
            ]):
                jira_helper.push_to_jira(finding_group)

        return new_items, reactivated_items, to_mitigate, untouched
Example #18
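    # Legacy Tastypie import hook: parses an uploaded scan, reactivates findings that
    # reappear, creates new ones, and mitigates findings missing from the report,
    # documenting each transition with a Note.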
    def obj_create(self, bundle, **kwargs):
        bundle.obj = ImportScanObject(initial=kwargs)
        self.is_valid(bundle)
        if bundle.errors:
            raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors))
        bundle = self.full_hydrate(bundle)

        test = getattr(bundle.obj, 'test_obj')
        scan_type = getattr(bundle.obj, 'scan_type')
        min_sev = getattr(bundle.obj, 'minimum_severity')
        scan_date = getattr(bundle.obj, 'scan_date')
        verified = getattr(bundle.obj, 'verified')
        active = getattr(bundle.obj, 'active')

        try:
            parser = import_parser_factory(bundle.data['file'], test)
        except ValueError:
            raise NotFound("Parser ValueError")

        try:
            items = parser.items
            original_items = test.finding_set.all().values_list("id", flat=True)
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0
            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

                if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                    continue

                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    find = Finding.objects.filter(title=item.title,
                                                  test__id=test.id,
                                                  severity=sev,
                                                  numerical_severity=Finding.get_numerical_severity(sev),
                                                  description=item.description
                                                  )
                else:
                    find = Finding.objects.filter(title=item.title,
                                                  test__id=test.id,
                                                  severity=sev,
                                                  numerical_severity=Finding.get_numerical_severity(sev),
                                                  )

                if len(find) == 1:
                    find = find[0]
                    if find.mitigated:
                        # it was once fixed, but now back
                        find.mitigated = None
                        find.mitigated_by = None
                        find.active = True
                        find.verified = verified
                        find.save()
                        note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                     author=bundle.request.user)
                        note.save()
                        find.notes.add(note)
                        reactivated_count += 1
                    new_items.append(find.id)
                else:
                    item.test = test
                    item.date = test.target_start
                    item.reporter = bundle.request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = bundle.request.user
                    item.verified = verified
                    item.active = active
                    item.save()
                    finding_added_count += 1
                    new_items.append(item.id)
                    find = item

                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=req_resp["req"],
                                                             burpResponseBase64=req_resp["resp"],
                                                             )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=find,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response,
                                                         )
                        burp_rr.clean()
                        burp_rr.save()
                if find:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                     host=endpoint.host,
                                                                     path=endpoint.path,
                                                                     query=endpoint.query,
                                                                     fragment=endpoint.fragment,
                                                                     product=test.engagement.product)
                        find.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        find.tags = item.unsaved_tags
            # calculate the difference
            to_mitigate = set(original_items) - set(new_items)
            for finding_id in to_mitigate:
                finding = Finding.objects.get(id=finding_id)
                finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                finding.mitigated_by = bundle.request.user
                finding.active = False
                finding.save()
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=bundle.request.user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        except SyntaxError:
            raise NotFound("Parser SyntaxError")

        # Everything executed fine. We successfully imported the scan.
        raise ImmediateHttpResponse(HttpCreated(location=getattr(bundle.obj, 'test')))
Example #19
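# Extended JIRA webhook: dispatches on webhookEvent and uses the resolution name to
# choose between risk acceptance, false positive, plain mitigation, or reopening.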
def webhook(request):
    # Webhook shouldn't be active if jira isn't enabled
    if not get_system_setting('enable_jira'):
        raise PermissionDenied
    elif not get_system_setting('enable_jira_web_hook'):
        raise PermissionDenied

    if request.method == 'POST':
        parsed = json.loads(request.body.decode('utf-8'))
        if parsed.get('webhookEvent') == 'jira:issue_updated':
            jid = parsed['issue']['id']
            jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
            if jissue.finding is not None:
                finding = jissue.finding
                jira_conf = finding.jira_conf()
                resolved = True
                resolution = parsed['issue']['fields']['resolution']
                if resolution is None:
                    resolved = False
                if finding.active == resolved:
                    if finding.active:
                        if jira_conf and resolution['name'] in jira_conf.accepted_resolutions:
                            finding.active = False
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.false_p = False
                            assignee = parsed['issue']['fields'].get('assignee')
                            assignee_name = assignee['name'] if assignee else None
                            Risk_Acceptance.objects.create(
                                accepted_by=assignee_name,
                                reporter=finding.reporter,
                            ).accepted_findings.set([finding])
                        elif jira_conf and resolution['name'] in jira_conf.false_positive_resolutions:
                            finding.active = False
                            finding.verified = False
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.false_p = True
                            finding.remove_from_any_risk_acceptance()
                        else:
                            # Mitigated by default as before
                            now = timezone.now()
                            finding.active = False
                            finding.mitigated = now
                            finding.endpoints.clear()
                            finding.false_p = False
                            finding.remove_from_any_risk_acceptance()
                    else:
                        # Reopen / Open Jira issue
                        finding.active = True
                        finding.mitigated = None
                        finding.is_Mitigated = False
                        finding.false_p = False
                        finding.remove_from_any_risk_acceptance()
                    finding.jira_change = timezone.now()
                    finding.save()
            """
            if jissue.engagement is not None:
                eng = jissue.engagement
                if parsed['issue']['fields']['resolution'] != None:
                    eng.active = False
                    eng.status = 'Completed'
                    eng.save()
           """
        if parsed.get('webhookEvent') == 'comment_created':
            comment_text = parsed['comment']['body']
            commentor = parsed['comment']['updateAuthor']['displayName']
            jid = parsed['comment']['self'].split('/')[7]
            jissue = JIRA_Issue.objects.get(jira_id=jid)
            finding = jissue.finding
            new_note = Notes()
            new_note.entry = '(%s): %s' % (commentor, comment_text)
            new_note.author, created = User.objects.get_or_create(
                username='******')
            new_note.save()
            finding.notes.add(new_note)
            finding.jira_change = timezone.now()
            finding.save()

        if parsed.get('webhookEvent') not in [
                'comment_created', 'jira:issue_updated'
        ]:
            logger.info('Unrecognized JIRA webhook event received: {}'.format(
                parsed.get('webhookEvent')))
    return HttpResponse('')
Example #20
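# View-layer re-import: reactivates findings that reappear in the uploaded report
# and mitigates those that are gone, mirroring the Tastypie importer above.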
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      description=item.description
                                                      )
                    else:
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      )

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                burp_rr = BurpRawRequestResponse(finding=find,
                                                                 burpRequestBase64=req_resp["req"],
                                                                 burpResponseBase64=req_resp["resp"],
                                                                 )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=item.unsaved_request,
                                                             burpResponseBase64=item.unsaved_response,
                                                             )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                         host=endpoint.host,
                                                                         path=endpoint.path,
                                                                         query=endpoint.query,
                                                                         fragment=endpoint.fragment,
                                                                         product=t.engagement.product)
                            find.endpoints.add(ep)

                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding',
                                                                                       'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding',
                                                                 'added') + ', that are new to scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding',
                                                                 'reactivated') + ', that are back in scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding',
                                                                 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')

                create_notification(event='results_added', title='Results added', finding_count=finding_count, test=t, engagement=engagement, url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=t, title="Re-upload a %s" % scan_type, top_level=False, request=request)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   'additional_message': additional_message,
                   })
Example #22
    def save(self):
        data = self.validated_data
        test = data['test']
        scan_type = data['scan_type']
        min_sev = data['minimum_severity']
        scan_date = data['scan_date']
        verified = data['verified']
        active = data['active']

        try:
            parser = import_parser_factory(data['file'],
                                           test,
                                           active,
                                           verified,
                                           data['scan_type'],)
        except ValueError:
            raise Exception("Parser ValueError")

        try:
            items = parser.items
            original_items = list(test.finding_set.all())
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0

            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

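                # skip findings below the selected minimum severity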
                if (Finding.SEVERITIES[sev] >
                        Finding.SEVERITIES[min_sev]):
                    continue

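                # Veracode and Arachni findings are additionally matched on description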
                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                        description=item.description).all()
                else:
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev)).all()

                if findings:
                    finding = findings[0]
                    if finding.mitigated:
                        finding.mitigated = None
                        finding.mitigated_by = None
                        finding.active = True
                        finding.verified = verified
                        finding.save()
                        note = Notes(
                            entry="Re-activated by %s re-upload." % scan_type,
                            author=self.context['request'].user)
                        note.save()
                        finding.notes.add(note)
                        reactivated_count += 1
                    new_items.append(finding)
                else:
                    item.test = test
                    item.date = scan_date
                    item.reporter = self.context['request'].user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = self.context['request'].user
                    item.verified = verified
                    item.active = active
                    item.save(dedupe_option=False)
                    finding_added_count += 1
                    new_items.append(item)  # append the Finding itself so the set difference below compares like types
                    finding = item

                    if hasattr(item, 'unsaved_req_resp'):
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=req_resp['req'],
                                burpResponseBase64=req_resp['resp'])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request and item.unsaved_response:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                if finding:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                        finding.endpoints.add(ep)

                    if item.unsaved_tags:
                        finding.tags = item.unsaved_tags

                    finding.save()

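            # findings present in the test but absent from this upload are treated as fixed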
            to_mitigate = set(original_items) - set(new_items)
            for finding in to_mitigate:
                finding.mitigated = datetime.datetime.combine(
                    scan_date,
                    timezone.now().time())
                if settings.USE_TZ:
                    finding.mitigated = timezone.make_aware(
                        finding.mitigated,
                        timezone.get_default_timezone())
                finding.mitigated_by = self.context['request'].user
                finding.active = False
                finding.save()
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=self.context['request'].user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        except SyntaxError:
            raise Exception("Parser SyntaxError")

        return test
Example #23
def webhook(request, secret=None):
    if not get_system_setting('enable_jira'):
        logger.debug('ignoring incoming webhook as JIRA is disabled.')
        raise Http404('JIRA disabled')
    elif not get_system_setting('enable_jira_web_hook'):
        logger.debug('ignoring incoming webhook as JIRA Webhook is disabled.')
        raise Http404('JIRA Webhook disabled')
    elif not get_system_setting('disable_jira_webhook_secret'):
        if not get_system_setting('jira_webhook_secret'):
            logger.warning(
                'ignoring incoming webhook as JIRA Webhook secret is empty in Defect Dojo system settings.'
            )
            raise PermissionDenied('JIRA Webhook secret cannot be empty')
        if secret != get_system_setting('jira_webhook_secret'):
            logger.warning('invalid secret provided to JIRA Webhook')
            raise PermissionDenied(
                'invalid or no secret provided to JIRA Webhook')

    # if webhook secret is disabled in system_settings, we ignore the incoming secret, even if it doesn't match

    # example json bodies at the end of this file

    if request.content_type != 'application/json':
        return HttpResponseBadRequest("only application/json supported")

    if request.method == 'POST':
        try:
            parsed = json.loads(request.body.decode('utf-8'))
            if parsed.get('webhookEvent') == 'jira:issue_updated':
                # xml examples at the end of file
                jid = parsed['issue']['id']
                jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
                logger.info("Received issue update for {}".format(
                    jissue.jira_key))
                if jissue.finding:
                    finding = jissue.finding
                    jira_instance = jira_helper.get_jira_instance(finding)
                    resolved = True
                    resolution = parsed['issue']['fields']['resolution']

                    #         "resolution":{
                    #             "self":"http://www.testjira.com/rest/api/2/resolution/11",
                    #             "id":"11",
                    #             "description":"Cancelled by the customer.",
                    #             "name":"Cancelled"
                    #         },

                    # or
                    #         "resolution": null

                    if resolution is None:
                        resolved = False
                        logger.debug(
                            "JIRA resolution is None, therefore resolved is now False"
                        )
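                    # an active finding with a resolved JIRA issue (or vice versa) means the two are out of sync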
                    if finding.active == resolved:
                        if finding.active:
                            if jira_instance and resolution[
                                    'name'] in jira_instance.accepted_resolutions:
                                logger.debug(
                                    "Marking related finding of {} as accepted. Creating risk acceptance."
                                    .format(jissue.jira_key))
                                finding.active = False
                                finding.mitigated = None
                                finding.is_Mitigated = False
                                finding.false_p = False
                                assignee = parsed['issue']['fields'].get(
                                    'assignee')
                                assignee_name = assignee[
                                    'name'] if assignee else None
                                Risk_Acceptance.objects.create(
                                    accepted_by=assignee_name,
                                    owner=finding.reporter,
                                ).accepted_findings.set([finding])
                            elif jira_instance and resolution[
                                    'name'] in jira_instance.false_positive_resolutions:
                                logger.debug(
                                    "Marking related finding of {} as false-positive"
                                    .format(jissue.jira_key))
                                finding.active = False
                                finding.verified = False
                                finding.mitigated = None
                                finding.is_Mitigated = False
                                finding.false_p = True
                                ra_helper.remove_from_any_risk_acceptance(
                                    finding)
                            else:
                                # Mitigated by default as before
                                logger.debug(
                                    "Marking related finding of {} as mitigated (default)"
                                    .format(jissue.jira_key))
                                now = timezone.now()
                                finding.active = False
                                finding.mitigated = now
                                finding.is_Mitigated = True
                                finding.endpoints.clear()
                                finding.false_p = False
                                ra_helper.remove_from_any_risk_acceptance(
                                    finding)
                        else:
                            # Reopen / Open Jira issue
                            logger.debug(
                                "Re-opening related finding of {}".format(
                                    jissue.jira_key))
                            finding.active = True
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.false_p = False
                            ra_helper.remove_from_any_risk_acceptance(finding)
                    else:
                        # Reopen / Open Jira issue
                        finding.active = True
                        finding.mitigated = None
                        finding.is_Mitigated = False
                        finding.false_p = False
                        ra_helper.remove_from_any_risk_acceptance(finding)

                    finding.jira_issue.jira_change = timezone.now()
                    finding.jira_issue.save()
                    finding.save()

                elif jissue.engagement:
                    # if parsed['issue']['fields']['resolution'] != None:
                    #     eng.active = False
                    #     eng.status = 'Completed'
                    #     eng.save()
                    return HttpResponse('Update for engagement ignored')
                else:
                    raise Http404(
                        'No finding or engagement found for JIRA issue {}'.
                        format(jissue.jira_key))

            if parsed.get('webhookEvent') == 'comment_created':
                """
                    example incoming requests from JIRA Server 8.14.0
                    {
                    "timestamp":1610269967824,
                    "webhookEvent":"comment_created",
                    "comment":{
                        "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578",
                        "id":"466578",
                        "author":{
                            "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo",
                            "name":"defect.dojo",
                            "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud
                            "avatarUrls":{
                                "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48",
                                "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24",
                                "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16",
                                "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32"
                            },
                            "displayName":"Defect Dojo",
                            "active":true,
                            "timeZone":"Europe/Amsterdam"
                        },
                        "body":"(Valentijn Scholten):test4",
                        "updateAuthor":{
                            "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo",
                            "name":"defect.dojo",
                            "key":"defect.dojo",
                            "avatarUrls":{
                                "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48",
                                "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24",
                                "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16",
                                "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32"
                            },
                            "displayName":"Defect Dojo",
                            "active":true,
                            "timeZone":"Europe/Amsterdam"
                        },
                        "created":"2021-01-10T10:12:47.824+0100",
                        "updated":"2021-01-10T10:12:47.824+0100"
                    }
                    }
                """

                comment_text = parsed['comment']['body']
                commentor = ''
                if 'name' in parsed['comment']['updateAuthor']:
                    commentor = parsed['comment']['updateAuthor']['name']
                elif 'emailAddress' in parsed['comment']['updateAuthor']:
                    commentor = parsed['comment']['updateAuthor'][
                        'emailAddress']
                else:
                    logger.debug(
                        'Could not find the author of this jira comment!')
                commentor_display_name = parsed['comment']['updateAuthor'][
                    'displayName']
                # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843"
                jid = parsed['comment']['self'].split('/')[-3]
                jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
                logger.info("Received issue comment for {}".format(
                    jissue.jira_key))
                logger.debug('jissue: %s', vars(jissue))
                if jissue.finding:
                    # logger.debug('finding: %s', vars(jissue.finding))
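                    # skip comments posted by DefectDojo's own JIRA account to avoid an echo loop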
                    jira_usernames = JIRA_Instance.objects.values_list(
                        'username', flat=True)
                    for jira_userid in jira_usernames:
                        # logger.debug('incoming username: %s jira config username: %s', commentor.lower(), jira_userid.lower())
                        if jira_userid.lower() == commentor.lower():
                            logger.debug(
                                'skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)',
                                commentor.lower(), jira_userid.lower())
                            return HttpResponse('')
                    finding = jissue.finding
                    new_note = Notes()
                    new_note.entry = '(%s (%s)): %s' % (
                        commentor_display_name, commentor, comment_text)
                    new_note.author, created = User.objects.get_or_create(
                        username='******')
                    new_note.save()
                    finding.notes.add(new_note)
                    finding.jira_issue.jira_change = timezone.now()
                    finding.jira_issue.save()
                    finding.save()
                    create_notification(
                        event='other',
                        title='JIRA incoming comment - %s' % (jissue.finding),
                        url=reverse("view_finding",
                                    args=(jissue.finding.id, )),
                        icon='check')
                elif jissue.engagement:
                    return HttpResponse('Comment for engagement ignored')
                else:
                    raise Http404(
                        'No finding or engagement found for JIRA issue {}'.
                        format(jissue.jira_key))

            if parsed.get('webhookEvent') not in [
                    'comment_created', 'jira:issue_updated'
            ]:
                logger.info(
                    'Unrecognized JIRA webhook event received: {}'.format(
                        parsed.get('webhookEvent')))
        except Exception as e:
            if isinstance(e, Http404):
                logger.warning('404 error processing JIRA webhook')
            else:
                logger.exception(e)

            try:
                logger.debug('jira_webhook_body_parsed:')
                logger.debug(json.dumps(parsed, indent=4))
            except:
                logger.debug('jira_webhook_body:')
                logger.debug(request.body.decode('utf-8'))

            # reraise to make sure we don't silently swallow things
            raise
    return HttpResponse('')
Example #24
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    test = get_object_or_404(Test, id=tid)
    scan_type = test.test_type.name
    engagement = test.engagement
    form = ReImportScanForm()
    jform = None
    push_all_jira_issues = False

    # Decide if we need to present the Push to JIRA form
    if get_system_setting(
            'enable_jira') and engagement.product.jira_pkey_set.first(
            ) is not None:
        push_all_jira_issues = engagement.product.jira_pkey_set.first(
        ).push_all_issues
        jform = JIRAImportScanForm(push_all=push_all_jira_issues,
                                   prefix='jiraform')

    form.initial['tags'] = [tag.name for tag in test.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']

            scan_date_time = datetime.combine(scan_date, timezone.now().time())
            if settings.USE_TZ:
                scan_date_time = timezone.make_aware(
                    scan_date_time, timezone.get_default_timezone())

            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES.get('file', None)
            scan_type = test.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            test.tags = ts
            if file and is_scan_file_too_large(file):
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Report file is too large. Maximum supported size is {} MB"
                    .format(settings.SCAN_FILE_MAX_SIZE),
                    extra_tags='alert-danger')
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))

            try:
                parser = import_parser_factory(file, test, active, verified)
            except ValueError:
                raise Http404()
            except Exception as e:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "An error has occurred in the parser, please see error "
                    "log for details.",
                    extra_tags='alert-danger')
                parse_logger.exception(e)
                parse_logger.error("Error in parser: {}".format(str(e)))
                return HttpResponseRedirect(
                    reverse('re_import_scan_results', args=(test.id, )))

            try:
                items = parser.items
                original_items = test.finding_set.all().values_list("id",
                                                                    flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                # Push to Jira?

                push_to_jira = False
                if push_all_jira_issues:
                    push_to_jira = True
                elif 'jiraform-push_to_jira' in request.POST:
                    jform = JIRAImportScanForm(request.POST,
                                               prefix='jiraform',
                                               push_all=push_all_jira_issues)
                    if jform.is_valid():
                        push_to_jira = jform.cleaned_data.get('push_to_jira')

                from titlecase import titlecase
                for item in items:

                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                        item.severity = sev

                    # existing findings may be from before we had component_name/version fields
                    component_name = item.component_name if hasattr(
                        item, 'component_name') else None
                    component_version = item.component_version if hasattr(
                        item, 'component_version') else None

                    # If it doesn't clear minimum severity, move on
                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    # Try to find the existing finding.
                    # Veracode and Arachni titles are not unique, so the
                    # description is included in the match for those scanners.
                    item.title = titlecase(item.title)
                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                            description=item.description)

                    else:
                        finding = Finding.objects.filter(
                            title=item.title,
                            test__id=test.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev))

                    if len(finding) == 1:
                        finding = finding[0]
                        if finding.mitigated or finding.is_Mitigated:
                            # it was once fixed, but now back
                            finding.mitigated = None
                            finding.is_Mitigated = False
                            finding.mitigated_by = None
                            finding.active = True
                            finding.verified = verified

                            # existing findings may be from before we had component_name/version fields
                            finding.component_name = finding.component_name if finding.component_name else component_name
                            finding.component_version = finding.component_version if finding.component_version else component_version

                            finding.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." %
                                scan_type,
                                author=request.user)
                            note.save()
                            finding.notes.add(note)

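                            # re-open the per-endpoint statuses as well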
                            endpoint_status = finding.endpoint_status.all()
                            for status in endpoint_status:
                                status.mitigated_by = None
                                status.mitigated_time = None
                                status.mitigated = False
                                status.last_modified = timezone.now()
                                status.save()

                            reactivated_count += 1
                        else:
                            # existing findings may be from before we had component_name/version fields
                            if not finding.component_name or not finding.component_version:
                                finding.component_name = finding.component_name if finding.component_name else component_name
                                finding.component_version = finding.component_version if finding.component_version else component_version
                                finding.save(dedupe_option=False,
                                             push_to_jira=False)

                        new_items.append(finding.id)
                    else:
                        item.test = test
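                        # an item dated today likely carries the parser's default date; prefer the test's start date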
                        if item.date == timezone.now().date():
                            item.date = test.target_start.date()
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active

                        # Save it
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        # Add it to the new items
                        new_items.append(item.id)
                        finding = item

                        if hasattr(item, 'unsaved_req_resp') and len(
                                item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
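                                # Arachni request/response data is assumed to be base64-encoded already, so it is stored as-is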
                                if scan_type == "Arachni Scan":
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"],
                                        burpResponseBase64=req_resp["resp"],
                                    )
                                else:
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=base64.b64encode(
                                            req_resp["req"].encode("utf-8")),
                                        burpResponseBase64=base64.b64encode(
                                            req_resp["resp"].encode("utf-8")),
                                    )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=base64.b64encode(
                                    item.unsaved_request.encode()),
                                burpResponseBase64=base64.b64encode(
                                    item.unsaved_response.encode()),
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if finding:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding, endpoint=ep)
                            ep.endpoint_status.add(eps)

                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        for endpoint in form.cleaned_data['endpoints']:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding, endpoint=ep)
                            ep.endpoint_status.add(eps)

                            finding.endpoints.add(ep)
                            finding.endpoint_status.add(eps)
                        if item.unsaved_tags is not None:
                            finding.tags = item.unsaved_tags

                    # Save it. This may be the second time we save it in this function.
                    finding.save(push_to_jira=push_to_jira)
                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    if not finding.mitigated or not finding.is_Mitigated:
                        finding.mitigated = scan_date_time
                        finding.is_Mitigated = True
                        finding.mitigated_by = request.user
                        finding.active = False
                        finding.save()
                        note = Notes(entry="Mitigated by %s re-upload." %
                                     scan_type,
                                     author=request.user)
                        note.save()
                        finding.notes.add(note)
                        mitigated_count += 1

                        endpoint_status = finding.endpoint_status.all()
                        for status in endpoint_status:
                            status.mitigated_by = request.user
                            status.mitigated_time = timezone.now()
                            status.mitigated = True
                            status.last_modified = timezone.now()
                            status.save()

                test.updated = max_safe([scan_date_time, test.updated])
                test.engagement.updated = max_safe(
                    [scan_date_time, test.engagement.updated])

                test.save()
                test.engagement.save()

                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type +
                    message(finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(finding_added_count, 'finding', 'added') +
                        ', that are new to scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(reactivated_count, 'finding', 'reactivated') +
                        ', that are back in scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(mitigated_count, 'finding', 'mitigated') +
                        '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(event='scan_added',
                                    title=str(finding_count) +
                                    " findings for " +
                                    test.engagement.product.name,
                                    finding_count=finding_count,
                                    test=test,
                                    engagement=test.engagement,
                                    url=reverse('view_test', args=(test.id, )))

                return HttpResponseRedirect(
                    reverse('view_test', args=(test.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id,
                              title="Re-upload a %s" % scan_type,
                              tab="engagements")
    product_tab.setEngagement(engagement)
    form.fields['endpoints'].queryset = Endpoint.objects.filter(
        product__id=product_tab.product.id)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'product_tab': product_tab,
            'eid': engagement.id,
            'additional_message': additional_message,
            'jform': jform,
        })
Example #25
def handle_uploaded_cvff(request, f):
    output = StringIO.StringIO()
    for chunk in f.chunks():
        output.write(chunk)

    csvString = output.getvalue().splitlines(True)[1:]

    inputCSV = csv.reader(csvString, quoting=csv.QUOTE_NONNUMERIC)
    logger.error('Before moving into loop')

    isHeader = 1
    indexOfResolution = 0
    for row in inputCSV:
        if isHeader == 1:
            for col in row:
                if str(col) == "WSO2_resolution":
                    isHeader = 0
                    break
                indexOfResolution = indexOfResolution + 1
            continue  # skip the header row itself; otherwise it falls through and is parsed as a finding
        try:
            finding = Finding.objects.filter(pk=float(row[0]))[0]
            logger.error('Finding note count for id ' + str(row[0]) +
                         ' is : ' + str(finding.notes.count()))
            status = str(row[indexOfResolution]).strip().split("(")[0]
            if finding.notes.count() == 0:
                note = Notes(entry="[ " + status + " ] ~ " +
                             row[indexOfResolution + 2],
                             author=request.user)
                note.save()
                finding.notes.add(note)
                logger.info('Adding new note')
            else:
                note = finding.notes.all()[0]
                note.entry = "[ " + status + " ] ~ " + row[indexOfResolution +
                                                           2]
                note.author = request.user
                note.save()
                logger.info('Updating existing note' + str(note.id))

            status = status.replace('.', '').replace(',',
                                                     '').replace(' ',
                                                                 '').lower()

            # reset every disposition flag, then set the one matching the sheet's resolution
            finding.false_p = False
            finding.verified = False
            finding.active = False
            finding.out_of_scope = False

            if status == 'falsepositive':
                finding.false_p = True
            elif status == 'notathreat':
                finding.verified = True
            elif status in ('needtobefixed', 'needtofix', 'truepositive'):
                finding.active = True
            elif status == 'alreadymitigated':
                finding.out_of_scope = True
            elif status == 'notapplicable':
                finding.under_review = True
            # elif status == 'cannotreproduce':
            #     finding.under_review = True
            else:
                logger.error('Unknown status for : ' + str(row[0]) +
                             ". Status is : " + status)
            finding.save()
        except Exception as e:
            logger.error(str(e))
            logger.error('Error in processing row: ' + str(row[0]) +
                         ". Skipping.")
Example #26
    def obj_create(self, bundle, **kwargs):
        bundle.obj = ImportScanObject(initial=kwargs)
        self.is_valid(bundle)
        if bundle.errors:
            raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors))
        bundle = self.full_hydrate(bundle)

        test = getattr(bundle.obj, 'test_obj')
        scan_type = getattr(bundle.obj, 'scan_type')
        min_sev = getattr(bundle.obj, 'minimum_severity')
        scan_date = getattr(bundle.obj, 'scan_date')
        verified = getattr(bundle.obj, 'verified')
        active = getattr(bundle.obj, 'active')

        try:
            parser = import_parser_factory(bundle.data['file'], test)
        except ValueError:
            raise NotFound("Parser ValueError")

        try:
            items = parser.items
            original_items = test.finding_set.all().values_list("id", flat=True)
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0
            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

                if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                    continue

                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    find = Finding.objects.filter(title=item.title,
                                                  test__id=test.id,
                                                  severity=sev,
                                                  numerical_severity=Finding.get_numerical_severity(sev),
                                                  description=item.description
                                                  )
                else:
                    find = Finding.objects.filter(title=item.title,
                                                  test__id=test.id,
                                                  severity=sev,
                                                  numerical_severity=Finding.get_numerical_severity(sev),
                                                  )

                if len(find) == 1:
                    find = find[0]
                    if find.mitigated:
                        # it was once fixed, but now back
                        find.mitigated = None
                        find.mitigated_by = None
                        find.active = True
                        find.verified = verified
                        find.save()
                        note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                     author=bundle.request.user)
                        note.save()
                        find.notes.add(note)
                        reactivated_count += 1
                    new_items.append(find.id)
                else:
                    item.test = test
                    item.date = test.target_start
                    item.reporter = bundle.request.user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = bundle.request.user
                    item.verified = verified
                    item.active = active
                    item.save()
                    finding_added_count += 1
                    new_items.append(item.id)
                    find = item

                    if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=req_resp["req"],
                                                             burpResponseBase64=req_resp["resp"],
                                                             )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(finding=find,
                                                         burpRequestBase64=item.unsaved_request,
                                                         burpResponseBase64=item.unsaved_response,
                                                         )
                        burp_rr.clean()
                        burp_rr.save()
                if find:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                     host=endpoint.host,
                                                                     path=endpoint.path,
                                                                     query=endpoint.query,
                                                                     fragment=endpoint.fragment,
                                                                     product=test.engagement.product)
                        find.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        find.tags = item.unsaved_tags
            # calculate the difference
            to_mitigate = set(original_items) - set(new_items)
            for finding_id in to_mitigate:
                finding = Finding.objects.get(id=finding_id)
                finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                finding.mitigated_by = bundle.request.user
                finding.active = False
                finding.save()
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=bundle.request.user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        except SyntaxError:
            raise NotFound("Parser SyntaxError")

        # Everything executed fine. We successfully imported the scan.
        raise ImmediateHttpResponse(HttpCreated(location=getattr(bundle.obj, 'test')))
Example #27
    def __init__(self, filename, test):
        dupes = dict()
        use_case_notes_dict = dict()
        vul_influence_notes_dict = dict()
        wso2_resolution_notes_dict = dict()
        resolution_notes_dict = dict()

        self.items = ()
        self.use_case_notes = ()
        self.vul_influence_notes = ()
        self.wso2_resolution_notes = ()
        self.resolution_notes = ()

        if filename is None:
            self.items = ()
            return

        df = pd.read_csv(filename, header=0)

        for i, row in df.iterrows():
            cwe = df.loc[i, 'cwe']
            title = df.loc[i, 'title']
            description = df.loc[i, 'description']
            sev = df.loc[i, 'severity']
            line = df.loc[i, 'line_number']
            issue_id = df.loc[i, 'issue_id']
            use_case_note = df.loc[i, 'Use_Case']
            vul_influence_note = df.loc[i, 'Vulnerability_Influence']
            resolution_note = df.loc[i, 'Resolution']
            sourcefilepath = df.loc[i, 'sourcefilepath']
            sourcefile = df.loc[i, 'sourcefile']
            mitigation = df.loc[i, 'mitigation']
            impact = df.loc[i, 'impact']
            WSO2_resolution = df.loc[i, 'WSO2_resolution']

            # dupe_key = hashlib.md5(str(cwe).encode('utf-8') + title.encode('utf-8')).hexdigest()
            # try:
            dupe_key = sev + str(cwe) + str(line) + str(sourcefile) + str(sourcefilepath) + str(title) + str(issue_id)
            # except:
            #     dupe_key = sev + flaw.attrib['cweid'] + flaw.attrib['module'] + flaw.attrib['type'] + flaw.attrib[
            #         'issueid']
            if dupe_key in dupes:
                finding = dupes[dupe_key]
                if finding.description:
                    finding.description = finding.description + "\nVulnerability ID: " + \
                                          df.loc[i, 'mitigation']
                dupes[dupe_key] = finding
            else:
                dupes[dupe_key] = True

                finding = Finding(title=title,
                                  cwe=int(cwe),
                                  test=test,
                                  active=False,
                                  verified=False,
                                  severity=sev,
                                  static_finding=True,
                                  line_number=line,
                                  file_path=sourcefilepath+sourcefile,
                                  line=line,
                                  sourcefile=sourcefile,
                                  description=description,
                                  numerical_severity=Finding.
                                  get_numerical_severity(sev),
                                  mitigation=mitigation,
                                  impact=impact,
                                  url='N/A')

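            # Note_Type ids 1-4 are assumed to be pre-seeded rows for these four note categories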
            use_case_note = Notes(entry=use_case_note, note_type=Note_Type(id=2), author=User.objects.all().first())
            vul_influence_note = Notes(entry=vul_influence_note, note_type=Note_Type(id=3), author=User.objects.all().first())
            wso2_resolution_note = Notes(entry=WSO2_resolution, note_type=Note_Type(id=1), author=User.objects.all().first())
            resolution_note = Notes(entry=resolution_note, note_type=Note_Type(id=4), author=User.objects.all().first())

            use_case_note.save()
            vul_influence_note.save()
            wso2_resolution_note.save()
            resolution_note.save()

            dupes[dupe_key] = finding
            use_case_notes_dict[dupe_key] = use_case_note
            vul_influence_notes_dict[dupe_key] = vul_influence_note
            wso2_resolution_notes_dict[dupe_key] = wso2_resolution_note
            resolution_notes_dict[dupe_key] = resolution_note

        self.items = list(dupes.values())
        self.use_case_notes = list(use_case_notes_dict.values())
        self.vul_influence_notes = list(vul_influence_notes_dict.values())
        self.wso2_resolution_notes = list(wso2_resolution_notes_dict.values())
        self.resolution_notes = list(resolution_notes_dict.values())
Example #28
def sync_findings(request, tid, spreadsheetId):
    test = Test.objects.get(id=tid)
    system_settings = get_object_or_404(System_Settings, id=1)
    service_account_info = json.loads(system_settings.credentials)
    SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=SCOPES)
    sheets_service = googleapiclient.discovery.build('sheets',
                                                     'v4',
                                                     credentials=credentials,
                                                     cache_discovery=False)
    res = {}
    spreadsheet = sheets_service.spreadsheets().get(
        spreadsheetId=spreadsheetId).execute()
    sheet_names = []
    for sheet in spreadsheet['sheets']:
        date = (sheet['properties']['title'])
        try:
            date = datetime.datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
            sheet_names.append(date)
        except ValueError:
            pass
    try:
        sheet_title = str(max(sheet_names))
    except ValueError:
        raise Exception(
            'Existing Google Spreadsheet has errors. Delete the spreadsheet and export again.'
        )
    res['sheet_title'] = sheet_title

    result = sheets_service.spreadsheets().values().get(
        spreadsheetId=spreadsheetId, range=sheet_title).execute()
    rows = result.get('values', [])
    header_raw = rows[0]
    findings_sheet = rows[1:]
    findings_db = Finding.objects.filter(
        test=test).order_by('numerical_severity')
    column_details = json.loads(system_settings.column_widths.replace(
        "'", '"'))
    active_note_types = Note_Type.objects.filter(is_active=True)
    note_type_activation = len(active_note_types)

    errors = []
    index_of_active = header_raw.index('active')
    index_of_verified = header_raw.index('verified')
    index_of_duplicate = header_raw.index('duplicate')
    index_of_false_p = header_raw.index('false_p')
    index_of_id = header_raw.index('id')

    for finding_sheet in findings_sheet:
        finding_id = finding_sheet[index_of_id]
        active = finding_sheet[index_of_active]
        verified = finding_sheet[index_of_verified]
        duplicate = finding_sheet[index_of_duplicate]
        false_p = finding_sheet[index_of_false_p]

        if (active == 'TRUE' or verified == 'TRUE') and duplicate == 'TRUE':  # Check update finding conditions
            error = 'Duplicate findings cannot be verified or active'
            errors.append({
                'finding_id': finding_id,
                'column_names': 'active, verified, duplicate',
                'error': error
            })
        elif false_p == 'TRUE' and verified == 'TRUE':
            error = 'False positive findings cannot be verified.'
            errors.append({
                'finding_id': finding_id,
                'column_names': 'false_p, verified',
                'error': error
            })
        else:
            try:
                finding_db = findings_db.get(
                    id=finding_id)  # Update finding attributes
            except (Finding.DoesNotExist, ValueError):
                if finding_id is None:
                    finding_id = 'Null'
                error = 'Finding does not belong to the Test'
                errors.append({
                    'finding_id': finding_id,
                    'column_names': 'id',
                    'error': error
                })
            else:
                finding_notes = finding_db.notes.all()
                for column_name in header_raw:
                    if column_name in column_details:
                        if int(column_details[column_name][1]) == 0:
                            index_of_column = header_raw.index(column_name)
                            if finding_sheet[index_of_column] == 'TRUE':
                                setattr(finding_db, column_name, True)
                            elif finding_sheet[index_of_column] == 'FALSE':
                                setattr(finding_db, column_name, False)
                            else:
                                if finding_sheet[index_of_column] == '':
                                    setattr(finding_db, column_name, None)
                                else:
                                    setattr(finding_db, column_name,
                                            finding_sheet[index_of_column])
                    elif column_name.startswith('[note]') and column_name.endswith('_id'):  # Updating notes
                        note_column_name = column_name[:-3]
                        try:
                            index_of_note_column = header_raw.index(
                                note_column_name)
                        except ValueError:
                            pass
                        else:
                            index_of_id_column = header_raw.index(column_name)
                            note_id = finding_sheet[index_of_id_column]
                            note_entry = finding_sheet[
                                index_of_note_column].rstrip()
                            if note_entry != '':
                                if note_id != '':  # If the note is an existing one
                                    note_db = finding_notes.get(id=note_id)
                                    if note_entry != note_db.entry.rstrip():
                                        note_db.entry = note_entry
                                        note_db.edited = True
                                        note_db.editor = request.user
                                        note_db.edit_time = timezone.now()
                                        history = NoteHistory(
                                            data=note_db.entry,
                                            time=note_db.edit_time,
                                            current_editor=note_db.editor)
                                        history.save()
                                        note_db.history.add(history)
                                        note_db.save()
                                else:  # If the note is a newly added one
                                    if note_type_activation:
                                        if note_column_name[7:12] == 'Note_':
                                            error = 'Cannot add new notes without a note-type. Add your note under the correct note-type column'
                                            errors.append({
                                                'finding_id': finding_id,
                                                'column_names':
                                                note_column_name,
                                                'error': error
                                            })
                                        else:
                                            note_type_name = note_column_name[
                                                7:][:-2]
                                            try:
                                                note_type = active_note_types.get(
                                                    name=note_type_name)
                                            except Note_Type.DoesNotExist:
                                                try:
                                                    note_type = Note_Type.objects.get(
                                                        name=note_type_name)
                                                except Note_Type.DoesNotExist:
                                                    pass
                                                else:
                                                    error = '"' + note_type_name + '" Note-type is disabled. Cannot add new notes of "' + note_type_name + '" type'
                                                    errors.append({
                                                        'finding_id':
                                                        finding_id,
                                                        'column_names':
                                                        note_column_name,
                                                        'error':
                                                        error
                                                    })
                                            else:
                                                new_note = Notes(
                                                    note_type=note_type,
                                                    entry=note_entry,
                                                    date=timezone.now(),
                                                    author=request.user)
                                                new_note.save()
                                                history = NoteHistory(
                                                    data=new_note.entry,
                                                    time=new_note.date,
                                                    current_editor=new_note.author,
                                                    note_type=new_note.note_type)
                                                history.save()
                                                new_note.history.add(history)
                                                finding_db.notes.add(new_note)
                                    else:
                                        if note_column_name[7:12] == 'Note_':
                                            new_note = Notes(
                                                entry=note_entry,
                                                date=timezone.now(),
                                                author=request.user)
                                            new_note.save()
                                            history = NoteHistory(
                                                data=new_note.entry,
                                                time=new_note.date,
                                                current_editor=new_note.author)
                                            history.save()
                                            new_note.history.add(history)
                                            finding_db.notes.add(new_note)
                                        else:
                                            error = 'Note-types are not enabled. Notes cannot have a note-type.'
                                            errors.append({
                                                'finding_id': finding_id,
                                                'column_names':
                                                note_column_name,
                                                'error': error
                                            })
                finding_db.save()
    res['errors'] = errors
    populate_sheet(tid, spreadsheetId)
    return res
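The note sync above relies on a column-naming convention: each note occupies a pair of columns, '[note] <Type>_<n>' for the entry and '[note] <Type>_<n>_id' for the note id, with the literal type name 'Note' marking untyped notes. A small sketch of the slicing logic; like the `[7:][:-2]` slice in the code, it assumes a single-character index suffix:

def parse_note_id_column(column_name):
    if not (column_name.startswith('[note]') and column_name.endswith('_id')):
        return None
    entry_column = column_name[:-3]    # drop the trailing '_id'
    type_name = entry_column[7:][:-2]  # drop the '[note] ' prefix and the '_<n>' suffix
    return entry_column, type_name

assert parse_note_id_column('[note] Use Case_1_id') == ('[note] Use Case_1', 'Use Case')
assert parse_note_id_column('[note] Note_2_id') == ('[note] Note_2', 'Note')
assert parse_note_id_column('severity') is None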
Example #29
0
    def process_parsed_findings(self,
                                test,
                                parsed_findings,
                                scan_type,
                                user,
                                active,
                                verified,
                                minimum_severity=None,
                                endpoints_to_add=None,
                                push_to_jira=None,
                                now=timezone.now()):  # note: default is evaluated once, at definition time

        items = parsed_findings
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        reactivated_items = []
        unchanged_count = 0
        unchanged_items = []

        logger.debug('starting reimport of %i items.',
                     len(items) if items else 0)
        from dojo.importers.reimporter.utils import get_deduplication_algorithm_from_conf, match_new_finding_to_existing_finding, update_endpoint_status
        deduplication_algorithm = get_deduplication_algorithm_from_conf(
            scan_type)

        i = 0
        logger.debug(
            'STEP 1: looping over findings from the reimported report and trying to match them to existing findings'
        )
        deduplicationLogger.debug(
            'Algorithm used for matching new findings to existing findings: %s',
            deduplication_algorithm)
        for item in items:
            sev = item.severity
            if sev == 'Information' or sev == 'Informational':
                sev = 'Info'

            item.severity = sev
            item.numerical_severity = Finding.get_numerical_severity(sev)

            if (Finding.SEVERITIES[sev] >
                    Finding.SEVERITIES[minimum_severity]):
                # finding's severity is below the configured threshold : ignoring the finding
                continue

            # existing findings may be from before we had component_name/version fields
            component_name = getattr(item, 'component_name', None)
            component_version = getattr(item, 'component_version', None)

            if not hasattr(item, 'test'):
                item.test = test

            item.hash_code = item.compute_hash_code()
            deduplicationLogger.debug("item's hash_code: %s", item.hash_code)

            findings = match_new_finding_to_existing_finding(
                item, test, deduplication_algorithm, scan_type)

            deduplicationLogger.debug(
                'found %i findings matching with current new finding',
                len(findings))

            if findings:
                # existing finding found
                finding = findings[0]
                if finding.false_p or finding.out_of_scope or finding.risk_accepted:
                    logger.debug(
                        '%i: skipping existing finding (it is marked as false positive:%s and/or out of scope:%s or is risk accepted:%s): %i:%s:%s:%s',
                        i, finding.false_p, finding.out_of_scope,
                        finding.risk_accepted, finding.id, finding,
                        finding.component_name, finding.component_version)
                elif finding.mitigated or finding.is_Mitigated:
                    logger.debug('%i: reactivating: %i:%s:%s:%s', i,
                                 finding.id, finding, finding.component_name,
                                 finding.component_version)
                    finding.mitigated = None
                    finding.is_Mitigated = False
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified

                    # existing findings may be from before we had component_name/version fields
                    finding.component_name = finding.component_name if finding.component_name else component_name
                    finding.component_version = finding.component_version if finding.component_version else component_version

                    # don't dedupe before endpoints are added
                    finding.save(dedupe_option=False)
                    note = Notes(entry="Re-activated by %s re-upload." %
                                 scan_type,
                                 author=user)
                    note.save()
                    endpoint_status = finding.endpoint_status.all()
                    for status in endpoint_status:
                        status.mitigated_by = None
                        status.mitigated_time = None
                        status.mitigated = False
                        status.last_modified = timezone.now()
                        status.save()
                    finding.notes.add(note)
                    reactivated_items.append(finding)
                    reactivated_count += 1
                else:
                    # existing findings may be from before we had component_name/version fields
                    logger.debug('%i: updating existing finding: %i:%s:%s:%s',
                                 i, finding.id, finding,
                                 finding.component_name,
                                 finding.component_version)
                    if not finding.component_name or not finding.component_version:
                        finding.component_name = finding.component_name if finding.component_name else component_name
                        finding.component_version = finding.component_version if finding.component_version else component_version
                        finding.save(dedupe_option=False)

                    unchanged_items.append(finding)
                    unchanged_count += 1
                if finding.dynamic_finding:
                    logger.debug(
                        "Re-import found an existing dynamic finding for this new finding. Checking the status of endpoints"
                    )
                    update_endpoint_status(finding, item, user)
            else:
                # no existing finding found
                item.reporter = user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = user
                item.verified = verified
                item.active = active
                # Save it. Don't dedupe before endpoints are added.
                item.save(dedupe_option=False)
                logger.debug(
                    '%i: reimport creating new finding as no existing finding matched: %i:%s:%s:%s',
                    i, item.id, item, item.component_name,
                    item.component_version)
                finding_added_count += 1
                new_items.append(item)
                finding = item

                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=base64.b64encode(
                                req_resp["req"].encode("utf-8")),
                            burpResponseBase64=base64.b64encode(
                                req_resp["resp"].encode("utf-8")))
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=base64.b64encode(
                            item.unsaved_request.encode()),
                        burpResponseBase64=base64.b64encode(
                            item.unsaved_response.encode()))
                    burp_rr.clean()
                    burp_rr.save()

            # for existing findings: make sure endpoints are present or created
            if finding:
                finding_count += 1
                for endpoint in item.unsaved_endpoints:
                    try:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                    except MultipleObjectsReturned:
                        # several identical endpoints already exist; fall back to the first match
                        ep = Endpoint.objects.filter(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product).first()

                    try:
                        eps, created = Endpoint_Status.objects.get_or_create(
                            finding=finding, endpoint=ep)
                    except MultipleObjectsReturned:
                        # duplicate status rows; fall back to the first match
                        eps = Endpoint_Status.objects.filter(
                            finding=finding, endpoint=ep).first()

                    ep.endpoint_status.add(eps)
                    finding.endpoints.add(ep)
                    finding.endpoint_status.add(eps)

                if endpoints_to_add:
                    for endpoint in endpoints_to_add:
                        # TODO Not sure what happens here, we get an endpoint model and try to create it again?
                        try:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product)
                        except MultipleObjectsReturned:
                            # several identical endpoints already exist; fall back to the first match
                            ep = Endpoint.objects.filter(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=test.engagement.product).first()
                        try:
                            eps, created = Endpoint_Status.objects.get_or_create(
                                finding=finding, endpoint=ep)
                        except MultipleObjectsReturned:
                            # duplicate status rows; fall back to the first match
                            eps = Endpoint_Status.objects.filter(
                                finding=finding, endpoint=ep).first()

                        ep.endpoint_status.add(eps)
                        finding.endpoints.add(ep)
                        finding.endpoint_status.add(eps)

                if item.unsaved_tags:
                    finding.tags = item.unsaved_tags

                # existing findings may be from before we had component_name/version fields
                finding.component_name = finding.component_name if finding.component_name else component_name
                finding.component_version = finding.component_version if finding.component_version else component_version

                finding.save(push_to_jira=push_to_jira)

        to_mitigate = set(original_items) - set(reactivated_items) - set(
            unchanged_items)
        untouched = set(unchanged_items) - set(to_mitigate)

        return new_items, reactivated_items, to_mitigate, untouched
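The return values reduce to plain set arithmetic: anything that existed before the reimport and was neither reactivated nor matched unchanged becomes a candidate for mitigation. A toy illustration with finding ids:

original = {1, 2, 3, 4}
reactivated = {2}
unchanged = {3}
to_mitigate = original - reactivated - unchanged  # findings no longer in the report
untouched = unchanged - to_mitigate               # matched and still active
assert to_mitigate == {1, 4} and untouched == {3}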
Example #30
0
    def save(self, push_to_jira=False):
        data = self.validated_data
        test = data['test']
        scan_type = data['scan_type']
        endpoint_to_add = data['endpoint_to_add']
        min_sev = data['minimum_severity']
        scan_date = data['scan_date']
        scan_date_time = datetime.datetime.combine(scan_date,
                                                   timezone.now().time())
        if settings.USE_TZ:
            scan_date_time = timezone.make_aware(
                scan_date_time, timezone.get_default_timezone())
        verified = data['verified']
        active = data['active']

        try:
            parser = import_parser_factory(
                data.get('file', None),
                test,
                active,
                verified,
                data['scan_type'],
            )
        except ValueError:
            raise Exception("Parser ValueError")

        try:
            items = parser.items
            original_items = list(test.finding_set.all())
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0
            reactivated_items = []
            unchanged_count = 0
            unchanged_items = []

            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

                if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                    continue

                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                        description=item.description).all()
                else:
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(
                            sev)).all()

                if findings:
                    # existing finding found
                    finding = findings[0]
                    if finding.mitigated or finding.is_Mitigated:
                        finding.mitigated = None
                        finding.is_Mitigated = False
                        finding.mitigated_by = None
                        finding.active = True
                        finding.verified = verified
                        finding.save()
                        note = Notes(entry="Re-activated by %s re-upload." %
                                     scan_type,
                                     author=self.context['request'].user)
                        note.save()
                        endpoint_status = finding.endpoint_status.all()
                        for status in endpoint_status:
                            status.mitigated_by = None
                            status.mitigated_time = None
                            status.mitigated = False
                            status.last_modified = timezone.now()
                            status.save()
                        finding.notes.add(note)
                        reactivated_items.append(finding)
                        reactivated_count += 1
                    else:
                        unchanged_items.append(finding)
                        unchanged_count += 1
                else:
                    # no existing finding found
                    item.test = test
                    item.date = scan_date
                    item.reporter = self.context['request'].user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = self.context['request'].user
                    item.verified = verified
                    item.active = active
                    item.save(dedupe_option=False)
                    finding_added_count += 1
                    new_items.append(item)
                    finding = item

                    if hasattr(item, 'unsaved_req_resp'):
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=req_resp['req'],
                                burpResponseBase64=req_resp['resp'])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request and item.unsaved_response:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                if finding:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                        finding.endpoints.add(ep)
                    if endpoint_to_add:
                        finding.endpoints.add(endpoint_to_add)
                    if item.unsaved_tags:
                        finding.tags = item.unsaved_tags

                    finding.save(push_to_jira=push_to_jira)

            to_mitigate = set(original_items) - set(reactivated_items) - set(
                unchanged_items)
            mitigated_findings = []
            for finding in to_mitigate:
                if not finding.mitigated or not finding.is_Mitigated:
                    finding.mitigated = scan_date_time
                    finding.is_Mitigated = True
                    finding.mitigated_by = self.context['request'].user
                    finding.active = False

                    endpoint_status = finding.endpoint_status.all()
                    for status in endpoint_status:
                        status.mitigated_by = self.context['request'].user
                        status.mitigated_time = timezone.now()
                        status.mitigated = True
                        status.last_modified = timezone.now()
                        status.save()

                    finding.save(push_to_jira=push_to_jira)
                    note = Notes(entry="Mitigated by %s re-upload." %
                                 scan_type,
                                 author=self.context['request'].user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_findings.append(finding)
                    mitigated_count += 1

            untouched = set(unchanged_items) - set(to_mitigate)

            test.updated = max_safe([scan_date_time, test.updated])
            test.engagement.updated = max_safe(
                [scan_date_time, test.engagement.updated])

            if test.engagement.engagement_type == 'CI/CD':
                test.target_end = max_safe([scan_date_time, test.target_end])
                test.engagement.target_end = max_safe(
                    [scan_date, test.engagement.target_end])

            test.save()
            test.engagement.save()

            print(len(new_items))
            print(reactivated_count)
            print(mitigated_count)
            print(unchanged_count - mitigated_count)

            updated_count = mitigated_count + reactivated_count + len(
                new_items)
            if updated_count > 0:
                # new_items = original_items
                title = 'Updated ' + str(
                    updated_count) + " findings for " + str(
                        test.engagement.product) + ': ' + str(
                            test.engagement.name) + ': ' + str(test)
                create_notification(initiator=self.context['request'].user,
                                    event='scan_added',
                                    title=title,
                                    findings_new=new_items,
                                    findings_mitigated=mitigated_findings,
                                    findings_reactivated=reactivated_items,
                                    finding_count=updated_count,
                                    test=test,
                                    engagement=test.engagement,
                                    product=test.engagement.product,
                                    findings_untouched=untouched,
                                    url=reverse('view_test', args=(test.id, )))

        except SyntaxError:
            raise Exception("Parser SyntaxError")

        return test
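max_safe is not shown in this excerpt; the call sites above mix possibly-unset dates, which suggests a None-tolerant max. A plausible sketch, not necessarily the project's actual definition:

def max_safe(values):
    # hypothetical None-tolerant max: pick the latest value, ignoring missing ones
    present = [v for v in values if v is not None]
    return max(present) if present else None

assert max_safe([3, None, 5]) == 5
assert max_safe([None, None]) is None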
Example #31
0
def add_risk_acceptance(request, eid, fid=None):
    eng = get_object_or_404(Engagement, id=eid)
    finding = None
    if fid:
        finding = get_object_or_404(Finding, id=fid)

    if not eng.product.enable_full_risk_acceptance:
        raise PermissionDenied()

    if request.method == 'POST':
        form = RiskAcceptanceForm(request.POST, request.FILES)
        if form.is_valid():
            # first capture notes param as it cannot be saved directly as m2m
            notes = None
            if form.cleaned_data['notes']:
                notes = Notes(
                    entry=form.cleaned_data['notes'],
                    author=request.user,
                    date=timezone.now())
                notes.save()

            del form.cleaned_data['notes']

            try:
                # we sometimes see a weird exception here, but are unable to reproduce.
                # we add some logging in case it happens
                risk_acceptance = form.save()
            except Exception as e:
                logger.debug(vars(request.POST))
                logger.error(vars(form))
                logger.exception(e)
                raise

            # attach note to risk acceptance object now in database
            if notes:
                risk_acceptance.notes.add(notes)

            eng.risk_acceptance.add(risk_acceptance)

            findings = form.cleaned_data['accepted_findings']

            risk_acceptance = ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings)

            messages.add_message(
                request,
                messages.SUCCESS,
                'Risk acceptance saved.',
                extra_tags='alert-success')

            return redirect_to_return_url_or_else(request, reverse('view_engagement', args=(eid, )))
    else:
        risk_acceptance_title_suggestion = 'Accept: %s' % finding
        form = RiskAcceptanceForm(initial={'owner': request.user, 'name': risk_acceptance_title_suggestion})

    finding_choices = Finding.objects.filter(duplicate=False, test__engagement=eng).filter(NOT_ACCEPTED_FINDINGS_QUERY).order_by('title')

    form.fields['accepted_findings'].queryset = finding_choices
    if fid:
        form.fields['accepted_findings'].initial = {fid}
    product_tab = Product_Tab(eng.product.id, title="Risk Acceptance", tab="engagements")
    product_tab.setEngagement(eng)

    return render(request, 'dojo/add_risk_acceptance.html', {
                  'eng': eng,
                  'product_tab': product_tab,
                  'form': form
                  })
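NOT_ACCEPTED_FINDINGS_QUERY is imported from elsewhere and not shown. Judging from how it is chained onto the duplicate and engagement filters above, a plausible shape is a Q object that keeps only findings not already risk accepted; this is an assumption, not the project's actual definition:

from django.db.models import Q

# hypothetical shape, inferred from usage: exclude already-accepted findings
NOT_ACCEPTED_FINDINGS_QUERY = Q(risk_accepted=False)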
Example #32
0
    def process_parsed_findings(self,
                                test,
                                parsed_findings,
                                scan_type,
                                user,
                                active,
                                verified,
                                minimum_severity=None,
                                endpoints_to_add=None,
                                push_to_jira=None,
                                group_by=None,
                                now=timezone.now(),  # note: default is evaluated once, at definition time
                                service=None,
                                scan_date=None,
                                **kwargs):

        items = parsed_findings
        original_items = list(test.finding_set.all())
        new_items = []
        mitigated_count = 0
        finding_count = 0
        finding_added_count = 0
        reactivated_count = 0
        reactivated_items = []
        unchanged_count = 0
        unchanged_items = []

        logger.debug('starting reimport of %i items.',
                     len(items) if items else 0)
        from dojo.importers.reimporter.utils import (
            get_deduplication_algorithm_from_conf,
            match_new_finding_to_existing_finding, update_endpoint_status,
            reactivate_endpoint_status)
        deduplication_algorithm = get_deduplication_algorithm_from_conf(
            scan_type)

        i = 0
        logger.debug(
            'STEP 1: looping over findings from the reimported report and trying to match them to existing findings'
        )
        deduplicationLogger.debug(
            'Algorithm used for matching new findings to existing findings: %s',
            deduplication_algorithm)
        for item in items:
            # FIXME hack to remove when all parsers have unit tests for this attribute
            if item.severity.lower().startswith(
                    'info') and item.severity != 'Info':
                item.severity = 'Info'

            item.numerical_severity = Finding.get_numerical_severity(
                item.severity)

            if minimum_severity and (Finding.SEVERITIES[item.severity] >
                                     Finding.SEVERITIES[minimum_severity]):
                # finding's severity is below the configured threshold : ignoring the finding
                continue

            # existing findings may be from before we had component_name/version fields
            component_name = getattr(item, 'component_name', None)
            component_version = getattr(item, 'component_version', None)

            if not hasattr(item, 'test'):
                item.test = test

            item.service = service

            item.hash_code = item.compute_hash_code()
            deduplicationLogger.debug("item's hash_code: %s", item.hash_code)

            findings = match_new_finding_to_existing_finding(
                item, test, deduplication_algorithm, scan_type)

            deduplicationLogger.debug(
                'found %i findings matching with current new finding',
                len(findings))

            if findings:
                # existing finding found
                finding = findings[0]
                if finding.false_p or finding.out_of_scope or finding.risk_accepted:
                    logger.debug(
                        '%i: skipping existing finding (it is marked as false positive:%s and/or out of scope:%s or is risk accepted:%s): %i:%s:%s:%s',
                        i, finding.false_p, finding.out_of_scope,
                        finding.risk_accepted, finding.id, finding,
                        finding.component_name, finding.component_version)
                elif finding.mitigated or finding.is_mitigated:
                    logger.debug('%i: reactivating: %i:%s:%s:%s', i,
                                 finding.id, finding, finding.component_name,
                                 finding.component_version)
                    finding.mitigated = None
                    finding.is_mitigated = False
                    finding.mitigated_by = None
                    finding.active = True
                    finding.verified = verified

                    # existing findings may be from before we had component_name/version fields
                    finding.component_name = finding.component_name if finding.component_name else component_name
                    finding.component_version = finding.component_version if finding.component_version else component_version

                    # don't dedupe before endpoints are added
                    finding.save(dedupe_option=False)
                    note = Notes(entry="Re-activated by %s re-upload." %
                                 scan_type,
                                 author=user)
                    note.save()

                    endpoint_statuses = finding.endpoint_status.all()

                    # Determine if this can be run async
                    if settings.ASYNC_FINDING_IMPORT:
                        chunk_list = importer_utils.chunk_list(
                            endpoint_statuses)
                        # If there is only one chunk, then do not bother with async
                        if len(chunk_list) < 2:
                            reactivate_endpoint_status(endpoint_statuses,
                                                       sync=True)
                        else:
                            logger.debug('IMPORT_SCAN: Split endpoints into ' +
                                         str(len(chunk_list)) + ' chunks of ' +
                                         str(chunk_list[0]))
                            # First kick off all the workers
                            for endpoint_status_list in chunk_list:
                                reactivate_endpoint_status(endpoint_status_list,
                                                           sync=False)
                    else:
                        reactivate_endpoint_status(endpoint_statuses,
                                                   sync=True)

                    finding.notes.add(note)
                    reactivated_items.append(finding)
                    reactivated_count += 1
                else:
                    # existing findings may be from before we had component_name/version fields
                    logger.debug('%i: updating existing finding: %i:%s:%s:%s',
                                 i, finding.id, finding,
                                 finding.component_name,
                                 finding.component_version)
                    if not finding.component_name or not finding.component_version:
                        finding.component_name = finding.component_name if finding.component_name else component_name
                        finding.component_version = finding.component_version if finding.component_version else component_version
                        finding.save(dedupe_option=False)

                    unchanged_items.append(finding)
                    unchanged_count += 1
                if finding.dynamic_finding:
                    logger.debug(
                        "Re-import found an existing dynamic finding for this new finding. Checking the status of endpoints"
                    )
                    update_endpoint_status(finding, item, user)
            else:
                # no existing finding found
                item.reporter = user
                item.last_reviewed = timezone.now()
                item.last_reviewed_by = user
                item.verified = verified
                item.active = active

                # if scan_date was provided, override value from parser
                if scan_date:
                    item.date = scan_date

                # Save it. Don't dedupe before endpoints are added.
                item.save(dedupe_option=False)
                logger.debug(
                    '%i: reimport created new finding as no existing finding matched: %i:%s:%s:%s',
                    i, item.id, item, item.component_name,
                    item.component_version)

                # only new items get auto grouped to avoid confusion around already existing items that are already grouped
                if settings.FEATURE_FINDING_GROUPS and group_by:
                    finding_helper.add_finding_to_auto_group(item, group_by)

                finding_added_count += 1
                new_items.append(item)
                finding = item

                if hasattr(item, 'unsaved_req_resp'):
                    for req_resp in item.unsaved_req_resp:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=base64.b64encode(
                                req_resp["req"].encode("utf-8")),
                            burpResponseBase64=base64.b64encode(
                                req_resp["resp"].encode("utf-8")))
                        burp_rr.clean()
                        burp_rr.save()

                if item.unsaved_request and item.unsaved_response:
                    burp_rr = BurpRawRequestResponse(
                        finding=finding,
                        burpRequestBase64=base64.b64encode(
                            item.unsaved_request.encode()),
                        burpResponseBase64=base64.b64encode(
                            item.unsaved_response.encode()))
                    burp_rr.clean()
                    burp_rr.save()

            # for existing findings: make sure endpoints are present or created
            if finding:
                finding_count += 1
                if settings.ASYNC_FINDING_IMPORT:
                    importer_utils.chunk_endpoints_and_disperse(
                        finding, test, item.unsaved_endpoints)
                else:
                    importer_utils.add_endpoints_to_unsaved_finding(
                        finding, test, item.unsaved_endpoints, sync=True)

                if endpoints_to_add:
                    if settings.ASYNC_FINDING_IMPORT:
                        importer_utils.chunk_endpoints_and_disperse(
                            finding, test, endpoints_to_add)
                    else:
                        importer_utils.add_endpoints_to_unsaved_finding(
                            finding, test, endpoints_to_add, sync=True)

                if item.unsaved_tags:
                    finding.tags = item.unsaved_tags

                if item.unsaved_files:
                    for unsaved_file in item.unsaved_files:
                        data = base64.b64decode(unsaved_file.get('data'))
                        title = unsaved_file.get('title', '<No title>')
                        file_upload, file_upload_created = FileUpload.objects.get_or_create(
                            title=title, )
                        file_upload.file.save(title, ContentFile(data))
                        file_upload.save()
                        finding.files.add(file_upload)

                # existing findings may be from before we had component_name/version fields
                finding.component_name = finding.component_name if finding.component_name else component_name
                finding.component_version = finding.component_version if finding.component_version else component_version

                # finding = new finding or existing finding still in the upload report
                # to avoid pushing a finding group multiple times, we push those outside of the loop
                if settings.FEATURE_FINDING_GROUPS and finding.finding_group:
                    finding.save()
                else:
                    finding.save(push_to_jira=push_to_jira)

        to_mitigate = set(original_items) - set(reactivated_items) - set(
            unchanged_items)
        untouched = set(unchanged_items) - set(to_mitigate)

        if settings.FEATURE_FINDING_GROUPS and push_to_jira:
            for finding_group in {
                    finding.finding_group
                    for finding in reactivated_items + unchanged_items + new_items
                    if finding.finding_group is not None}:
                jira_helper.push_to_jira(finding_group)
        sync = kwargs.get('sync', False)
        if not sync:
            serialized_new_items = [
                serializers.serialize('json', [finding]) for finding in new_items]
            serialized_reactivated_items = [
                serializers.serialize('json', [finding]) for finding in reactivated_items]
            serialized_to_mitigate = [
                serializers.serialize('json', [finding]) for finding in to_mitigate]
            serialized_untouched = [
                serializers.serialize('json', [finding]) for finding in untouched]
            return serialized_new_items, serialized_reactivated_items, serialized_to_mitigate, serialized_untouched

        return new_items, reactivated_items, to_mitigate, untouched
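chunk_list comes from importer_utils and is not shown here; the async branch only needs it to split the endpoint statuses into fixed-size batches, one per worker. A minimal sketch (the chunk size is an assumption for illustration):

def chunk_list(items, chunk_size=100):
    # split a sequence into consecutive batches of at most chunk_size elements
    return [items[i:i + chunk_size] for i in range(0, len(items), chunk_size)]

assert chunk_list(list(range(5)), chunk_size=2) == [[0, 1], [2, 3], [4]]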
Example #33
0
def handle_uploaded_cvff(request, f):
    output = io.StringIO()
    for chunk in f.chunks():
        # uploaded chunks are bytes; decode them before writing to the text buffer
        output.write(chunk.decode('utf-8'))

    csvString = output.getvalue().splitlines(True)[1:]

    inputCSV = csv.reader(csvString, quoting=csv.QUOTE_NONNUMERIC)
    logger.error('Before moving into loop')

    isHeader = 1
    indexOfResolution = 0
    for row in inputCSV:
        if isHeader == 1:
            for col in row:
                if str(col) == "WSO2_resolution":
                    isHeader = 0
                    break
                indexOfResolution = indexOfResolution + 1
        try:
            finding = Finding.objects.filter(pk=int(row[0]))[0]
            logger.error('Finding note count for id ' + str(row[0]) + ' is : ' + str(finding.notes.count()))
            status = str(row[indexOfResolution]).strip().split("(")[0]
            if finding.notes.count() == 0:
                note = Notes(entry="[ " + status + " ] ~ " + row[indexOfResolution + 2], author=request.user)
                note.save()
                finding.notes.add(note)
                logger.info('Adding new note')
            else:
                note = finding.notes.all()[0]
                note.entry = "[ " + status + " ] ~ " + row[indexOfResolution + 2]
                note.author = request.user
                note.save()
                logger.info('Updating existing note ' + str(note.id))

            status = status.replace('.','').replace(',','').replace(' ','').lower()

            finding.false_p = False
            finding.verified = False
            finding.active = False
            finding.out_of_scope = False
            finding.save()

            if status == 'falsepositive':
                finding.false_p = True
                finding.save()
            elif status == 'notathreat':
                finding.verified = True
                finding.save()
            elif status in ('needtobefixed', 'needtofix', 'truepositive'):
                finding.active = True
                finding.save()
            elif status == 'alreadymitigated':
                finding.out_of_scope = True
                finding.save()
            elif status == 'notapplicable':
                finding.under_review = True
                finding.save()
            #elif status == 'cannotreproduce':
            #    finding.under_review = True
            #    finding.save()
            else:
                logger.error('Unknown status for : ' + str(row[0]) + ". Status is : " + status)
        except Exception as e:
            logger.error(str(e))
            logger.error('Error in processing row: ' + str(row[0]) + ". Skipping.")
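The elif chain above maps each normalized resolution string onto exactly one status flag. A table-driven equivalent makes the mapping easier to audit; this is an illustrative sketch, not a drop-in replacement:

STATUS_FLAGS = {
    'falsepositive': 'false_p',
    'notathreat': 'verified',
    'needtobefixed': 'active',
    'needtofix': 'active',
    'truepositive': 'active',
    'alreadymitigated': 'out_of_scope',
    'notapplicable': 'under_review',
}

def apply_status(finding, status):
    flag = STATUS_FLAGS.get(status)
    if flag is None:
        return False  # unknown status: caller logs and skips the row
    setattr(finding, flag, True)
    finding.save()
    return True

class _FakeFinding:  # stand-in so the sketch runs without a database
    def save(self):
        pass

f = _FakeFinding()
assert apply_status(f, 'truepositive') and f.active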
Example #34
0
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t, active, verified)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'
                        item.severity = sev

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      description=item.description
                                                      )
                    else:
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      )

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = scan_date
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save(dedupe_option=False)
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                if scan_type == "Arachni Scan":
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"],
                                        burpResponseBase64=req_resp["resp"],
                                    )
                                else:
                                    burp_rr = BurpRawRequestResponse(
                                        finding=item,
                                        burpRequestBase64=req_resp["req"].encode("utf-8"),
                                        burpResponseBase64=req_resp["resp"].encode("utf-8"),
                                    )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=item.unsaved_request.encode("utf-8"),
                                                             burpResponseBase64=item.unsaved_response.encode("utf-8"),
                                                             )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                         host=endpoint.host,
                                                                         path=endpoint.path,
                                                                         query=endpoint.query,
                                                                         fragment=endpoint.fragment,
                                                                         product=t.engagement.product)
                            find.endpoints.add(ep)

                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags

                    find.save()
                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding',
                                                                                       'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding',
                                                                 'added') + ' that are new to the scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding',
                                                                 'reactivated') + ' that are back in the scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding',
                                                                 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')

                create_notification(event='results_added',
                                    title=str(finding_count) + " findings for " + engagement.product.name,
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=reverse('view_test', args=(t.id,)))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    product_tab = Product_Tab(engagement.product.id, title="Re-upload a %s" % scan_type, tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'product_tab': product_tab,
                   'eid': engagement.id,
                   'additional_message': additional_message,
                   })
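
Note: the view above relies on a message() helper that is not part of this snippet. Below is a minimal sketch of what such a helper might look like, inferred purely from the call sites above (the name and signature come from those calls; the body is an assumption, not the project's actual implementation):

def message(count, noun, verb):
    # Hypothetical helper inferred from calls like
    # message(finding_count, 'finding', 'processed');
    # builds strings such as "3 findings processed".
    return '%d %s%s %s' % (count, noun, 's' if count != 1 else '', verb)
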
Example #35
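    # Snippet context (assumed): this save() appears to live on a Django REST
    # Framework serializer. It relies on module-level imports of datetime and
    # django.utils.timezone, the Finding, Notes, Endpoint and
    # BurpRawRequestResponse models, and the import_parser_factory helper.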
    def save(self):
        data = self.validated_data
        test = data['test']
        scan_type = data['scan_type']
        min_sev = data['minimum_severity']
        scan_date = data['scan_date']
        verified = data['verified']
        active = data['active']

        try:
            parser = import_parser_factory(data['file'],
                                           test,
                                           data['scan_type'],)
        except ValueError:
            raise Exception("Parser ValueError")

        try:
            items = parser.items
            original_items = list(test.finding_set.all())
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0

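            # For each parsed item: skip anything below the minimum severity,
            # re-activate a matching finding that was previously mitigated,
            # and otherwise save the item as a brand-new finding (including
            # any captured Burp request/response pairs).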
            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

                if (Finding.SEVERITIES[sev] >
                        Finding.SEVERITIES[min_sev]):
                    continue

                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                        description=item.description).all()
                else:
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev)).all()

                if findings:
                    finding = findings[0]
                    if finding.mitigated:
                        finding.mitigated = None
                        finding.mitigated_by = None
                        finding.active = True
                        finding.verified = verified
                        finding.save()
                        note = Notes(
                            entry="Re-activated by %s re-upload." % scan_type,
                            author=self.context['request'].user)
                        note.save()
                        finding.notes.add(note)
                        reactivated_count += 1
                    new_items.append(finding)
                else:
                    item.test = test
                    item.date = test.target_start
                    item.reporter = self.context['request'].user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = self.context['request'].user
                    item.verified = verified
                    item.active = active
                    item.save()
                    finding_added_count += 1
                    new_items.append(item)  # the object, not item.id: original_items holds Finding objects, so the set difference below needs objects too
                    finding = item

                    if hasattr(item, 'unsaved_req_resp'):
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=req_resp['req'],
                                burpResponseBase64=req_resp['resp'])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request and item.unsaved_response:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                if finding:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                        finding.endpoints.add(ep)

                    # if item.unsaved_tags:
                    #    finding.tags = item.unsaved_tags

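            # Findings that existed before this re-upload but are absent from
            # the new results are treated as fixed and mitigated below.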
            to_mitigate = set(original_items) - set(new_items)
            for finding in to_mitigate:
                finding.mitigated = datetime.datetime.combine(
                    scan_date,
                    timezone.now().time())
                finding.mitigated_by = self.context['request'].user
                finding.active = False
                finding.save()
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=self.context['request'].user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        except SyntaxError:
            raise Exception("Parser SyntaxError")

        return test
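
For context, a serializer save() like the one above would typically be driven from a Django REST Framework view roughly as follows. This is a minimal sketch; ReImportScanSerializer is an assumed name for the enclosing class, and the request must be passed in the context because save() reads self.context['request'].user:

# Hypothetical DRF usage; the serializer class name is an assumption.
serializer = ReImportScanSerializer(
    data=request.data,
    context={'request': request})  # save() takes the user from this context
serializer.is_valid(raise_exception=True)
test = serializer.save()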