Example #1
def add_template(request):
    form = FindingTemplateForm()
    if request.method == 'POST':
        form = FindingTemplateForm(request.POST)
        if form.is_valid():
            template = form.save(commit=False)
            template.numerical_severity = Finding.get_numerical_severity(template.severity)
            template.save()
            tags = request.POST.getlist('tags')
            t = ", ".join(tags)
            template.tags = t
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Template created successfully.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('templates'))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'Template form has errors, please revise and try again.',
                                 extra_tags='alert-danger')
    add_breadcrumb(title="Add Template", top_level=False, request=request)
    return render(request, 'dojo/add_template.html',
                  {'form': form,
                   'name': 'Add Template'
                   })
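The examples in this listing pass the human-readable severity through Finding.get_numerical_severity before saving a Finding. The excerpts never show the helper itself, so here is a minimal sketch of what it presumably does, assuming DefectDojo's convention of sortable S-codes; the exact 'S0'-'S5' values are an assumption, not taken from the code above.

SEVERITY_CODES = {
    'Critical': 'S0',
    'High': 'S1',
    'Medium': 'S2',
    'Low': 'S3',
    'Info': 'S4',
}

def get_numerical_severity(severity):
    # Assumed mapping; unknown or missing labels fall through to the least severe code.
    return SEVERITY_CODES.get(severity, 'S5')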
Example #2
    def get_finding_from_vulnerability(self, vulnerability, filename, test):
        title = '{0} | {1}'.format(filename,
                                   self.get_field_value(vulnerability, 'name'))
        severity = self.get_field_value(vulnerability, 'severity')
        finding_detail = '{0}\n\n{1}'.format(
            self.get_field_value(vulnerability, 'cwe'),
            self.get_field_value(vulnerability, 'description'))
        reference_detail = None

        references_node = vulnerability.find(self.namespace + 'references')

        if references_node is not None:
            reference_detail = ''
            for reference_node in references_node.findall(
                    self.namespace + 'reference'):
                reference_detail += 'name: {0}\n' \
                                    'source: {1}\n' \
                                    'url: {2}\n\n'.format(
                    self.get_field_value(reference_node, 'name'),
                    self.get_field_value(reference_node, 'source'),
                    self.get_field_value(reference_node, 'url'))

        return Finding(
            title=title,
            file_path=filename,
            test=test,
            active=False,
            verified=False,
            description=finding_detail,
            severity=severity,
            numerical_severity=Finding.get_numerical_severity(severity),
            references=reference_detail
        )
Example #3
def edit_template(request, tid):
    template = get_object_or_404(Finding_Template, id=tid)
    form = FindingTemplateForm(instance=template)
    if request.method == 'POST':
        form = FindingTemplateForm(request.POST, instance=template)
        if form.is_valid():
            template = form.save(commit=False)
            template.numerical_severity = Finding.get_numerical_severity(template.severity)
            template.save()
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Template updated successfully.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('templates'))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'Template form has errors, please revise and try again.',
                                 extra_tags='alert-danger')
    add_breadcrumb(title="Edit Template", top_level=False, request=request)
    return render(request, 'dojo/add_template.html',
                  {'form': form,
                   'name': 'Edit Template',
                   'template': template,
                   })
Example #4
def severity_value(value):
    try:
        if get_system_setting('s_finding_severity_naming'):
            value = Finding.get_numerical_severity(value)
    except:
        pass

    return value
Example #5
    def __init__(self, filename, test):
        data = json.load(filename)
        dupes = dict()
        find_date = None
        if "generated_at" in data:
            find_date = datetime.strptime(data["generated_at"], '%Y-%m-%dT%H:%M:%SZ')

        for item in data["results"]:
            categories = ''
            language = ''
            mitigation = ''
            impact = ''
            references = ''
            findingdetail = ''
            title = ''
            group = ''
            status = ''

            title = "Test Name: " + item["test_name"] + " Test ID: " + item["test_id"]

            ###### Finding details information ######
            findingdetail += "Filename: " + item["filename"] + "\n"
            findingdetail += "Line number: " + str(item["line_number"]) + "\n"
            findingdetail += "Issue Confidence: " + item["issue_confidence"] + "\n\n"
            findingdetail += "Code:\n"
            findingdetail += item["code"] + "\n"

            sev = item["issue_severity"]
            mitigation = item["issue_text"]
            references = item["test_id"]

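            # Deduplication key: test name/ID plus file name and line number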
            dupe_key = title + item["filename"] + str(item["line_number"])

            if dupe_key in dupes:
                find = dupes[dupe_key]
            else:
                dupes[dupe_key] = True

                find = Finding(title=title,
                               test=test,
                               active=False,
                               verified=False,
                               description=findingdetail,
                               severity=sev.title(),
                               numerical_severity=Finding.get_numerical_severity(sev),
                               mitigation=mitigation,
                               impact=impact,
                               references=references,
                               file_path=item["filename"],
                               line=item["line_number"],
                               url='N/A',
                               date=find_date,
                               static_finding=True)

                dupes[dupe_key] = find
                findingdetail = ''

        self.items = dupes.values()
Example #6
    def __init__(self, filename, test):
        self.chain = None
        self.column_names = dict()
        self.dupes = dict()
        self.items = ()
        self.create_chain()

        if filename is None:
            self.items = ()
            return

        content = filename.read()

        row_number = 0
        reader = csv.reader(StringIO.StringIO(content), delimiter=',', quotechar='"')
        for row in reader:
            finding = Finding(test=test)

            if row_number == 0:
                self.read_column_names(row)
                row_number += 1
                continue

            column_number = 0
            for column in row:
                self.chain.process_column(self.column_names[column_number], column, finding)
                column_number += 1

            if finding is not None and row_number > 0:
                if finding.url is None:
                    finding.url = ""
                if finding.title is None:
                    finding.title = ""
                if finding.description is None:
                    finding.description = ""

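                # Deduplication key: MD5 of URL, severity, title and description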
                key = hashlib.md5(finding.url + '|' + finding.severity + '|' + finding.title + '|' + finding.description).hexdigest()

                if key not in self.dupes:
                    self.dupes[key] = finding

            row_number += 1

        self.items = self.dupes.values()
Example #7
def promote_to_finding(request, fid):
    finding = get_object_or_404(Stub_Finding, id=fid)
    test = finding.test
    form_error = False
    form = PromoteFindingForm(initial={'title': finding.title,
                                       'date': finding.date,
                                       'severity': finding.severity,
                                       'description': finding.description,
                                       'test': finding.test,
                                       'reporter': finding.reporter})
    if request.method == 'POST':
        form = PromoteFindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)

            new_finding.active = True
            new_finding.false_p = False
            new_finding.duplicate = False
            new_finding.is_template = False
            new_finding.mitigated = None
            new_finding.verified = True
            new_finding.out_of_scope = False

            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()

            finding.delete()

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding promoted successfully.',
                                 extra_tags='alert-success')

            return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
        else:
            if 'endpoints' in form.cleaned_data:
                form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
            else:
                form.fields['endpoints'].queryset = Endpoint.objects.none()
            form_error = True
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')
    add_breadcrumb(parent=test, title="Promote Finding", top_level=False, request=request)
    return render(request, 'dojo/promote_to_finding.html',
                  {'form': form,
                   'test': test,
                   'stub_finding': finding,
                   'form_error': form_error,
                   })
Example #8
    def __init__(self, filename, test):
        data = json.load(filename)
        dupes = dict()
        find_date = parser.parse(data['scan_info']['end_time'])

        for item in data['warnings']:
            categories = ''
            language = ''
            mitigation = ''
            impact = ''
            references = ''
            findingdetail = ''
            title = ''
            group = ''
            status = ''

            title = item['warning_type'] + '. ' + item['message']

            # Finding details information
            findingdetail += 'Filename: ' + item['file'] + '\n'
            findingdetail += 'Line number: ' + str(item['line'] or '') + '\n'
            findingdetail += 'Issue Confidence: ' + item['confidence'] + '\n\n'
            findingdetail += 'Code:\n'
            findingdetail += (item['code'] or '') + '\n'

            sev = 'Medium'
            mitigation = 'coming soon'
            references = item['link']

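            # The report's per-warning fingerprint is used as the deduplication key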
            dupe_key = item['fingerprint']

            if dupe_key in dupes:
                find = dupes[dupe_key]
            else:
                dupes[dupe_key] = True

                find = Finding(
                    title=title,
                    test=test,
                    active=False,
                    verified=False,
                    description=findingdetail,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    mitigation=mitigation,
                    impact=impact,
                    references=references,
                    file_path=item['file'],
                    line=item['line'],
                    url='N/A',
                    date=find_date,
                    static_finding=True)

                dupes[dupe_key] = find

        self.items = dupes.values()
Example #9
    def __init__(self, filename, test):
        dupes = dict()
        self.items = ()

        if filename is None:
            self.items = ()
            return

        df = pd.read_csv(filename, header=0)

        for i, row in df.iterrows():
            # Vulnerability Name,Vulnerability ID,Category,Rule Name,Severity,Status,Number of Events,First Seen,Last Seen,Application Name,Application ID,Application Code,CWE ID,Request Method,Request Port,Request Protocol,Request Version,Request URI,Request Qs,Request Body
            cwe = self.format_cwe(df.ix[i, 'CWE ID'])
            title = df.ix[i, 'Rule Name']
            category = df.ix[i, 'Category']
            description = self.format_description(df, i)
            severity = df.ix[i, 'Severity']
            if severity == "Note":
                severity = "Info"
            mitigation = "N/A"
            impact = "N/A"
            references = "N/A"

            dupe_key = hashlib.md5(category + '|' + str(cwe) + '|' + title + '|').hexdigest()

            if dupe_key in dupes:
                finding = dupes[dupe_key]
                if finding.description:
                    finding.description = finding.description + "\nVulnerability ID: " + \
                        df.ix[i, 'Vulnerability ID'] + "\n" + \
                        df.ix[i, 'Vulnerability Name'] + "\n"
                self.process_endpoints(finding, df, i)
                dupes[dupe_key] = finding
            else:
                dupes[dupe_key] = True

                finding = Finding(title=title,
                                  cwe=int(cwe),
                                  test=test,
                                  active=False,
                                  verified=False,
                                  description=description,
                                  severity=severity,
                                  numerical_severity=Finding.get_numerical_severity(
                                      severity),
                                  mitigation=mitigation,
                                  impact=impact,
                                  references=references,
                                  url='N/A',
                                  dynamic_finding=True)

                dupes[dupe_key] = finding
                self.process_endpoints(finding, df, i)

        self.items = dupes.values()
Example #10
    def get_items(self, tree, test):
        """
        @return items A list of Host instances
        """

        items = list()
        for node in tree.findall('site'):
            site = Site(node)
            main_host = Endpoint(host=site.ip + (site.port if site.port is not None else ""))
            for item in site.items:
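                # riskdesc starts with the risk level; keep only the first token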
                severity = item.riskdesc.split(' ', 1)[0]
                references = ''
                for ref in item.ref:
                    references += ref + "\n"

                find = Finding(title=item.name,
                               cwe=item.cwe,
                               description=item.desc,
                               test=test,
                               severity=severity,
                               mitigation=item.resolution,
                               references=references,
                               active=False,
                               verified=False,
                               false_p=False,
                               duplicate=False,
                               out_of_scope=False,
                               mitigated=None,
                               impact="No impact provided",
                               numerical_severity=Finding.get_numerical_severity(severity))

                find.unsaved_endpoints = [main_host]
                for i in item.items:
                    parts = urlparse.urlparse(i['uri'])
                    find.unsaved_endpoints.append(Endpoint(protocol=parts.scheme,
                                                           host=parts.netloc,
                                                           path=parts.path,
                                                           query=parts.query,
                                                           fragment=parts.fragment,
                                                           product=test.engagement.product))
                items.append(find)
        return items
Example #11
    def to_finding(self, test):

        return Finding(
                title=self.title,
                test=test,
                active=False,
                verified=False,
                description=self.get_finding_detail(),
                severity=self.get_finding_severity(),
                numerical_severity=Finding.get_numerical_severity(self.get_finding_severity())
        )
Example #12
    def __init__(self, filename, test):
        bug_patterns = dict()
        dupes = dict()

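        # Numeric priority values from the report are mapped to severity labels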
        SEVERITY = {
            '1': 'High',
            '2': 'Medium',
            '3': 'Low'
        }

        tree = ET.parse(filename)
        root = tree.getroot()

        for pattern in root.findall('BugPattern'):
            plain_pattern = re.sub(r'<[b-z/]*?>|<a|</a>|href=', '', ET.tostring(pattern.find('Details'), method='text'))
            bug_patterns[pattern.get('type')] = plain_pattern

        for bug in root.findall('BugInstance'):
            desc = ''
            for message in bug.itertext():
                desc += message

            dupe_key = bug.get('instanceHash')

            title = bug.find('ShortMessage').text
            cwe = bug.get('cweid', default=0)
            severity = SEVERITY[bug.get('priority')]
            description = desc
            mitigation = bug_patterns[bug.get('type')]
            impact = 'N/A'
            references = 'N/A'

            if dupe_key in dupes:
                finding = dupes[dupe_key]
            else:
                finding = Finding(
                    title=title,
                    cwe=cwe,
                    severity=severity,
                    description=description,
                    mitigation=mitigation,
                    impact=impact,
                    references=references,
                    test=test,
                    active=False,
                    verified=False,
                    numerical_severity=Finding.get_numerical_severity(severity),
                    static_finding=True
                )
                dupes[dupe_key] = finding

        self.items = dupes.values()
Example #13
    def create():
        settings = System_Settings()
        settings.save()

        p = Product()
        p.Name = 'Test Product'
        p.Description = 'Product for Testing Apply Template functionality'
        p.save()

        e = Engagement()
        e.product = p
        e.target_start = timezone.now()
        e.target_end = e.target_start + datetime.timedelta(days=5)
        e.save()

        tt = Test_Type()
        tt.name = 'Temporary Test'
        tt.save()

        t = Test()
        t.engagement = e
        t.test_type = tt
        t.target_start = timezone.now()
        t.target_end = t.target_start + datetime.timedelta(days=5)
        t.save()

        user = FindingTemplateTestUtil.create_user(True)

        f = Finding()
        f.title = 'Finding for Testing Apply Template functionality'
        f.severity = 'High'
        f.description = 'Finding for Testing Apply Template Functionality'
        f.test = t
        f.reporter = user
        f.last_reviewed = timezone.now()
        f.last_reviewed_by = user
        f.save()
Example #14
def add_findings(request, tid):
    test = Test.objects.get(id=tid)
    findings = Finding.objects.filter(is_template=True).distinct()
    form_error = False
    form = AddFindingForm(initial={'date': datetime.now(tz=localtz).date()})
    if request.method == 'POST':
        form = AddFindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = datetime.now(tz=localtz)
                new_finding.mitigated_by = request.user

            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding added successfully.',
                                 extra_tags='alert-success')
            if '_Finished' in request.POST:
                return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
            else:
                return HttpResponseRedirect(reverse('add_findings', args=(test.id,)))
        else:
            if 'endpoints' in form.cleaned_data:
                form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
            else:
                form.fields['endpoints'].queryset = Endpoint.objects.none()
            form_error = True
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')
    add_breadcrumb(parent=test, title="Add Finding", top_level=False, request=request)
    return render(request, 'dojo/add_findings.html',
                  {'form': form,
                   'findings': findings,
                   'test': test,
                   'temp': False,
                   'tid': tid,
                   'form_error': form_error,
                   })
Example #15
    def get_items(self, tree, test):
        items = list()
        tables = list(tree.iter("tbody"))
        if len(tables) != 3:
            raise Exception('Parser ValueError')
        vulnerabilities_table = list(tables[1].iter("tr"))
        rules_table = list(tables[2].iter("tr"))

        for vuln in vulnerabilities_table:
            try:
                vuln_properties = list(vuln.iter("td"))
                vuln_rule_name = list(vuln_properties[0].iter("a"))[0].text
                vuln_severity = self.convert_sonar_severity(vuln_properties[1].text)
                vuln_title = vuln_properties[2].text
                vuln_mitigation = vuln_properties[3].text
            except:
                raise Exception('Parser ValueError')
            if vuln_title is None or vuln_mitigation is None:
                raise Exception('Parser ValueError')

            vuln_details = self.get_rule_details(vuln_rule_name, rules_table)
            if vuln_details is not None:
                vuln_description = self.get_description(vuln_details)
                vuln_references = self.get_references(vuln_details)
                vuln_cwe = self.get_cwe(vuln_references)
            else:
                vuln_description = "No description provided"
                vuln_references = ""
                vuln_cwe = 0

            find = Finding(title=vuln_title,
                           cwe=vuln_cwe,
                           description=vuln_description,
                           test=test,
                           severity=vuln_severity,
                           mitigation=vuln_mitigation,
                           references=vuln_references,
                           active=False,
                           verified=False,
                           false_p=False,
                           duplicate=False,
                           out_of_scope=False,
                           mitigated=None,
                           impact="No impact provided",
                           numerical_severity=Finding.get_numerical_severity(vuln_severity))
            items.append(find)
        return items
Example #16
    def __init__(self, filename, test):
        data = json.load(filename)
        dupes = dict()

        for item in data["Vulnerabilities"]:
            categories = ''
            language = ''
            mitigation = ''
            impact = ''
            references = ''
            findingdetail = ''
            title = ''
            group = ''
            status = ''

            title = item["Name"]
            findingdetail = cleantags(item["Description"])
            cwe = item["Classification"]["Cwe"]
            sev = item["Severity"]
            mitigation = cleantags(item["RemedialProcedure"])
            references = cleantags(item["RemedyReferences"])
            url = item["Url"]
            impact = cleantags(item["Impact"])
            dupe_key = title + item["Name"] + item["Url"]

            if dupe_key in dupes:
                find = dupes[dupe_key]
            else:
                dupes[dupe_key] = True

                find = Finding(title=title,
                               test=test,
                               active=False,
                               verified=False,
                               description=findingdetail,
                               severity=sev.title(),
                               numerical_severity=Finding.get_numerical_severity(sev),
                               mitigation=mitigation,
                               impact=impact,
                               references=references,
                               url=url,
                               cwe=cwe,
                               static_finding=True)
                dupes[dupe_key] = find
                findingdetail = ''

        self.items = dupes.values()
Example #17
    def __init__(self, filename, test):
        find_date = datetime.now()
        dupes = {}

        df = pd.read_csv(filename, header=0, error_bad_lines=False)

        for i, row in df.iterrows():
            profile = df.ix[i, 'PROFILE']
            account = df.ix[i, 'ACCOUNT_NUM']
            region = df.ix[i, 'REGION']
            title_id = df.ix[i, 'TITLE_ID']
            result = df.ix[i, 'RESULT']
            scored = df.ix[i, 'SCORED']
            level = df.ix[i, 'LEVEL']
            title_text = df.ix[i, 'TITLE_TEXT']
            title_text = re.sub(r'\[.*\]\s', '', title_text)
            title_text_trunc = Truncator(title_text).words(8)
            notes = df.ix[i, 'NOTES']

            sev = self.getCriticalityRating(result, level)
            description = "**Region:** " + region + "\n\n" + notes + "\n"
            dupe_key = sev + title_text
            if dupe_key in dupes:
                find = dupes[dupe_key]
                if description is not None:
                    find.description += description + "\n\n"
            else:
                find = Finding(title=title_text_trunc,
                               cwe=1032,  # Security Configuration Weaknesses, would like to fine tune
                               test=test,
                               active=False,
                               verified=False,
                               description="**AWS Account:** " + str(account) + "\n**Control:** " + title_text + "\n**CIS Control:** " + str(title_id) + ", " + level + "\n\n" + description,
                               severity=sev,
                               numerical_severity=Finding.get_numerical_severity(sev),
                               references=None,
                               date=find_date,
                               dynamic_finding=True)
                dupes[dupe_key] = find
        self.items = dupes.values()

        if account:
            test_description = ""
            test_description = "%s\n* **AWS Account:** %s\n" % (test_description, str(account))
            test.description = test_description
            test.save()
Example #18
def add_temp_finding(request, tid, fid):
    test = get_object_or_404(Test, id=tid)
    finding = get_object_or_404(Finding, id=fid)
    findings = Finding.objects.all()
    if request.method == 'POST':
        form = FindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            new_finding.date = datetime.today()
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = datetime.now(tz=localtz)
                new_finding.mitigated_by = request.user

            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Temp finding added successfully.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')

    else:
        form = FindingForm(instance=finding, initial={'is_template': False, 'active': False, 'verified': False,
                                                      'false_p': False, 'duplicate': False, 'out_of_scope': False})

    add_breadcrumb(parent=test, title="Add Finding", top_level=False, request=request)
    return render(request, 'dojo/add_findings.html',
                  {'form': form,
                   'findings': findings,
                   'temp': True,
                   'fid': finding.id,
                   'tid': test.id,
                   'test': test,
                   })
Example #19
def edit_finding(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    form = FindingForm(instance=finding)
    form_error = False
    if request.method == 'POST':
        form = FindingForm(request.POST, instance=finding)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = finding.test
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = datetime.now(tz=localtz)
                new_finding.mitigated_by = request.user
            if new_finding.active is True:
                new_finding.false_p = False
                new_finding.mitigated = None
                new_finding.mitigated_by = None

            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.last_reviewed = datetime.now(tz=localtz)
            new_finding.last_reviewed_by = request.user
            new_finding.save()
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding saved successfully.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_finding', args=(new_finding.id,)))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'There appear to be errors on the form; please correct them below.',
                                 extra_tags='alert-danger')
            form_error = True

    if form_error and 'endpoints' in form.cleaned_data:
        form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
    else:
        form.fields['endpoints'].queryset = finding.endpoints.all()

    add_breadcrumb(parent=finding, title="Edit", top_level=False, request=request)
    return render(request, 'dojo/edit_findings.html',
                  {'form': form,
                   'finding': finding,
                   })
Example #20
def finding_bulk_update(request, tid):
    test = get_object_or_404(Test, id=tid)
    form = FindingBulkUpdateForm(request.POST)

    if request.method == "POST":
        finding_to_update = request.POST.getlist('finding_to_update')
        if request.POST.get('delete_bulk_findings') and finding_to_update:
            finds = Finding.objects.filter(test=test, id__in=finding_to_update)
            product = Product.objects.get(engagement__test=test)
            finds.delete()
            calculate_grade(product)
        else:
            if form.is_valid() and finding_to_update:
                finding_to_update = request.POST.getlist('finding_to_update')
                finds = Finding.objects.filter(test=test, id__in=finding_to_update)
                if form.cleaned_data['severity']:
                    finds.update(severity=form.cleaned_data['severity'],
                                 numerical_severity=Finding.get_numerical_severity(form.cleaned_data['severity']),
                                 last_reviewed=timezone.now(),
                                 last_reviewed_by=request.user)
                if form.cleaned_data['status']:
                    finds.update(active=form.cleaned_data['active'],
                                 verified=form.cleaned_data['verified'],
                                 false_p=form.cleaned_data['false_p'],
                                 out_of_scope=form.cleaned_data['out_of_scope'],
                                 last_reviewed=timezone.now(),
                                 last_reviewed_by=request.user)

                # Update the grade as bulk edits don't go through save
                if form.cleaned_data['severity'] or form.cleaned_data['status']:
                    calculate_grade(test.engagement.product)

                messages.add_message(request,
                                     messages.SUCCESS,
                                     'Bulk edit of findings was successful.  Check to make sure it is what you intended.',
                                     extra_tags='alert-success')
            else:
                messages.add_message(request,
                                     messages.ERROR,
                                     'Unable to process bulk update. Required fields were not selected.',
                                     extra_tags='alert-danger')

    return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
Example #21
    def __init__(self, filename, test):
        vscan = ElementTree.parse(filename)
        root = vscan.getroot()

        if 'https://www.veracode.com/schema/reports/export/1.0' not in str(root):
            # version not supported
            raise NamespaceErr('This version of Veracode report is not supported.  '
                               'Please make sure the export is formatted using the '
                               'https://www.veracode.com/schema/reports/export/1.0 schema.')

        dupes = dict()

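        # Findings are grouped under numeric severity levels; level 5 is the most severe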
        for severity in root.iter('{https://www.veracode.com/schema/reports/export/1.0}severity'):
            if severity.attrib['level'] == '5':
                sev = 'Critical'
            elif severity.attrib['level'] == '4':
                sev = 'High'
            elif severity.attrib['level'] == '3':
                sev = 'Medium'
            elif severity.attrib['level'] == '2':
                sev = 'Low'
            else:
                sev = 'Info'

            for category in severity.iter('{https://www.veracode.com/schema/reports/export/1.0}category'):
                recommendations = category.find('{https://www.veracode.com/schema/reports/export/1.0}recommendations')
                mitigation = ''
                for para in recommendations.iter('{https://www.veracode.com/schema/reports/export/1.0}para'):
                    mitigation += para.attrib['text'] + '\n\n'
                    for bullet in para.iter('{https://www.veracode.com/schema/reports/export/1.0}bulletitem'):
                        mitigation += "    * " + bullet.attrib['text'] + '\n'

                for flaw in category.iter('{https://www.veracode.com/schema/reports/export/1.0}flaw'):
                    dupe_key = sev + flaw.attrib['cweid'] + flaw.attrib['module'] + flaw.attrib['type']

                    if dupe_key in dupes:
                        find = dupes[dupe_key]
                    else:
                        dupes[dupe_key] = True
                        description = flaw.attrib['description'].replace('. ', '.\n')
                        if 'References:' in description:
                            references = description[description.index('References:') + 13:].replace(')  ', ')\n')
                        else:
                            references = 'None'

                        if 'date_first_occurrence' in flaw.attrib:
                            find_date = datetime.strptime(flaw.attrib['date_first_occurrence'],
                                                          '%Y-%m-%d %H:%M:%S %Z')
                        else:
                            find_date = test.target_start

                        find = Finding(title=flaw.attrib['categoryname'],
                                       cwe=int(flaw.attrib['cweid']),
                                       test=test,
                                       active=False,
                                       verified=False,
                                       description=description + "\n\nVulnerable Module: " + flaw.attrib[
                                           'module'] + ' Type: ' + flaw.attrib['type'],
                                       severity=sev,
                                       numerical_severity=Finding.get_numerical_severity(sev),
                                       mitigation=mitigation,
                                       impact='CIA Impact: ' + flaw.attrib['cia_impact'].upper(),
                                       references=references,
                                       url='N/A',
                                       date=find_date)
                        dupes[dupe_key] = find

        self.items = dupes.values()
Example #22
def add_temp_finding(request, tid, fid):
    jform = None
    test = get_object_or_404(Test, id=tid)
    finding = get_object_or_404(Finding_Template, id=fid)
    findings = Finding_Template.objects.all()

    if request.method == 'POST':
        form = FindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            new_finding.date = datetime.today()
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = timezone.now()
                new_finding.mitigated_by = request.user

            create_template = new_finding.is_template
            # is template always False now in favor of new model Finding_Template
            # no further action needed here since this is already adding from template.
            new_finding.is_template = False
            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()
            if 'jiraform-push_to_jira' in request.POST:
                jform = JIRAFindingForm(request.POST,
                                        prefix='jiraform',
                                        enabled=True)
                # cleaned_data only exists after validation
                if jform.is_valid():
                    add_issue_task.delay(new_finding,
                                         jform.cleaned_data.get('push_to_jira'))
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding from template added successfully.',
                                 extra_tags='alert-success')

            if create_template:
                templates = Finding_Template.objects.filter(
                    title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(
                        request,
                        messages.ERROR,
                        'A finding template was not created.  A template with this title already '
                        'exists.',
                        extra_tags='alert-danger')
                else:
                    template = Finding_Template(
                        title=new_finding.title,
                        cwe=new_finding.cwe,
                        severity=new_finding.severity,
                        description=new_finding.description,
                        mitigation=new_finding.mitigation,
                        impact=new_finding.impact,
                        references=new_finding.references,
                        numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A finding template was also created.',
                        extra_tags='alert-success')

            return HttpResponseRedirect(reverse('view_test', args=(test.id, )))
        else:
            messages.add_message(
                request,
                messages.ERROR,
                'The form has errors, please correct them below.',
                extra_tags='alert-danger')

    else:
        form = FindingForm(
            initial={
                'active': False,
                'date': timezone.now().date(),
                'verified': False,
                'false_p': False,
                'duplicate': False,
                'out_of_scope': False,
                'title': finding.title,
                'description': finding.description,
                'cwe': finding.cwe,
                'severity': finding.severity,
                'mitigation': finding.mitigation,
                'impact': finding.impact,
                'references': finding.references,
                'numerical_severity': finding.numerical_severity
            })
        if get_system_setting('enable_jira'):
            enabled = JIRA_PKey.objects.get(
                product=test.engagement.product).push_all_issues
            jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')
        else:
            jform = None

    add_breadcrumb(parent=test,
                   title="Add Finding",
                   top_level=False,
                   request=request)
    return render(
        request, 'dojo/add_findings.html', {
            'form': form,
            'jform': jform,
            'findings': findings,
            'temp': True,
            'fid': finding.id,
            'tid': test.id,
            'test': test,
        })
Example #23
def get_item(vuln, test):
    if vuln['category'] != 'sast':
        # For SAST reports, value must always be "sast"
        return None

    unique_id_from_tool = None
    if 'id' in vuln:
        unique_id_from_tool = vuln['id']
    else:
        # If the new unique id is not provided, fall back to deprecated "cve" fingerprint (old version)
        unique_id_from_tool = vuln['cve']

    title = ''
    if 'name' in vuln:
        title = vuln['name']
    elif 'message' in vuln:
        title = vuln['message']
    elif 'description' in vuln:
        title = vuln['description']
    else:
        # All other fields are optional, if none of them has a value, fall back on the unique id
        title = unique_id_from_tool

    description = 'Scanner: {}\n'.format(vuln['scanner']['name'])
    if 'message' in vuln:
        description += '{}\n'.format(vuln['message'])
    if 'description' in vuln:
        description += '{}\n'.format(vuln['description'])

    location = vuln['location']
    file_path = location['file'] if 'file' in location else None
    sourcefile = location['file'] if 'file' in location else None

    line = location['start_line'] if 'start_line' in location else None
    if 'end_line' in location:
        line = location['end_line']

    sast_source_line = location[
        'start_line'] if 'start_line' in location else None

    sast_object = None
    if 'class' in location and 'method' in location:
        sast_object = '{}#{}'.format(location['class'], location['method'])
    elif 'class' in location:
        sast_object = location['class']
    elif 'method' in location:
        sast_object = location['method']

    severity = vuln['severity']
    if severity == 'Undefined' or severity == 'Unknown':
        # Severity can be "Undefined" or "Unknown" in SAST report
        # In that case we set it as Info and specify the initial severity in the title
        title = '[{} severity] {}'.format(severity, title)
        severity = 'Info'
    numerical_severity = Finding.get_numerical_severity(severity)
    scanner_confidence = get_confidence_numeric(
        vuln.get('confidence', 'Unkown'))

    mitigation = ''
    if 'solution' in vuln:
        mitigation = vuln['solution']

    cwe = None
    cve = None
    references = ''
    if 'identifiers' in vuln:
        for identifier in vuln['identifiers']:
            if identifier['type'].lower() == 'cwe':
                if isinstance(identifier['value'], int):
                    cwe = identifier['value']
                elif identifier['value'].isdigit():
                    cwe = int(identifier['value'])
            elif identifier['type'].lower() == 'cve':
                cve = identifier['value']
            else:
                references += 'Identifier type: {}\n'.format(
                    identifier['type'])
                references += 'Name: {}\n'.format(identifier['name'])
                references += 'Value: {}\n'.format(identifier['value'])
                if 'url' in identifier:
                    references += 'URL: {}\n'.format(identifier['url'])
                references += '\n'

    finding = Finding(title=title,
                      test=test,
                      active=False,
                      verified=False,
                      description=description,
                      severity=severity,
                      numerical_severity=numerical_severity,
                      scanner_confidence=scanner_confidence,
                      mitigation=mitigation,
                      unique_id_from_tool=unique_id_from_tool,
                      references=references,
                      file_path=file_path,
                      sourcefile=sourcefile,
                      line=line,
                      sast_source_object=sast_object,
                      sast_sink_object=sast_object,
                      sast_source_file_path=file_path,
                      sast_source_line=sast_source_line,
                      cwe=cwe,
                      cve=cve,
                      static_finding=True,
                      dynamic_finding=False)

    return finding
Example #24
    def get_finding_from_vulnerability(self, dependency, related_dependency,
                                       vulnerability, test, namespace):
        dependency_filename, dependency_filepath = self.get_filename_and_path_from_dependency(
            dependency, related_dependency, namespace)
        # logger.debug('dependency_filename: %s', dependency_filename)

        if dependency_filename is None:
            return None

        tags = []
        name = vulnerability.findtext(f'{namespace}name')
        if vulnerability.find(f'{namespace}cwes'):
            cwe_field = vulnerability.find(f'{namespace}cwes').findtext(
                f'{namespace}cwe')
        else:
            cwe_field = vulnerability.findtext(f'{namespace}cwe')

        description = vulnerability.findtext(f'{namespace}description')

        source = vulnerability.get('source')
        if source:
            description += '\n**Source:** ' + str(source)

        # I need the notes field since this is how the suppression is documented.
        notes = vulnerability.findtext(f'.//{namespace}notes')

        cve = name[:28]
        if cve and not cve.startswith('CVE'):
            # for vulnerability sources which have a CVE, it is the start of the 'name'.
            # for other sources, we have to set it to None
            cve = None

        # Use CWE-1035 as fallback
        cwe = 1035  # Vulnerable Third Party Component
        if cwe_field:
            m = re.match(r"^(CWE-)?(\d+)", cwe_field)
            if m:
                cwe = int(m.group(2))

        component_name, component_version = self.get_component_name_and_version_from_dependency(
            dependency, related_dependency, namespace)

        stripped_name = name
        # startswith CVE-XXX-YYY
        stripped_name = re.sub(r'^CVE-\d{4}-\d{4,7}', '',
                               stripped_name).strip()
        # startswith CWE-XXX:
        stripped_name = re.sub(r'^CWE-\d+\:', '', stripped_name).strip()
        # startswith CWE-XXX
        stripped_name = re.sub(r'^CWE-\d+', '', stripped_name).strip()

        if component_name is None:
            logger.warning(
                "component_name was None for File: {}, using dependency file name instead."
                .format(dependency_filename))
            component_name = dependency_filename

        # some changes in v6.0.0 around CVSS version information
        # https://github.com/jeremylong/DependencyCheck/pull/2781

        cvssv2_node = vulnerability.find(namespace + 'cvssV2')
        cvssv3_node = vulnerability.find(namespace + 'cvssV3')
        severity = vulnerability.findtext(f'{namespace}severity')
        if not severity:
            if cvssv3_node is not None:
                severity = cvssv3_node.findtext(
                    f'{namespace}baseSeverity').lower().capitalize()
            elif cvssv2_node is not None:
                severity = cvssv2_node.findtext(
                    f'{namespace}severity').lower().capitalize()

        # Handle severity values that are missing or not in the mapping:
        # default to 'Medium' and log a warning.
        if severity is None or severity.strip().lower() not in self.SEVERITY_MAPPING:
            logger.warning(
                f"Unknown severity value detected '{severity}'. Defaulting to 'Medium'."
            )
            severity = "Medium"
        else:
            severity = self.SEVERITY_MAPPING[severity.strip().lower()]

        reference_detail = None
        references_node = vulnerability.find(namespace + 'references')

        if references_node is not None:
            reference_detail = ''
            for reference_node in references_node.findall(namespace +
                                                          'reference'):
                ref_source = reference_node.findtext(f"{namespace}source")
                ref_url = reference_node.findtext(f"{namespace}url")
                ref_name = reference_node.findtext(f"{namespace}name")
                if ref_url == ref_name:
                    reference_detail += f'**Source:** {ref_source}\n' \
                                        f'**URL:** {ref_url}\n\n'
                else:
                    reference_detail += f'**Source:** {ref_source}\n' \
                                        f'**URL:** {ref_url}\n' \
                                        f'**Name:** {ref_name}\n\n'

        if related_dependency is not None:
            tags.append("related")

        if vulnerability.tag == "{}suppressedVulnerability".format(namespace):
            if notes is None:
                notes = "Documentation on why we are suppressing this vulnerability is missing!"
                tags.append("no_suppression_document")
            mitigation = '**This vulnerability is mitigated and/or suppressed:** {}\n'.format(
                notes)
            mitigation = mitigation + 'Update {}:{} to at least the version recommended in the description'.format(
                component_name, component_version)

            active = False
            tags.append("suppressed")

        else:
            mitigation = 'Update {}:{} to at least the version recommended in the description'.format(
                component_name, component_version)
            description += '\n**Filepath:** ' + str(dependency_filepath)
            active = True

        return Finding(
            title=f'{component_name}:{component_version} | {name}',
            file_path=dependency_filename,
            test=test,
            cwe=cwe,
            cve=cve,
            description=description,
            severity=severity,
            mitigation=mitigation,
            tags=tags,
            active=active,
            dynamic_finding=False,
            static_finding=True,
            references=reference_detail,
            component_name=component_name,
            component_version=component_version,
        )
Example #25
def severity_number_value(value):
    return Finding.get_number_severity(value)
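This filter relies on the companion helper Finding.get_number_severity, which returns an integer rather than an S-code. A minimal sketch under the assumption that higher numbers mean higher severity; the exact return values are illustrative, not taken from these excerpts.

SEVERITY_NUMBERS = {
    'Critical': 4,
    'High': 3,
    'Medium': 2,
    'Low': 1,
    'Info': 0,
}

def get_number_severity(severity):
    # Illustrative mapping; unrecognized labels default to 0 here.
    return SEVERITY_NUMBERS.get(severity, 0)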
Example #26
def add_temp_finding(request, tid, fid):
    jform = None
    test = get_object_or_404(Test, id=tid)
    finding = get_object_or_404(Finding_Template, id=fid)
    findings = Finding_Template.objects.all()
    push_all_jira_issues = jira_helper.is_push_all_issues(finding)

    if request.method == 'POST':

        form = AddFindingForm(request.POST,
                              req_resp=None,
                              product=test.engagement.product)
        if jira_helper.get_jira_project(test):
            jform = JIRAFindingForm(
                push_all=jira_helper.is_push_all_issues(test),
                prefix='jiraform',
                jira_project=jira_helper.get_jira_project(test),
                finding_form=form)
            logger.debug('jform valid: %s', jform.is_valid())

        if (form['active'].value() is False or form['false_p'].value()
            ) and form['duplicate'].value() is False:
            closing_disabled = Note_Type.objects.filter(
                is_mandatory=True, is_active=True).count()
            if closing_disabled != 0:
                error_inactive = ValidationError(
                    'Can not set a finding as inactive without adding all mandatory notes',
                    code='not_active_or_false_p_true')
                error_false_p = ValidationError(
                    'Can not set a finding as false positive without adding all mandatory notes',
                    code='not_active_or_false_p_true')
                if form['active'].value() is False:
                    form.add_error('active', error_inactive)
                if form['false_p'].value():
                    form.add_error('false_p', error_false_p)
                messages.add_message(
                    request,
                    messages.ERROR,
                    'Can not set a finding as inactive or false positive without adding all mandatory notes',
                    extra_tags='alert-danger')
        if form.is_valid():
            finding.last_used = timezone.now()
            finding.save()
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            new_finding.date = datetime.today()
            finding_helper.update_finding_status(new_finding, request.user)

            new_finding.save(dedupe_option=False, false_history=False)

            # Save and add new endpoints
            finding_helper.add_endpoints(new_finding, form)

            new_finding.save(false_history=True)
            if 'jiraform-push_to_jira' in request.POST:
                jform = JIRAFindingForm(
                    request.POST,
                    prefix='jiraform',
                    instance=new_finding,
                    push_all=push_all_jira_issues,
                    jira_project=jira_helper.get_jira_project(test),
                    finding_form=form)
                if jform.is_valid():
                    if jform.cleaned_data.get('push_to_jira'):
                        jira_helper.push_to_jira(new_finding)
                else:
                    add_error_message_to_response(
                        'jira form validation failed: %s' % jform.errors)

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding from template added successfully.',
                                 extra_tags='alert-success')

            return HttpResponseRedirect(reverse('view_test', args=(test.id, )))
        else:
            messages.add_message(
                request,
                messages.ERROR,
                'The form has errors, please correct them below.',
                extra_tags='alert-danger')

    else:
        form = AddFindingForm(req_resp=None,
                              product=test.engagement.product,
                              initial={
                                  'active': False,
                                  'date': timezone.now().date(),
                                  'verified': False,
                                  'false_p': False,
                                  'duplicate': False,
                                  'out_of_scope': False,
                                  'title': finding.title,
                                  'description': finding.description,
                                  'cwe': finding.cwe,
                                  'severity': finding.severity,
                                  'mitigation': finding.mitigation,
                                  'impact': finding.impact,
                                  'references': finding.references,
                                  'numerical_severity':
                                  finding.numerical_severity
                              })

        if jira_helper.get_jira_project(test):
            jform = JIRAFindingForm(
                push_all=jira_helper.is_push_all_issues(test),
                prefix='jiraform',
                jira_project=jira_helper.get_jira_project(test),
                finding_form=form)

    # logger.debug('form valid: %s', form.is_valid())
    # logger.debug('jform valid: %s', jform.is_valid())
    # logger.debug('form errors: %s', form.errors)
    # logger.debug('jform errors: %s', jform.errors)
    # logger.debug('jform errors: %s', vars(jform))

    product_tab = Product_Tab(test.engagement.product.id,
                              title="Add Finding",
                              tab="engagements")
    product_tab.setEngagement(test.engagement)
    return render(
        request, 'dojo/add_findings.html', {
            'form': form,
            'product_tab': product_tab,
            'jform': jform,
            'findings': findings,
            'temp': True,
            'fid': finding.id,
            'tid': test.id,
            'test': test,
        })
Example #27
0
    def obj_create(self, bundle, **kwargs):
        bundle.obj = ImportScanObject(initial=kwargs)
        self.is_valid(bundle)
        if bundle.errors:
            raise ImmediateHttpResponse(
                response=self.error_response(bundle.request, bundle.errors))
        bundle = self.full_hydrate(bundle)

        test = bundle.obj.__getattr__('test_obj')
        scan_type = bundle.obj.__getattr__('scan_type')
        min_sev = bundle.obj.__getattr__('minimum_severity')
        scan_date = bundle.obj.__getattr__('scan_date')
        verified = bundle.obj.__getattr__('verified')
        active = bundle.obj.__getattr__('active')

        try:
            parser = import_parser_factory(bundle.data['file'], test)
        except ValueError:
            raise NotFound("Parser ValueError")

        try:
            items = parser.items
            original_items = test.finding_set.all().values_list("id",
                                                                flat=True)
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0
            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

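                # in Finding.SEVERITIES a higher number means a less severe finding,
                # so anything below the configured minimum severity is skipped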
                if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                    continue

                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    find = Finding.objects.filter(
                        title=item.title,
                        test__id=test.id,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                        description=item.description)
                else:
                    find = Finding.objects.filter(
                        title=item.title,
                        test__id=test.id,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                    )

                if len(find) == 1:
                    find = find[0]
                    if find.mitigated:
                        # it was once fixed, but now back
                        find.mitigated = None
                        find.mitigated_by = None
                        find.active = True
                        find.verified = verified
                        find.save()
                        note = Notes(entry="Re-activated by %s re-upload." %
                                     scan_type,
                                     author=bundle.request.user)
                        note.save()
                        find.notes.add(note)
                        reactivated_count += 1
                    new_items.append(find.id)
                else:
                    item.test = test
                    item.date = test.target_start
                    item.reporter = bundle.request.user
                    item.last_reviewed = datetime.now(tz=localtz)
                    item.last_reviewed_by = bundle.request.user
                    item.verified = verified
                    item.active = active
                    item.save()
                    finding_added_count += 1
                    new_items.append(item.id)
                    find = item

                    if hasattr(item, 'unsaved_req_resp') and len(
                            item.unsaved_req_resp) > 0:
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=find,
                                burpRequestBase64=req_resp["req"],
                                burpResponseBase64=req_resp["resp"],
                            )
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request is not None and item.unsaved_response is not None:
                        burp_rr = BurpRawRequestResponse(
                            finding=find,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response,
                        )
                        burp_rr.clean()
                        burp_rr.save()
                if find:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                        find.endpoints.add(ep)

                    if item.unsaved_tags is not None:
                        find.tags = item.unsaved_tags
            # calculate the difference
            to_mitigate = set(original_items) - set(new_items)
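            # findings that existed before this import but are absent from the new report are closed as mitigated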
            for finding_id in to_mitigate:
                finding = Finding.objects.get(id=finding_id)
                finding.mitigated = datetime.combine(
                    scan_date,
                    datetime.now(tz=localtz).time())
                finding.mitigated_by = bundle.request.user
                finding.active = False
                finding.save()
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=bundle.request.user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        except SyntaxError:
            raise NotFound("Parser SyntaxError")

        # Everything executed fine. We successfully imported the scan.
        raise ImmediateHttpResponse(
            HttpCreated(location=bundle.obj.__getattr__('test')))
Example #28
0
    def process_parsed_findings(self,
                                test,
                                parsed_findings,
                                scan_type,
                                user,
                                active,
                                verified,
                                minimum_severity=None,
                                endpoints_to_add=None,
                                push_to_jira=None,
                                group_by=None,
                                now=timezone.now(),
                                service=None,
                                scan_date=None,
                                **kwargs):
        logger.debug('endpoints_to_add: %s', endpoints_to_add)
        new_findings = []
        items = parsed_findings
        logger.debug('starting import of %i items.',
                     len(items) if items else 0)
        i = 0
        for item in items:
            # FIXME hack to remove when all parsers have unit tests for this attribute
            if item.severity.lower().startswith(
                    'info') and item.severity != 'Info':
                item.severity = 'Info'

            item.numerical_severity = Finding.get_numerical_severity(
                item.severity)

            if minimum_severity and (Finding.SEVERITIES[item.severity] >
                                     Finding.SEVERITIES[minimum_severity]):
                # finding's severity is below the configured threshold : ignoring the finding
                continue

            item.test = test
            item.reporter = user if user else get_current_user()
            item.last_reviewed = now
            item.last_reviewed_by = user if user else get_current_user()

            logger.debug(
                'process_parsed_findings: active from report: %s, verified from report: %s',
                item.active, item.verified)
            # active, verified parameters = parameters from the gui or api call.
            # item.active, item.verified = values from the report / the parser
            # if either value of active (from the parser or from the api/gui) is false, final status is inactive
            #   else final status is active
            # if either value of verified (from the parser or from the api/gui) is false, final status is not verified
            #   else final status is verified
            # Note that:
            #   - the API (active/verified parameters) values default to True if not specified
            #   - the parser values default to true if not set by the parser (as per the default value in models.py)
            #   - there is no "not specified" in the GUI (not ticked means not active/not verified)
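            #   - net effect: the final active/verified value is the logical AND of the parser value and the API/GUI value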
            if item.active:
                item.active = active
            if item.verified:
                item.verified = verified

            # if scan_date was provided, override value from parser
            if scan_date:
                item.date = scan_date

            item.service = service

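            # first save with deduplication disabled; related objects (request/response pairs, endpoints, files) are attached before the final save below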
            item.save(dedupe_option=False)

            if settings.FEATURE_FINDING_GROUPS and group_by:
                finding_helper.add_finding_to_auto_group(item, group_by)

            if (hasattr(item, 'unsaved_req_resp')
                    and len(item.unsaved_req_resp) > 0):
                for req_resp in item.unsaved_req_resp:
                    burp_rr = BurpRawRequestResponse(
                        finding=item,
                        burpRequestBase64=base64.b64encode(
                            req_resp["req"].encode("utf-8")),
                        burpResponseBase64=base64.b64encode(
                            req_resp["resp"].encode("utf-8")))
                    burp_rr.clean()
                    burp_rr.save()

            if (item.unsaved_request is not None
                    and item.unsaved_response is not None):
                burp_rr = BurpRawRequestResponse(
                    finding=item,
                    burpRequestBase64=base64.b64encode(
                        item.unsaved_request.encode()),
                    burpResponseBase64=base64.b64encode(
                        item.unsaved_response.encode()))
                burp_rr.clean()
                burp_rr.save()

            if settings.ASYNC_FINDING_IMPORT:
                importer_utils.chunk_endpoints_and_disperse(
                    item, test, item.unsaved_endpoints)
            else:
                importer_utils.add_endpoints_to_unsaved_finding(
                    item, test, item.unsaved_endpoints, sync=True)

            if endpoints_to_add:
                if settings.ASYNC_FINDING_IMPORT:
                    importer_utils.chunk_endpoints_and_disperse(
                        item, test, endpoints_to_add)
                else:
                    importer_utils.add_endpoints_to_unsaved_finding(
                        item, test, endpoints_to_add, sync=True)

            if item.unsaved_tags:
                item.tags = item.unsaved_tags

            if item.unsaved_files:
                for unsaved_file in item.unsaved_files:
                    data = base64.b64decode(unsaved_file.get('data'))
                    title = unsaved_file.get('title', '<No title>')
                    file_upload, file_upload_created = FileUpload.objects.get_or_create(
                        title=title, )
                    file_upload.file.save(title, ContentFile(data))
                    file_upload.save()
                    item.files.add(file_upload)

            new_findings.append(item)
            # to avoid pushing a finding group multiple times, we push those outside of the loop
            if settings.FEATURE_FINDING_GROUPS and item.finding_group:
                item.save()
            else:
                item.save(push_to_jira=push_to_jira)

        if settings.FEATURE_FINDING_GROUPS and push_to_jira:
            for finding_group in set([
                    finding.finding_group for finding in new_findings
                    if finding.finding_group is not None
            ]):
                jira_helper.push_to_jira(finding_group)
        sync = kwargs.get('sync', False)
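        # in the async case the findings are returned serialized so they can be passed back across the task boundary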
        if not sync:
            return [
                serializers.serialize('json', [
                    finding,
                ]) for finding in new_findings
            ]
        return new_findings
Example #29
0
def add_jira_issue(find):
    logger.info('trying to create a new jira issue for %d:%s', find.id,
                find.title)

    if not is_jira_enabled():
        return

    if not is_jira_configured_and_enabled(find):
        logger.error(
            "Finding {} cannot be pushed to JIRA as there is no JIRA configuration for this product."
            .format(find.id))
        log_jira_alert(
            'Finding cannot be pushed to JIRA as there is no JIRA configuration for this product.',
            find)
        return

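    # translate the configured minimum JIRA severity into its numeric value for the threshold comparison below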
    jira_minimum_threshold = None
    if System_Settings.objects.get().jira_minimum_severity:
        jira_minimum_threshold = Finding.get_number_severity(
            System_Settings.objects.get().jira_minimum_severity)

    jira_project = get_jira_project(find)
    jira_instance = get_jira_instance(find)

    if 'Active' in find.status() and 'Verified' in find.status():
        if jira_minimum_threshold and jira_minimum_threshold > Finding.get_number_severity(
                find.severity):
            log_jira_alert(
                'Finding below the minimum JIRA severity threshold.', find)
            logger.warning(
                "Finding {} is below the minimum JIRA severity threshold.".
                format(find.id))
            logger.warning("The JIRA issue will NOT be created.")
            return

        logger.debug(
            'Trying to create a new JIRA issue for finding {}...'.format(
                find.id))
        meta = None
        try:
            JIRAError.log_to_tempfile = False
            jira = get_jira_connection(jira_instance)

            fields = {
                'project': {
                    'key': jira_project.project_key
                },
                'summary': find.title,
                'description': jira_description(find),
                'issuetype': {
                    'name': jira_instance.default_issue_type
                },
            }

            if jira_project.component:
                fields['components'] = [
                    {
                        'name': jira_project.component
                    },
                ]

            # fetch the create-issue metadata once; it is used below to check which optional
            # fields (priority, labels, duedate, environment) exist for this project + issuetype
            if not meta:
                meta = get_jira_meta(jira, jira_project)

            if 'priority' in meta['projects'][0]['issuetypes'][0]['fields']:
                fields['priority'] = {
                    'name': jira_instance.get_priority(find.severity)
                }

            labels = get_labels(find)
            if labels:
                if 'labels' in meta['projects'][0]['issuetypes'][0]['fields']:
                    fields['labels'] = labels

            if System_Settings.objects.get().enable_finding_sla:

                if 'duedate' in meta['projects'][0]['issuetypes'][0]['fields']:
                    # jira wants YYYY-MM-DD
                    duedate = find.sla_deadline()
                    if duedate:
                        fields['duedate'] = duedate.strftime('%Y-%m-%d')

            if len(find.endpoints.all()) > 0:
                if not meta:
                    meta = get_jira_meta(jira, jira_project)

                if 'environment' in meta['projects'][0]['issuetypes'][0][
                        'fields']:
                    environment = "\n".join(
                        [str(endpoint) for endpoint in find.endpoints.all()])
                    fields['environment'] = environment

            logger.debug('sending fields to JIRA: %s', fields)

            new_issue = jira.create_issue(fields)

            j_issue = JIRA_Issue(jira_id=new_issue.id,
                                 jira_key=new_issue.key,
                                 finding=find,
                                 jira_project=jira_project)
            j_issue.jira_creation = timezone.now()
            j_issue.jira_change = timezone.now()
            j_issue.save()
            issue = jira.issue(new_issue.id)

            find.save(push_to_jira=False,
                      dedupe_option=False,
                      issue_updater_option=False)

            # Upload dojo finding screenshots to Jira
            for pic in find.images.all():
                jira_attachment(find, jira, issue,
                                settings.MEDIA_ROOT + pic.image_large.name)

                # if jira_project.enable_engagement_epic_mapping:
                #      epic = get_jira_issue(eng)
                #      issue_list = [j_issue.jira_id,]
                #      jira.add_jira_issues_to_epic(epic_id=epic.jira_id, issue_keys=[str(j_issue.jira_id)], ignore_epics=True)

            return True
        except JIRAError as e:
            logger.exception(e)
            logger.error("jira_meta for project: %s and url: %s meta: %s",
                         jira_project.project_key,
                         jira_project.jira_instance.url,
                         json.dumps(meta, indent=4))  # this is None safe
            log_jira_alert(e.text, find)
            return False
    else:
        log_jira_alert(
            "A Finding needs to be both Active and Verified to be pushed to JIRA.",
            find)
        logger.warning(
            "A Finding needs to be both Active and Verified to be pushed to JIRA: %s",
            find)
        return False
Example #30
0
    def get_finding_from_vulnerability(self, dependency, related_dependency,
                                       vulnerability, test, namespace):
        dependency_filename, dependency_filepath = self.get_filename_and_path_from_dependency(
            dependency, related_dependency, namespace)
        # logger.debug('dependency_filename: %s', dependency_filename)

        if dependency_filename is None:
            return None

        name = self.get_field_value(vulnerability, 'name', namespace)
        cwes_node = vulnerability.find(namespace + 'cwes')
        if cwes_node is not None:
            cwe_field = self.get_field_value(cwes_node, 'cwe', namespace)
        else:
            cwe_field = self.get_field_value(vulnerability, 'cwe', namespace)
        description = self.get_field_value(vulnerability, 'description',
                                           namespace)

        cve = name[:28]
        if cve and not cve.startswith('CVE'):
            # for vulnerability sources which have a CVE, it is the start of the 'name'.
            # for other sources, we have to set it to None
            cve = None

        # Use CWE-1035 as fallback
        cwe = 1035  # Vulnerable Third Party Component
        if cwe_field:
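            # accept both 'CWE-79' and a bare numeric id, and extract the number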
            m = re.match(r"^(CWE-)?(\d+)", cwe_field)
            if m:
                cwe = int(m.group(2))

        component_name, component_version = self.get_component_name_and_version_from_dependency(
            dependency, related_dependency, namespace)

        stripped_name = name
        # startswith CVE-XXX-YYY
        stripped_name = re.sub(r'^CVE-\d{4}-\d{4,7}', '',
                               stripped_name).strip()
        # startswith CWE-XXX:
        stripped_name = re.sub(r'^CWE-\d+\:', '', stripped_name).strip()
        # startswith CWE-XXX
        stripped_name = re.sub(r'^CWE-\d+', '', stripped_name).strip()

        title = '%s:%s | %s(in %s)' % (
            component_name.split(':')[-1], component_version,
            (stripped_name + ' ' if stripped_name else '') +
            (description if len(stripped_name) < 25 else ''),
            dependency_filename)

        # some changes in v6.0.0 around CVSS version information
        # https://github.com/jeremylong/DependencyCheck/pull/2781

        cvssv2_node = vulnerability.find(namespace + 'cvssV2')
        cvssv3_node = vulnerability.find(namespace + 'cvssV3')
        severity = self.get_field_value(vulnerability, 'severity',
                                        namespace).lower().capitalize()
        if not severity:
            if cvssv3_node is not None:
                severity = self.get_field_value(
                    cvssv3_node, 'baseSeverity',
                    namespace).lower().capitalize()
            elif cvssv2_node is not None:
                severity = self.get_field_value(
                    cvssv2_node, 'severity', namespace).lower().capitalize()

        # https://github.com/DefectDojo/django-DefectDojo/issues/4309
        if severity.lower() == 'moderate':
            severity = 'Medium'

        if severity not in SEVERITY:
            tag = "Severity is inaccurate: " + str(severity)
            title += " | " + tag
            logger.warning(
                "Warning: Inaccurate severity detected. Setting its severity to Medium.\n"
                + "Title is: " + title)
            severity = "Medium"

        reference_detail = None
        references_node = vulnerability.find(namespace + 'references')

        if references_node is not None:
            reference_detail = ''
            for reference_node in references_node.findall(namespace +
                                                          'reference'):
                name = self.get_field_value(reference_node, 'name', namespace)
                source = self.get_field_value(reference_node, 'source',
                                              namespace)
                url = self.get_field_value(reference_node, 'url', namespace)
                reference_detail += 'name: {0}\n' \
                                     'source: {1}\n' \
                                     'url: {2}\n\n'.format(name, source, url)

        mitigation = 'Update ' + component_name + ':' + component_version + ' to at least the version recommended in the description'
        description += '\nFilepath: ' + str(dependency_filepath)

        return Finding(title=title,
                       file_path=dependency_filename,
                       test=test,
                       cwe=cwe,
                       cve=cve,
                       description=description,
                       severity=severity,
                       mitigation=mitigation,
                       static_finding=True,
                       references=reference_detail,
                       component_name=component_name,
                       component_version=component_version)
Example #31
0
    def get_findings(self, filename, test):

        if filename is None:
            return ()

        content = filename.read()
        if type(content) is bytes:
            content = content.decode('utf-8')
        reader = csv.DictReader(io.StringIO(content),
                                delimiter=',',
                                quotechar='"')
        csvarray = []

        for row in reader:
            csvarray.append(row)

        dupes = dict()
        for row in csvarray:
            finding = Finding(test=test)
            finding.title = row.get('Vulnerability', '')
            finding.description = row.get('Description', '')
            finding.mitigation = row.get('Recommendations', '')
            finding.references = row.get('Links', '')
            finding.severity = row.get('Severity Level', 'Info')
            finding.file_path = row.get('File', '')
            finding.sast_source_file_path = row.get('File', '')
            finding.sast_source_line = row.get('Line', '')

            if not finding.sast_source_line.isdigit():
                finding.sast_source_line = finding.sast_source_line.split(
                    "-")[0]

            if finding is not None:
                if finding.title is None:
                    finding.title = ""
                if finding.description is None:
                    finding.description = ""

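                # deduplicate on a SHA-256 of title, source file path and source line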
                key = hashlib.sha256(
                    (finding.title + '|' + finding.sast_source_file_path +
                     '|' +
                     finding.sast_source_line).encode("utf-8")).hexdigest()

                if key not in dupes:
                    dupes[key] = finding

        return list(dupes.values())
Example #32
0
def promote_to_finding(request, fid):
    finding = get_object_or_404(Stub_Finding, id=fid)
    test = finding.test
    form_error = False
    jira_available = False
    jform = None
    if hasattr(settings, 'ENABLE_JIRA') and settings.ENABLE_JIRA:
        if JIRA_PKey.objects.filter(product=test.engagement.product).exists():
            jform = JIRAFindingForm(request.POST, prefix='jiraform',
                                    enabled=JIRA_PKey.objects.get(product=test.engagement.product).push_all_issues)
            jira_available = True
    form = PromoteFindingForm(initial={'title': finding.title,
                                       'date': finding.date,
                                       'severity': finding.severity,
                                       'description': finding.description,
                                       'test': finding.test,
                                       'reporter': finding.reporter})
    if request.method == 'POST':
        form = PromoteFindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)

            new_finding.active = True
            new_finding.false_p = False
            new_finding.duplicate = False
            new_finding.mitigated = None
            new_finding.verified = True
            new_finding.out_of_scope = False

            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()

            finding.delete()
            if 'jiraform' in request.POST:
                jform = JIRAFindingForm(request.POST, prefix='jiraform',
                                        enabled=JIRA_PKey.objects.get(product=test.engagement.product).push_all_issues)
                if jform.is_valid():
                    add_issue_task.delay(new_finding, jform.cleaned_data.get('push_to_jira'))

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding promoted successfully.',
                                 extra_tags='alert-success')

            return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
        else:
            if 'endpoints' in form.cleaned_data:
                form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
            else:
                form.fields['endpoints'].queryset = Endpoint.objects.none()
            form_error = True
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')
    add_breadcrumb(parent=test, title="Promote Finding", top_level=False, request=request)
    return render(request, 'dojo/promote_to_finding.html',
                  {'form': form,
                   'test': test,
                   'stub_finding': finding,
                   'form_error': form_error,
                   })
Example #33
0
def edit_finding(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    old_status = finding.status()
    form = FindingForm(instance=finding)
    form.initial['tags'] = [tag.name for tag in finding.tags]
    form_error = False
    jform = None
    try:
        jissue = JIRA_Issue.objects.get(finding=finding)
        enabled = True
    except JIRA_Issue.DoesNotExist:
        enabled = False
    if hasattr(settings, 'ENABLE_JIRA'):
        if settings.ENABLE_JIRA:
            if JIRA_PKey.objects.filter(product=finding.test.engagement.product).exists():
                jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')
    if request.method == 'POST':
        form = FindingForm(request.POST, instance=finding)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = finding.test
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = datetime.now(tz=localtz)
                new_finding.mitigated_by = request.user
            if new_finding.active is True:
                new_finding.false_p = False
                new_finding.mitigated = None
                new_finding.mitigated_by = None

            create_template = new_finding.is_template
            # always false now since this will be deprecated soon in favor of new Finding_Template model
            new_finding.is_template = False
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.last_reviewed = datetime.now(tz=localtz)
            new_finding.last_reviewed_by = request.user
            tags = request.POST.getlist('tags')
            t = ", ".join(tags)
            new_finding.tags = t
            new_finding.save()
            if 'jiraform-push_to_jira' in request.POST:
                jform = JIRAFindingForm(request.POST, prefix='jiraform', enabled=enabled)
                if jform.is_valid():
                    try:
                        jissue = JIRA_Issue.objects.get(finding=new_finding)
                        update_issue_task.delay(new_finding, old_status, jform.cleaned_data.get('push_to_jira'))
                    except JIRA_Issue.DoesNotExist:
                        add_issue_task.delay(new_finding, jform.cleaned_data.get('push_to_jira'))
            tags = request.POST.getlist('tags')
            t = ", ".join(tags)
            new_finding.tags = t

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding saved successfully.',
                                 extra_tags='alert-success')
            if create_template:
                templates = Finding_Template.objects.filter(title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(request,
                                         messages.ERROR,
                                         'A finding template was not created.  A template with this title already '
                                         'exists.',
                                         extra_tags='alert-danger')
                else:
                    template = Finding_Template(title=new_finding.title,
                                                cwe=new_finding.cwe,
                                                severity=new_finding.severity,
                                                description=new_finding.description,
                                                mitigation=new_finding.mitigation,
                                                impact=new_finding.impact,
                                                references=new_finding.references,
                                                numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A finding template was also created.',
                                         extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_finding', args=(new_finding.id,)))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'There appear to be errors on the form, please correct them below.',
                                 extra_tags='alert-danger')
            form_error = True

    if form_error and 'endpoints' in form.cleaned_data:
        form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
    else:
        form.fields['endpoints'].queryset = finding.endpoints.all()
    form.initial['tags'] = [tag.name for tag in finding.tags]
    add_breadcrumb(parent=finding, title="Edit", top_level=False, request=request)
    return render(request, 'dojo/edit_findings.html',
                  {'form': form,
                   'finding': finding,
                   'jform': jform
                   })
Example #34
0
def add_temp_finding(request, tid, fid):
    jform = None
    test = get_object_or_404(Test, id=tid)
    finding = get_object_or_404(Finding_Template, id=fid)
    findings = Finding_Template.objects.all()

    if request.method == 'POST':
        form = FindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            new_finding.date = datetime.today()
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = timezone.now()
                new_finding.mitigated_by = request.user

            create_template = new_finding.is_template
            # is template always False now in favor of new model Finding_Template
            # no further action needed here since this is already adding from template.
            new_finding.is_template = False
            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()
            if 'jiraform-push_to_jira' in request.POST:
                jform = JIRAFindingForm(request.POST, prefix='jiraform', enabled=True)
                if jform.is_valid():
                    add_issue_task.delay(new_finding, jform.cleaned_data.get('push_to_jira'))
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding from template added successfully.',
                                 extra_tags='alert-success')

            if create_template:
                templates = Finding_Template.objects.filter(title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(request,
                                         messages.ERROR,
                                         'A finding template was not created.  A template with this title already '
                                         'exists.',
                                         extra_tags='alert-danger')
                else:
                    template = Finding_Template(title=new_finding.title,
                                                cwe=new_finding.cwe,
                                                severity=new_finding.severity,
                                                description=new_finding.description,
                                                mitigation=new_finding.mitigation,
                                                impact=new_finding.impact,
                                                references=new_finding.references,
                                                numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A finding template was also created.',
                                         extra_tags='alert-success')

            return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')

    else:
        form = FindingForm(initial={'active': False,
                                    'date': timezone.now().date(),
                                    'verified': False,
                                    'false_p': False,
                                    'duplicate': False,
                                    'out_of_scope': False,
                                    'title': finding.title,
                                    'description': finding.description,
                                    'cwe': finding.cwe,
                                    'severity': finding.severity,
                                    'mitigation': finding.mitigation,
                                    'impact': finding.impact,
                                    'references': finding.references,
                                    'numerical_severity': finding.numerical_severity})
        if get_system_setting('enable_jira'):
            enabled = JIRA_PKey.objects.get(product=test.engagement.product).push_all_issues
            jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')
        else:
            jform = None

    add_breadcrumb(parent=test, title="Add Finding", top_level=False, request=request)
    return render(request, 'dojo/add_findings.html',
                  {'form': form,
                   'jform': jform,
                   'findings': findings,
                   'temp': True,
                   'fid': finding.id,
                   'tid': test.id,
                   'test': test,
                   })
Example #35
0
def edit_finding(request, fid):
    finding = get_object_or_404(Finding, id=fid)
    form = FindingForm(instance=finding)
    form_error = False
    if request.method == 'POST':
        form = FindingForm(request.POST, instance=finding)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = finding.test
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = datetime.now(tz=localtz)
                new_finding.mitigated_by = request.user
            if new_finding.active is True:
                new_finding.false_p = False
                new_finding.mitigated = None
                new_finding.mitigated_by = None

            create_template = new_finding.is_template
            # always false now since this will be deprecated soon in favor of new Finding_Template model
            new_finding.is_template = False
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.last_reviewed = datetime.now(tz=localtz)
            new_finding.last_reviewed_by = request.user
            new_finding.save()

            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Finding saved successfully.',
                                 extra_tags='alert-success')
            if create_template:
                templates = Finding_Template.objects.filter(title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(request,
                                         messages.ERROR,
                                         'A finding template was not created.  A template with this title already '
                                         'exists.',
                                         extra_tags='alert-danger')
                else:
                    template = Finding_Template(title=new_finding.title,
                                                cwe=new_finding.cwe,
                                                severity=new_finding.severity,
                                                description=new_finding.description,
                                                mitigation=new_finding.mitigation,
                                                impact=new_finding.impact,
                                                references=new_finding.references,
                                                numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A finding template was also created.',
                                         extra_tags='alert-success')
            return HttpResponseRedirect(reverse('view_finding', args=(new_finding.id,)))
        else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'There appear to be errors on the form, please correct them below.',
                                 extra_tags='alert-danger')
            form_error = True

    if form_error and 'endpoints' in form.cleaned_data:
        form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
    else:
        form.fields['endpoints'].queryset = finding.endpoints.all()

    add_breadcrumb(parent=finding, title="Edit", top_level=False, request=request)
    return render(request, 'dojo/edit_findings.html',
                  {'form': form,
                   'finding': finding,
                   })
Example #36
0
def get_item(vuln, test):
    if vuln['category'] != 'dependency_scanning':
        # For Dependency Scanning reports, value must always be "dependency_scanning"
        return None

    unique_id_from_tool = None
    if 'id' in vuln:
        unique_id_from_tool = vuln['id']
    else:
        # If the new unique id is not provided, fall back to deprecated "cve" fingerprint (old version)
        unique_id_from_tool = vuln['cve']

    title = ''
    if 'name' in vuln:
        title = vuln['name']
    elif 'message' in vuln:
        title = vuln['message']
    elif 'description' in vuln:
        title = vuln['description']
    else:
        # All other fields are optional, if none of them has a value, fall back on the unique id
        title = unique_id_from_tool

    description = 'Scanner: {}\n'.format(vuln['scanner']['name'])
    if 'message' in vuln:
        description += '{}\n'.format(vuln['message'])
    if 'description' in vuln:
        description += '{}\n'.format(vuln['description'])

    location = vuln['location']
    file_path = location['file'] if 'file' in location else None

    component_name = None
    component_version = None
    if 'dependency' in location:
        component_version = location['dependency']['version'] if 'version' in location['dependency'] else None
        if 'package' in location['dependency']:
            component_name = location['dependency']['package']['name'] if 'name' in location['dependency']['package'] else None

    severity = vuln['severity']
    if severity == 'Undefined' or severity == 'Unknown':
        # Severity can be "Undefined" or "Unknown" in report
        # In that case we set it as Info and specify the initial severity in the title
        title = '[{} severity] {}'.format(severity, title)
        severity = 'Info'
    numerical_severity = Finding.get_numerical_severity(severity)
    # Dependency Scanning analyzers don't provide a confidence property
    # See https://docs.gitlab.com/ee/user/application_security/dependency_scanning/analyzers.html#analyzers-data
    scanner_confidence = False

    mitigation = ''
    if 'solution' in vuln:
        mitigation = vuln['solution']

    cwe = None
    cve = None
    references = ''
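    # CWE and CVE identifiers map to dedicated fields; any other identifier type is appended to the references text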
    if 'identifiers' in vuln:
        for identifier in vuln['identifiers']:
            if identifier['type'].lower() == 'cwe':
                cwe = identifier['value']
            elif identifier['type'].lower() == 'cve':
                cve = identifier['value']
            else:
                references += 'Identifier type: {}\n'.format(identifier['type'])
                references += 'Name: {}\n'.format(identifier['name'])
                references += 'Value: {}\n'.format(identifier['value'])
                if 'url' in identifier:
                    references += 'URL: {}\n'.format(identifier['url'])
                references += '\n'

    finding = Finding(title=cve + ": " + title if cve else title,
                      test=test,
                      active=False,
                      verified=False,
                      description=description,
                      severity=severity,
                      numerical_severity=numerical_severity,
                      scanner_confidence=scanner_confidence,
                      mitigation=mitigation,
                      unique_id_from_tool=unique_id_from_tool,
                      references=references,
                      file_path=file_path,
                      component_name=component_name,
                      component_version=component_version,
                      cwe=cwe,
                      cve=cve,
                      static_finding=True,
                      dynamic_finding=False)

    return finding
Example #37
0
def add_findings(request, tid):
    test = Test.objects.get(id=tid)
    form_error = False
    jform = None
    form = AddFindingForm(initial={'date': timezone.now().date()},
                          req_resp=None,
                          product=test.engagement.product)
    push_all_jira_issues = jira_helper.is_push_all_issues(test)
    use_jira = jira_helper.get_jira_project(test) is not None

    if request.method == 'POST':
        form = AddFindingForm(request.POST,
                              req_resp=None,
                              product=test.engagement.product)
        if (form['active'].value() is False or form['false_p'].value()
            ) and form['duplicate'].value() is False:
            closing_disabled = Note_Type.objects.filter(
                is_mandatory=True, is_active=True).count()
            if closing_disabled != 0:
                error_inactive = ValidationError(
                    'Can not set a finding as inactive without adding all mandatory notes',
                    code='inactive_without_mandatory_notes')
                error_false_p = ValidationError(
                    'Can not set a finding as false positive without adding all mandatory notes',
                    code='false_p_without_mandatory_notes')
                if form['active'].value() is False:
                    form.add_error('active', error_inactive)
                if form['false_p'].value():
                    form.add_error('false_p', error_false_p)
                messages.add_message(
                    request,
                    messages.ERROR,
                    'Can not set a finding as inactive or false positive without adding all mandatory notes',
                    extra_tags='alert-danger')
        if use_jira:
            jform = JIRAFindingForm(
                request.POST,
                prefix='jiraform',
                push_all=push_all_jira_issues,
                jira_project=jira_helper.get_jira_project(test),
                finding_form=form)

        if form.is_valid() and (jform is None or jform.is_valid()):
            if jform:
                logger.debug('jform.jira_issue: %s',
                             jform.cleaned_data.get('jira_issue'))
                logger.debug('jform.push_to_jira: %s',
                             jform.cleaned_data.get('push_to_jira'))

            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            new_finding.tags = form.cleaned_data['tags']
            new_finding.save(dedupe_option=False, push_to_jira=False)

            # Save and add new endpoints
            finding_helper.add_endpoints(new_finding, form)

            # Push to jira?
            push_to_jira = False
            jira_message = None
            if jform and jform.is_valid():
                # can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false
                # push_to_jira = jira_helper.is_push_to_jira(new_finding, jform.cleaned_data.get('push_to_jira'))
                push_to_jira = push_all_jira_issues or jform.cleaned_data.get(
                    'push_to_jira')

                # if the jira issue key was changed, update database
                new_jira_issue_key = jform.cleaned_data.get('jira_issue')
                if new_finding.has_jira_issue:
                    jira_issue = new_finding.jira_issue

                    # everything in DD around JIRA integration is based on the internal id of the issue in JIRA
                    # instead of on the public jira issue key.
                    # I have no idea why, but it means we have to retrieve the issue from JIRA to get the internal JIRA id.
                    # we can assume the issue exists, as this is already checked in the validation of the jform

                    if not new_jira_issue_key:
                        jira_helper.finding_unlink_jira(request, new_finding)
                        jira_message = 'Link to JIRA issue removed successfully.'

                    elif new_jira_issue_key != new_finding.jira_issue.jira_key:
                        jira_helper.finding_unlink_jira(request, new_finding)
                        jira_helper.finding_link_jira(request, new_finding,
                                                      new_jira_issue_key)
                        jira_message = 'Changed JIRA link successfully.'
                else:
                    logger.debug('finding has no jira issue yet')
                    if new_jira_issue_key:
                        logger.debug(
                            'finding has no jira issue yet, but jira issue specified in request. trying to link.'
                        )
                        jira_helper.finding_link_jira(request, new_finding,
                                                      new_jira_issue_key)
                        jira_message = 'Linked a JIRA issue successfully.'

            new_finding.save(false_history=True, push_to_jira=push_to_jira)
            create_notification(event='other',
                                title='Addition of %s' % new_finding.title,
                                finding=new_finding,
                                description='Finding "%s" was added by %s' %
                                (new_finding.title, request.user),
                                url=request.build_absolute_uri(
                                    reverse('view_finding',
                                            args=(new_finding.id, ))),
                                icon="exclamation-triangle")

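            # store the raw request/response pair base64-encoded alongside the new finding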
            if 'request' in form.cleaned_data or 'response' in form.cleaned_data:
                burp_rr = BurpRawRequestResponse(
                    finding=new_finding,
                    burpRequestBase64=base64.b64encode(
                        form.cleaned_data['request'].encode()),
                    burpResponseBase64=base64.b64encode(
                        form.cleaned_data['response'].encode()),
                )
                burp_rr.clean()
                burp_rr.save()

            if '_Finished' in request.POST:
                return HttpResponseRedirect(
                    reverse('view_test', args=(test.id, )))
            else:
                return HttpResponseRedirect(
                    reverse('add_findings', args=(test.id, )))
        else:
            form_error = True
            add_error_message_to_response(
                'The form has errors, please correct them below.')
            add_field_errors_to_response(jform)
            add_field_errors_to_response(form)

    else:
        if use_jira:
            jform = JIRAFindingForm(
                push_all=jira_helper.is_push_all_issues(test),
                prefix='jiraform',
                jira_project=jira_helper.get_jira_project(test),
                finding_form=form)

    product_tab = Product_Tab(test.engagement.product.id,
                              title="Add Finding",
                              tab="engagements")
    product_tab.setEngagement(test.engagement)
    return render(
        request, 'dojo/add_findings.html', {
            'form': form,
            'product_tab': product_tab,
            'test': test,
            'temp': False,
            'tid': tid,
            'form_error': form_error,
            'jform': jform,
        })
Example #38
0
    def __init__(self, filename, test):
        cxscan = ElementTree.parse(filename)
        root = cxscan.getroot()

        dupes = dict()

        for query in root.findall('Query'):
            categories = ''
            language = ''
            mitigation = ''
            impact = ''
            references = ''
            findingdetail = ''
            title = ''
            group = ''
            status = ''

            find_date = root.get("ScanStart")
            name = query.get('name')
            cwe = query.get('cweId')

            if query.get('categories') is not None:
                categories = query.get('categories')

            if query.get('Language') is not None:
                language = query.get('Language')

            if query.get('group') is not None:
                group = query.get('group').replace('_', ' ')

            for result in query.findall('Result'):
                deeplink = result.get('DeepLink')

                if categories is not None:
                    findingdetail = 'Category: ' + categories + '\n'

                if language is not None:
                    findingdetail += 'Language: ' + language + '\n'

                if group is not None:
                    findingdetail += 'Group: ' + group + '\n'

                if result.get('Status') is not None:
                    findingdetail += 'Status: ' + result.get('Status') + '\n'

                findingdetail += 'Finding Link: ' + deeplink + '\n\n'

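                # Collapse repeated results for the same category, CWE, query name, file and line into a single finding.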
                dupe_key = categories + cwe + name + result.get('FileName') + result.get('Line')

                if dupe_key in dupes:
                    find = dupes[dupe_key]
                else:
                    dupes[dupe_key] = True

                    sev = result.get('Severity')

                    for path in result.findall('Path'):
                        title = query.get('name').replace('_', ' ') + ' (' + path.get('PathId') + ')'
                        for pathnode in path.findall('PathNode'):
                            findingdetail += 'Source Object: ' + pathnode.find('Name').text + '\n'
                            findingdetail += 'Filename: ' + pathnode.find('FileName').text + '\n'
                            findingdetail += 'Line Number: ' + pathnode.find('Line').text + '\n'
                            for codefragment in pathnode.findall('Snippet/Line'):
                                findingdetail += 'Code: ' + codefragment.find('Code').text.strip() + '\n'

                            findingdetail += '\n'

                    find = Finding(title=title,
                                   cwe=int(cwe),
                                   test=test,
                                   active=False,
                                   verified=False,
                                   description=findingdetail,
                                   severity=sev,
                                   numerical_severity=Finding.get_numerical_severity(sev),
                                   mitigation=mitigation,
                                   impact=impact,
                                   references=references,
                                   url='N/A',
                                   date=find_date)
                    dupes[dupe_key] = find
                    findingdetail = ''

        self.items = dupes.values()
Example #39
0
def get_item(resource, vuln, test):
    resource_name = resource.get('name', resource.get('path'))
    resource_version = resource.get('version', 'No version')
    cve = vuln.get('name', 'No CVE')
    fix_version = vuln.get('fix_version', 'None')
    description = vuln.get('description', 'No description.')
    cvssv3 = None

    url = ""
    if 'nvd_url' in vuln:
        url += "\n{}".format(vuln.get('nvd_url'))
    if 'vendor_url' in vuln:
        url += "\n{}".format(vuln.get('vendor_url'))

    # Take the score in order of preference (highest priority checked last) and record everything in the severity justification.
    score = 0
    severity_justification = ""
    used_for_classification = ""
    if 'aqua_severity' in vuln:
        score = vuln.get('aqua_severity')
        severity = aqua_severity_of(score)
        used_for_classification = "Aqua security score ({}) used for classification.\n".format(
            score)
        severity_justification = vuln.get('aqua_severity_classification')
        if 'nvd_score_v3' in vuln:
            cvssv3 = vuln.get('nvd_vectors_v3')
    else:
        if 'aqua_score' in vuln:
            score = vuln.get('aqua_score')
            used_for_classification = "Aqua score ({}) used for classification.\n".format(
                score)
        elif 'vendor_score' in vuln:
            score = vuln.get('vendor_score')
            used_for_classification = "Vendor score ({}) used for classification.\n".format(
                score)
        elif 'nvd_score_v3' in vuln:
            score = vuln.get('nvd_score_v3')
            used_for_classification = "NVD score v3 ({}) used for classification.\n".format(
                score)
            severity_justification += "\nNVD v3 vectors: {}".format(
                vuln.get('nvd_vectors_v3'))
            # Add the CVSS3 to Finding
            cvssv3 = vuln.get('nvd_vectors_v3')
        elif 'nvd_score' in vuln:
            score = vuln.get('nvd_score')
            used_for_classification = "NVD score v2 ({}) used for classification.\n".format(
                score)
            severity_justification += "\nNVD v2 vectors: {}".format(
                vuln.get('nvd_vectors'))
        severity = severity_of(score)
        severity_justification += "\n{}".format(used_for_classification)

    return Finding(title=cve + " - " + resource_name + " (" +
                   resource_version + ") ",
                   test=test,
                   severity=severity,
                   severity_justification=severity_justification,
                   cwe=0,
                   cve=cve,
                   cvssv3=cvssv3,
                   description=description.strip(),
                   mitigation=fix_version,
                   references=url,
                   component_name=resource.get('name'),
                   component_version=resource.get('version'),
                   impact=severity)
Example #40
0
    def __init__(self, filename, test):
        cxscan = ElementTree.parse(filename)
        root = cxscan.getroot()

        dupes = dict()
        for query in root.findall('Query'):
            categories = ''
            language = ''
            mitigation = 'N/A'
            impact = 'N/A'
            references = ''
            findingdetail = ''
            title = ''
            group = ''
            status = ''
            self.result_dupes = dict()
            find_date = parser.parse(root.get("ScanStart"))
            name = query.get('name')
            cwe = query.get('cweId')

            if query.get('categories') is not None:
                categories = query.get('categories')

            if query.get('Language') is not None:
                language = query.get('Language')

            if query.get('group') is not None:
                group = query.get('group').replace('_', ' ')

            for result in query.findall('Result'):
                if categories is not None:
                    findingdetail = "{}**Category:** {}\n".format(findingdetail, categories)

                if language is not None:
                    findingdetail = "{}**Language:** {}\n".format(findingdetail, language)
                    if language not in self.language_list:
                        self.language_list.append(language)

                if group is not None:
                    findingdetail = "{}**Group:** {}\n".format(findingdetail, group)

                if result.get('Status') is not None:
                    findingdetail = "{}**Status:** {}\n".format(findingdetail, result.get('Status'))

                deeplink = "[{}]({})".format(result.get('DeepLink'), result.get('DeepLink'))
                findingdetail = "{}**Finding Link:** {}\n\n".format(findingdetail, deeplink)

                dupe_key = "{}{}{}{}".format(categories, cwe, name, result.get('FileName'))

                if dupe_key in dupes:
                    find = dupes[dupe_key]
                    title, description, pathnode = self.get_finding_detail(query, result)
                    "{}\n{}".format(find.description, description)
                    dupes[dupe_key] = find
                else:
                    dupes[dupe_key] = True

                    sev = result.get('Severity')
                    title, description, pathnode = self.get_finding_detail(query, result)

                    find = Finding(title=title,
                                   cwe=int(cwe),
                                   test=test,
                                   active=False,
                                   verified=False,
                                   description=findingdetail + description,
                                   severity=sev,
                                   numerical_severity=Finding.get_numerical_severity(sev),
                                   mitigation=mitigation,
                                   impact=impact,
                                   references=references,
                                   file_path=pathnode.find('FileName').text,
                                   line=pathnode.find('Line').text,
                                   url='N/A',
                                   date=find_date,
                                   static_finding=True)
                    dupes[dupe_key] = find
                    findingdetail = ''

        for lang in self.language_list:
            add_language(test.engagement.product, lang)

        self.items = dupes.values()
Example #41
0
    def save(self):
        data = self.validated_data
        test = data['test']
        scan_type = data['scan_type']
        min_sev = data['minimum_severity']
        scan_date = data['scan_date']
        verified = data['verified']
        active = data['active']

        try:
            parser = import_parser_factory(
                data['file'],
                test,
                data['scan_type'],
            )
        except ValueError:
            raise Exception("Parser ValueError")

        try:
            items = parser.items
            original_items = list(test.finding_set.all())
            new_items = []
            mitigated_count = 0
            finding_count = 0
            finding_added_count = 0
            reactivated_count = 0

            for item in items:
                sev = item.severity
                if sev == 'Information' or sev == 'Informational':
                    sev = 'Info'

                if (Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]):
                    continue

                if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                        description=item.description).all()
                else:
                    findings = Finding.objects.filter(
                        title=item.title,
                        test=test,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(
                            sev)).all()

                if findings:
                    finding = findings[0]
                    if finding.mitigated:
                        finding.mitigated = None
                        finding.mitigated_by = None
                        finding.active = True
                        finding.verified = verified
                        finding.save()
                        note = Notes(entry="Re-activated by %s re-upload." %
                                     scan_type,
                                     author=self.context['request'].user)
                        note.save()
                        finding.notes.add(note)
                        reactivated_count += 1
                    new_items.append(finding)
                else:
                    item.test = test
                    item.date = test.target_start
                    item.reporter = self.context['request'].user
                    item.last_reviewed = timezone.now()
                    item.last_reviewed_by = self.context['request'].user
                    item.verified = verified
                    item.active = active
                    item.save()
                    finding_added_count += 1
                    new_items.append(item)
                    finding = item

                    if hasattr(item, 'unsaved_req_resp'):
                        for req_resp in item.unsaved_req_resp:
                            burp_rr = BurpRawRequestResponse(
                                finding=finding,
                                burpRequestBase64=req_resp['req'],
                                burpResponseBase64=req_resp['resp'])
                            burp_rr.clean()
                            burp_rr.save()

                    if item.unsaved_request and item.unsaved_response:
                        burp_rr = BurpRawRequestResponse(
                            finding=finding,
                            burpRequestBase64=item.unsaved_request,
                            burpResponseBase64=item.unsaved_response)
                        burp_rr.clean()
                        burp_rr.save()

                if finding:
                    finding_count += 1
                    for endpoint in item.unsaved_endpoints:
                        ep, created = Endpoint.objects.get_or_create(
                            protocol=endpoint.protocol,
                            host=endpoint.host,
                            path=endpoint.path,
                            query=endpoint.query,
                            fragment=endpoint.fragment,
                            product=test.engagement.product)
                        finding.endpoints.add(ep)

                    if item.unsaved_tags:
                        finding.tags = item.unsaved_tags

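            # Findings that existed before this import but were not matched this time are marked as mitigated below.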
            to_mitigate = set(original_items) - set(new_items)
            for finding in to_mitigate:
                finding.mitigated = datetime.datetime.combine(
                    scan_date,
                    timezone.now().time())
                finding.mitigated_by = self.context['request'].user
                finding.active = False
                finding.save()
                note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                             author=self.context['request'].user)
                note.save()
                finding.notes.add(note)
                mitigated_count += 1

        except SyntaxError:
            raise Exception("Parser SyntaxError")

        return test
Example #42
0
    def get_findings(self, file, test):
        tree = json.load(file)

        if "viewContentsV1" not in tree:
            raise ValueError("Report file is not a well-formed Coverity REST view report", file.name)

        items = list()
        for issue in tree["viewContentsV1"]["rows"]:

            # get only security findings
            if "Security" != issue.get("displayIssueKind"):
                continue

            description_formatted = "\n".join(
                [
                    f"**CID:** `{issue.get('cid')}`",
                    f"**Type:** `{issue.get('displayType')}`",
                    f"**Status:** `{issue.get('status')}`",
                    f"**Classification:** `{issue.get('classification')}`",
                ]
            )

            finding = Finding()
            finding.test = test
            finding.title = issue["displayType"]
            finding.severity = self.convert_displayImpact(issue.get("displayImpact"))
            finding.description = description_formatted
            finding.static_finding = True
            finding.dynamic_finding = False
            finding.unique_id_from_tool = issue.get("cid")

            if "firstDetected" in issue:
                finding.date = datetime.strptime(issue["firstDetected"], "%m/%d/%y").date()

            if "cwe" in issue and type(issue["cwe"]) == int:
                finding.cwe = issue["cwe"]

            if "displayFile" in issue:
                finding.file_path = issue["displayFile"]

            if "occurrenceCount" in issue:
                finding.nb_occurences = int(issue["occurrenceCount"])
            else:
                finding.nb_occurences = 1

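            # Map the Coverity triage status onto DefectDojo's active/verified/mitigated flags.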
            if "New" == issue.get("status"):
                finding.active = True
                finding.verified = False
            elif "Triaged" == issue.get("status"):
                finding.active = True
                finding.verified = True
            elif "Fixed" == issue.get("status"):
                finding.active = False
                finding.verified = True
            else:
                if "False Positive" == issue.get("classification"):
                    finding.false_p = True
                if "lastTriaged" in issue:
                    ds = issue["lastTriaged"][0:10]
                    finding.mitigated = datetime.strptime(ds, "%Y-%M-%d")
                finding.is_mitigated = True
                finding.active = False
                finding.verified = True

            items.append(finding)

        return items
Example #43
0
    def get_findings(self, filename, test):
        data = json.load(filename)
        dupes = dict()
        for item in data['vulnerabilities']:
            cve = item.get('vuln')

            title = item['vuln'] + ' - ' + item['package'] + '(' + item['package_type'] + ')'

            # Finding details information
            # depending on version image_digest/imageDigest
            findingdetail = '**Image hash**: ' + item.get('image_digest', item.get('imageDigest', 'None')) + '\n\n'
            findingdetail += '**Package**: ' + item['package'] + '\n\n'
            findingdetail += '**Package path**: ' + item['package_path'] + '\n\n'
            findingdetail += '**Package type**: ' + item['package_type'] + '\n\n'
            findingdetail += '**Feed**: ' + item['feed'] + '/' + item['feed_group'] + '\n\n'
            findingdetail += '**CVE**: ' + cve + '\n\n'
            findingdetail += '**CPE**: ' + item['package_cpe'] + '\n\n'
            findingdetail += '**Description**: ' + item.get('description', '<None>') + '\n\n'

            sev = item['severity']
            if sev == "Negligible" or sev == "Unknown":
                sev = 'Info'

            mitigation = "Upgrade to " + item['package_name'] + ' ' + item['fix'] + '\n'
            mitigation += "URL: " + item['url'] + '\n'

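            # Prefer the CVSSv3 base score from the NVD feed; otherwise fall back to vendor data, skipping -1 placeholders.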
            cvssv3_base_score = None
            if item['feed'] == 'nvdv2' or item['feed'] == 'vulnerabilities':
                if 'nvd_data' in item and len(item['nvd_data']) > 0:
                    cvssv3_base_score = item['nvd_data'][0]['cvss_v3']['base_score']
            else:
                # there may be other keys, but taking a best guess here
                if 'vendor_data' in item and len(item['vendor_data']) > 0:
                    # sometimes cvssv3 in 1st element will have -1 for "not set", but have data in the 2nd array item
                    if 'cvss_v3' in item['vendor_data'][0] and item['vendor_data'][0]['cvss_v3']['base_score'] != -1:
                        cvssv3_base_score = item['vendor_data'][0]['cvss_v3']['base_score']
                    elif len(item['vendor_data']) > 1:
                        if 'cvss_v3' in item['vendor_data'][1] and item['vendor_data'][1]['cvss_v3']['base_score'] != -1:
                            cvssv3_base_score = item['vendor_data'][1]['cvss_v3']['base_score']

            references = item['url']

            dupe_key = '|'.join([
                item.get('image_digest', item.get('imageDigest', 'None')),  # depending on version image_digest/imageDigest
                item['feed'],
                item['feed_group'],
                item['package_name'],
                item['package_version'],
                item['package_path'],
                item['vuln']
            ])

            if dupe_key in dupes:
                find = dupes[dupe_key]
            else:
                dupes[dupe_key] = True

                find = Finding(
                    title=title,
                    test=test,
                    cve=cve,
                    cvssv3_score=cvssv3_base_score,
                    description=findingdetail,
                    severity=sev,
                    mitigation=mitigation,
                    references=references,
                    file_path=item["package_path"],
                    component_name=item['package_name'],
                    component_version=item['package_version'],
                    url=item.get('url'),
                    static_finding=True,
                    dynamic_finding=False,
                    vuln_id_from_tool=item.get('vuln'),
                )

                dupes[dupe_key] = find

        return list(dupes.values())
Example #44
0
def ad_hoc_finding(request, pid):
    prod = Product.objects.get(id=pid)
    test = None
    try:
        eng = Engagement.objects.get(product=prod, name="Ad Hoc Engagement")
        tests = Test.objects.filter(engagement=eng)

        if len(tests) != 0:
            test = tests[0]
        else:
            test = Test(engagement=eng, test_type=Test_Type.objects.get(name="Pen Test"),
                        target_start=timezone.now(), target_end=timezone.now())
            test.save()
    except Exception:
        eng = Engagement(name="Ad Hoc Engagement", target_start=timezone.now(),
                         target_end=timezone.now(), active=False, product=prod)
        eng.save()
        test = Test(engagement=eng, test_type=Test_Type.objects.get(name="Pen Test"),
                    target_start=timezone.now(), target_end=timezone.now())
        test.save()
    form_error = False
    enabled = False
    jform = None
    form = AdHocFindingForm(initial={'date': timezone.now().date()})
    if get_system_setting('enable_jira'):
        if JIRA_PKey.objects.filter(product=test.engagement.product).count() != 0:
            enabled = JIRA_PKey.objects.get(product=test.engagement.product).push_all_issues
            jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')
    else:
        jform = None
    if request.method == 'POST':
        form = AdHocFindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = timezone.now()
                new_finding.mitigated_by = request.user
            create_template = new_finding.is_template
            # always false now since this will be deprecated soon in favor of new Finding_Template model
            new_finding.is_template = False
            new_finding.save()
            new_finding.endpoints.set(form.cleaned_data['endpoints'])
            new_finding.save()
            if 'jiraform-push_to_jira' in request.POST:
                jform = JIRAFindingForm(request.POST, prefix='jiraform', enabled=enabled)
                if jform.is_valid():
                    add_issue_task.delay(new_finding, jform.cleaned_data.get('push_to_jira'))
                messages.add_message(request,
                                     messages.SUCCESS,
                                     'Finding added successfully.',
                                     extra_tags='alert-success')
            if create_template:
                templates = Finding_Template.objects.filter(title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(request,
                                         messages.ERROR,
                                         'A finding template was not created.  A template with this title already '
                                         'exists.',
                                         extra_tags='alert-danger')
                else:
                    template = Finding_Template(title=new_finding.title,
                                                cwe=new_finding.cwe,
                                                severity=new_finding.severity,
                                                description=new_finding.description,
                                                mitigation=new_finding.mitigation,
                                                impact=new_finding.impact,
                                                references=new_finding.references,
                                                numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A finding template was also created.',
                                         extra_tags='alert-success')
            if '_Finished' in request.POST:
                return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
            else:
                return HttpResponseRedirect(reverse('add_findings', args=(test.id,)))
        else:
            if 'endpoints' in form.cleaned_data:
                form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
            else:
                form.fields['endpoints'].queryset = Endpoint.objects.none()
            form_error = True
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')
    product_tab = Product_Tab(pid, title="Add Finding", tab="engagements")
    product_tab.setEngagement(eng)
    return render(request, 'dojo/ad_hoc_findings.html',
                  {'form': form,
                   'product_tab': product_tab,
                   'temp': False,
                   'tid': test.id,
                   'pid': pid,
                   'form_error': form_error,
                   'jform': jform,
                   })
Example #45
0
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id",
                                                                 flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                            description=item.description)
                    else:
                        find = Finding.objects.filter(
                            title=item.title,
                            test__id=t.id,
                            severity=sev,
                            numerical_severity=Finding.get_numerical_severity(
                                sev),
                        )

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(
                                entry="Re-activated by %s re-upload." %
                                scan_type,
                                author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(
                                item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                burp_rr = BurpRawRequestResponse(
                                    finding=find,
                                    burpRequestBase64=req_resp["req"],
                                    burpResponseBase64=req_resp["resp"],
                                )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(
                                finding=find,
                                burpRequestBase64=item.unsaved_request,
                                burpResponseBase64=item.unsaved_response,
                            )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(
                                protocol=endpoint.protocol,
                                host=endpoint.host,
                                path=endpoint.path,
                                query=endpoint.query,
                                fragment=endpoint.fragment,
                                product=t.engagement.product)
                            find.endpoints.add(ep)

                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(
                        scan_date,
                        timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." %
                                 scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    '%s processed, a total of ' % scan_type +
                    message(finding_count, 'finding', 'processed'),
                    extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(finding_added_count, 'finding', 'added') +
                        ', which are new to this scan.',
                        extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(reactivated_count, 'finding', 'reactivated') +
                        ', which are back in the scan results.',
                        extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(
                        request,
                        messages.SUCCESS,
                        'A total of ' +
                        message(mitigated_count, 'finding', 'mitigated') +
                        '. Please manually verify each one.',
                        extra_tags='alert-success')

                create_notification(event='results_added',
                                    title='Results added',
                                    finding_count=finding_count,
                                    test=t,
                                    engagement=engagement,
                                    url=request.build_absolute_uri(
                                        reverse('view_test', args=(t.id, ))))

                return HttpResponseRedirect(reverse('view_test',
                                                    args=(t.id, )))
            except SyntaxError:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'There appears to be an error in the XML report, please check and try again.',
                    extra_tags='alert-danger')

    add_breadcrumb(parent=t,
                   title="Re-upload a %s" % scan_type,
                   top_level=False,
                   request=request)
    return render(
        request, 'dojo/import_scan_results.html', {
            'form': form,
            'eid': engagement.id,
            'additional_message': additional_message,
        })
Example #46
0
    def __xml_sca_flaw_to_finding(cls, xml_node, test):
        ns = cls.ns

        # Defaults
        finding = Finding()
        finding.test = test
        finding.mitigation = "Make sure to upgrade this component."
        finding.verified = False
        finding.active = False
        finding.static_finding = True
        finding.dynamic_finding = False
        finding.unique_id_from_tool = cls.__xml_sca_flaw_to_dupekey(xml_node)

        _library = xml_node.xpath('string(@library)', namespaces=ns)
        _vendor = xml_node.xpath('string(@vendor)', namespaces=ns)
        _version = xml_node.xpath('string(@version)', namespaces=ns)
        _cvss = xml_node.xpath('number(@max_cvss_score)', namespaces=ns)
        _file = xml_node.xpath('string(@file_name)', namespaces=ns)
        _file_path = xml_node.xpath('string(x:file_paths/x:file_path/@value)',
                                    namespaces=ns)

        # Report values
        finding.severity = cls.__cvss_to_severity(_cvss)
        finding.numerical_severity = Finding.get_numerical_severity(
            finding.severity)
        finding.cwe = 937
        finding.title = "Vulnerable component: {0}:{1}".format(
            _library, _version)
        finding.component_name = _vendor + " / " + _library + ":" + _version
        finding.file_path = _file

        # Use report-date, otherwise DD doesn't
        # overwrite old matching SCA findings.
        finding.date = datetime.strptime(
            xml_node.xpath(
                'string(//x:component/ancestor::x:detailedreport/@last_update_time)',
                namespaces=ns), '%Y-%m-%d %H:%M:%S %Z')

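        # Summarize every CVE reported for this vulnerable component in the finding description.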
        _description = 'This library has known vulnerabilities.\n'
        _description += 'Full component path: ' + _file_path + '\n'
        _description += 'Vulnerabilities:\n\n'
        for vuln_node in xml_node.xpath('x:vulnerabilities/x:vulnerability',
                                        namespaces=ns):
            _description += \
                "**CVE: [{0}](https://nvd.nist.gov/vuln/detail/{0})** ({1})\n" \
                "CVS Score: {2} ({3})\n" \
                "Summary: \n>{4}" \
                "\n\n-----\n\n".format(
                    vuln_node.xpath('string(@cve_id)', namespaces=ns),
                    datetime.strptime(vuln_node.xpath('string(@first_found_date)', namespaces=ns),
                                      '%Y-%m-%d %H:%M:%S %Z').strftime("%Y/%m"),
                    vuln_node.xpath('string(@cvss_score)', namespaces=ns),
                    cls.vc_severity_mapping.get(int(vuln_node.xpath('string(@severity)', namespaces=ns)), 'Info'),
                    vuln_node.xpath('string(@cve_summary)', namespaces=ns))
        finding.description = _description

        return finding
Example #47
0
    def __init__(self, filename, test):
        with open(filename.temporary_file_path(), "r") as fileobj:
            raw_data = fileobj.read()
            raw_data = raw_data.replace("scoutsuite_results =", "")
        data = json.loads(raw_data)
        find_date = datetime.now()
        dupes = {}

        test_description = ""
        account_id = data["account_id"]
        test_description = "%s  **Account:** %s\n" % (test_description, account_id)
        last_run = data["last_run"]
        test_description = "%s  **Ruleset:** %s\n" % (test_description, last_run["ruleset_name"])
        test_description = "%s  **Ruleset Description:** %s\n" % (test_description, last_run["ruleset_about"])

        # Summary of Services
        test_description = "%s\n**Services** \n\n" % (test_description)
        for service, items in list(last_run["summary"].items()):
            test_description = "%s\n**%s** \n" % (test_description, service.upper())
            test_description = "%s\n* **Checked Items:** %s\n" % (test_description, items["checked_items"])
            test_description = "%s* **Flagged Items:** %s\n" % (test_description, items["flagged_items"])
            test_description = "%s* **Max Level:** %s\n" % (test_description, items["max_level"])
            test_description = "%s* **Resource Count:** %s\n" % (test_description, items["resources_count"])
            test_description = "%s* **Rules Count:** %s\n\n" % (test_description, items["rules_count"])
        test.description = test_description

        scoutsuite_findings = []

        # Configured Services
        for service in list(data["services"].items()):
            for service_item in service:
                if "findings" in service_item:
                    for name, finding in list(service_item["findings"].items()):
                        if finding["items"]:
                            description_text = ""
                            for name in finding["items"]:
                                description_text = description_text + "**Location:** " + name + "\n\n---\n"
                                description_text = description_text + "\n"
                                key = name.split('.')
                                i = 1
                                lookup = service_item
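                                # Walk the dotted item path down through the service data to reach the flagged resource.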
                                while i < len(key):
                                    if key[i] in lookup:
                                        if isinstance(lookup[key[i]], dict):
                                            lookup = lookup[key[i]]
                                            if (key[i - 1] == "security_groups" or key[i - 1] == "PolicyDocument"):
                                                break
                                    i = i + 1

                                self.recursive_print(lookup)
                                description_text = description_text + self.item_data
                                self.item_data = ""

                            refs = finding["references"]
                            mobsf_item = {
                                "category": "Mobile Permissions",
                                "title": finding["description"],
                                "severity": finding["level"],
                                "description": description_text,
                                "references": ' '.join(filter(None, refs) if hasattr(refs, '__len__') else [])
                            }
                            scoutsuite_findings.append(mobsf_item)

        for scoutsuite_finding in scoutsuite_findings:
            title = strip_tags(scoutsuite_finding["title"])
            sev = self.getCriticalityRating(scoutsuite_finding["severity"])
            description = scoutsuite_finding["description"]
            references = scoutsuite_finding["references"]
            dupe_key = sev + title
            if dupe_key in dupes:
                find = dupes[dupe_key]
                if description is not None:
                    find.description += description
            else:
                find = Finding(title=Truncator(title).words(6),
                                cwe=1032,  # Security Configuration Weaknesses, would like to fine tune
                                test=test,
                                active=False,
                                verified=False,
                                description="**Account:** " + account_id + "\n" + description,
                                severity=sev,
                                numerical_severity=Finding.get_numerical_severity(sev),
                                references=references,
                                date=find_date,
                                dynamic_finding=True)
                dupes[dupe_key] = find
        self.items = list(dupes.values())
Example #48
0
    def __xml_flaw_to_finding(cls, xml_node, mitigation_text, test):
        ns = cls.ns

        # Defaults
        finding = Finding()
        finding.test = test
        finding.mitigation = mitigation_text
        finding.verified = False
        finding.active = False
        finding.static_finding = True
        finding.dynamic_finding = False
        finding.unique_id_from_tool = cls.__xml_flaw_to_unique_id(xml_node)

        # Report values
        finding.severity = cls.__xml_flaw_to_severity(xml_node)
        finding.numerical_severity = Finding.get_numerical_severity(
            finding.severity)
        finding.cwe = int(xml_node.attrib['cweid'])
        finding.title = xml_node.attrib['categoryname']
        finding.impact = 'CIA Impact: ' + xml_node.attrib['cia_impact'].upper()

        # Note that DD's legacy dedupe hashing uses the description field,
        # so for compatibility, description field should contain very static info.
        _description = xml_node.attrib['description'].replace('. ', '.\n')
        finding.description = _description

        _references = 'None'
        if 'References:' in _description:
            _references = _description[_description.index('References:') +
                                       13:].replace(')  ', ')\n')
        finding.references = _references \
            + "\n\nVulnerable Module: " + xml_node.attrib['module'] \
            + "\nType: " + xml_node.attrib['type'] \
            + "\nVeracode issue ID: " + xml_node.attrib['issueid']

        _date_found = test.target_start
        if 'date_first_occurrence' in xml_node.attrib:
            _date_found = datetime.strptime(
                xml_node.attrib['date_first_occurrence'],
                '%Y-%m-%d %H:%M:%S %Z')
        finding.date = _date_found

        _is_mitigated = False
        _mitigated_date = None
        if ('mitigation_status' in xml_node.attrib and
                xml_node.attrib["mitigation_status"].lower() == "accepted"):
            # This happens if any mitigation (including 'Potential false positive')
            # was accepted in VC.
            _is_mitigated = True
            _mitigated_date = datetime.strptime(
                xml_node.xpath(
                    'string(.//x:mitigations/x:mitigation[last()]/@date)',
                    namespaces=ns), '%Y-%m-%d %H:%M:%S %Z')
        finding.is_Mitigated = _is_mitigated
        finding.mitigated = _mitigated_date
        finding.active = not _is_mitigated

        # Check if it's a FP in veracode.
        # Only check in case finding was mitigated, since DD doesn't allow
        # both `verified` and `false_p` to be true, while `verified` is implied on the import
        # level, not on the finding-level.
        _false_positive = False
        if _is_mitigated:
            _remediation_status = xml_node.xpath('string(@remediation_status)',
                                                 namespaces=ns).lower()
            if "false positive" in _remediation_status or "falsepositive" in _remediation_status:
                _false_positive = True
        finding.false_p = _false_positive

        _line_number = xml_node.xpath('string(@line)')
        finding.line = _line_number if _line_number else None
        finding.line_number = finding.line
        finding.sast_source_line = finding.line

        _source_file = xml_node.xpath('string(@sourcefile)')
        finding.file_path = _source_file if _source_file else None
        finding.sourcefile = finding.file_path
        finding.sast_source_file_path = finding.file_path

        _component = xml_node.xpath('string(@module)') + ': ' + xml_node.xpath(
            'string(@scope)')
        finding.component_name = _component if _component != ': ' else None

        _sast_source_obj = xml_node.xpath('string(@functionprototype)')
        finding.sast_source_object = _sast_source_obj if _sast_source_obj else None

        return finding
Example #49
0
def add_findings(request, tid):
    test = Test.objects.get(id=tid)
    form_error = False
    enabled = False
    jform = None
    form = AddFindingForm(initial={'date': timezone.now().date()})

    if get_system_setting('enable_jira') and JIRA_PKey.objects.filter(product=test.engagement.product).count() != 0:
        enabled = JIRA_PKey.objects.get(product=test.engagement.product).push_all_issues
        jform = JIRAFindingForm(enabled=enabled, prefix='jiraform')
    else:
        jform = None

    if request.method == 'POST':
        form = AddFindingForm(request.POST)
        if form.is_valid():
            new_finding = form.save(commit=False)
            new_finding.test = test
            new_finding.reporter = request.user
            new_finding.numerical_severity = Finding.get_numerical_severity(
                new_finding.severity)
            if new_finding.false_p or new_finding.active is False:
                new_finding.mitigated = timezone.now()
                new_finding.mitigated_by = request.user
            create_template = new_finding.is_template
            # always false now since this will be deprecated soon in favor of new Finding_Template model
            new_finding.is_template = False
            new_finding.save()
            new_finding.endpoints = form.cleaned_data['endpoints']
            new_finding.save()
            if 'jiraform-push_to_jira' in request.POST:
                jform = JIRAFindingForm(request.POST, prefix='jiraform', enabled=enabled)
                if jform.is_valid():
                    add_issue_task.delay(new_finding, jform.cleaned_data.get('push_to_jira'))
                messages.add_message(request,
                                     messages.SUCCESS,
                                     'Finding added successfully.',
                                     extra_tags='alert-success')
            if create_template:
                templates = Finding_Template.objects.filter(title=new_finding.title)
                if len(templates) > 0:
                    messages.add_message(request,
                                         messages.ERROR,
                                         'A finding template was not created.  A template with this title already '
                                         'exists.',
                                         extra_tags='alert-danger')
                else:
                    template = Finding_Template(title=new_finding.title,
                                                cwe=new_finding.cwe,
                                                severity=new_finding.severity,
                                                description=new_finding.description,
                                                mitigation=new_finding.mitigation,
                                                impact=new_finding.impact,
                                                references=new_finding.references,
                                                numerical_severity=new_finding.numerical_severity)
                    template.save()
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A finding template was also created.',
                                         extra_tags='alert-success')
            if '_Finished' in request.POST:
                return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
            else:
                return HttpResponseRedirect(reverse('add_findings', args=(test.id,)))
        else:
            if 'endpoints' in form.cleaned_data:
                form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
            else:
                form.fields['endpoints'].queryset = Endpoint.objects.none()
            form_error = True
            messages.add_message(request,
                                 messages.ERROR,
                                 'The form has errors, please correct them below.',
                                 extra_tags='alert-danger')
    add_breadcrumb(parent=test, title="Add Finding", top_level=False, request=request)
    return render(request, 'dojo/add_findings.html',
                  {'form': form,
                   'test': test,
                   'temp': False,
                   'tid': tid,
                   'form_error': form_error,
                   'jform': jform,
                   })
Example #50
0
    def __init__(self, filename, test):
        content = filename.read()
        try:
            data = json.loads(str(content, 'utf-8'))
        except (JSONDecodeError, TypeError):
            data = json.loads(content)

        find_date = datetime.now()
        # Collect findings across all policy evaluations in the report.
        self.items = list()

        try:
            for checks in data:
                for policies in checks.values():
                    for images in policies.values():
                        for evaluation in images:
                            try:
                                results = evaluation['detail']['result']
                                imageid = results['image_id']
                                imageids = results['result']
                                imagechecks = imageids[imageid]
                                rows = imagechecks['result']['rows']
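                                # Each row of the policy evaluation table becomes one static finding.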
                                for row in rows:
                                    repo, tag = row[1].split(':', 2)
                                    description = row[5]
                                    severity = map_gate_action_to_severity(
                                        row[6])
                                    policyid = row[8]
                                    policyname = policy_name(
                                        evaluation['detail']['policy']
                                        ['policies'], policyid)
                                    gate = row[3]
                                    triggerid = row[2]
                                    cve = extract_cve(triggerid)
                                    title = policyname + ' - gate|' + gate + ' - trigger|' + triggerid
                                    find = Finding(
                                        title=title,
                                        test=test,
                                        cve=cve,
                                        description=description,
                                        severity=severity,
                                        numerical_severity=Finding.get_numerical_severity(severity),
                                        references="Policy ID: {}\nTrigger ID: {}".format(
                                            policyid, triggerid),
                                        file_path=search_filepath(description),
                                        component_name=repo,
                                        component_version=tag,
                                        date=find_date,
                                        static_finding=True,
                                        dynamic_finding=False)
                                    self.items.append(find)
                            except (KeyError, IndexError) as err:
                                raise Exception(
                                    "Invalid format: {} key not found".format(
                                        err))
        except AttributeError as err:
            # import empty policies without error (e.g. policies or images objects are not a dictionary)
            logger.warning('Exception at %s',
                           'parsing anchore policy',
                           exc_info=err)
Example #51
0
def re_import_scan_results(request, tid):
    additional_message = "When re-uploading a scan, any findings not found in original scan will be updated as " \
                         "mitigated.  The process attempts to identify the differences, however manual verification " \
                         "is highly recommended."
    t = get_object_or_404(Test, id=tid)
    scan_type = t.test_type.name
    engagement = t.engagement
    form = ReImportScanForm()

    form.initial['tags'] = [tag.name for tag in t.tags]
    if request.method == "POST":
        form = ReImportScanForm(request.POST, request.FILES)
        if form.is_valid():
            scan_date = form.cleaned_data['scan_date']
            min_sev = form.cleaned_data['minimum_severity']
            file = request.FILES['file']
            scan_type = t.test_type.name
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            tags = request.POST.getlist('tags')
            ts = ", ".join(tags)
            t.tags = ts
            try:
                parser = import_parser_factory(file, t)
            except ValueError:
                raise Http404()

            try:
                items = parser.items
                original_items = t.finding_set.all().values_list("id", flat=True)
                new_items = []
                mitigated_count = 0
                finding_count = 0
                finding_added_count = 0
                reactivated_count = 0
                for item in items:
                    sev = item.severity
                    if sev == 'Information' or sev == 'Informational':
                        sev = 'Info'

                    if Finding.SEVERITIES[sev] > Finding.SEVERITIES[min_sev]:
                        continue

                    if scan_type == 'Veracode Scan' or scan_type == 'Arachni Scan':
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      description=item.description
                                                      )
                    else:
                        find = Finding.objects.filter(title=item.title,
                                                      test__id=t.id,
                                                      severity=sev,
                                                      numerical_severity=Finding.get_numerical_severity(sev),
                                                      )

                    if len(find) == 1:
                        find = find[0]
                        if find.mitigated:
                            # it was once fixed, but now back
                            find.mitigated = None
                            find.mitigated_by = None
                            find.active = True
                            find.verified = verified
                            find.save()
                            note = Notes(entry="Re-activated by %s re-upload." % scan_type,
                                         author=request.user)
                            note.save()
                            find.notes.add(note)
                            reactivated_count += 1
                        new_items.append(find.id)
                    else:
                        item.test = t
                        item.date = t.target_start
                        item.reporter = request.user
                        item.last_reviewed = timezone.now()
                        item.last_reviewed_by = request.user
                        item.verified = verified
                        item.active = active
                        item.save()
                        finding_added_count += 1
                        new_items.append(item.id)
                        find = item

                        if hasattr(item, 'unsaved_req_resp') and len(item.unsaved_req_resp) > 0:
                            for req_resp in item.unsaved_req_resp:
                                burp_rr = BurpRawRequestResponse(finding=find,
                                                                 burpRequestBase64=req_resp["req"],
                                                                 burpResponseBase64=req_resp["resp"],
                                                                 )
                                burp_rr.clean()
                                burp_rr.save()

                        if item.unsaved_request is not None and item.unsaved_response is not None:
                            burp_rr = BurpRawRequestResponse(finding=find,
                                                             burpRequestBase64=item.unsaved_request,
                                                             burpResponseBase64=item.unsaved_response,
                                                             )
                            burp_rr.clean()
                            burp_rr.save()
                    if find:
                        finding_count += 1
                        for endpoint in item.unsaved_endpoints:
                            ep, created = Endpoint.objects.get_or_create(protocol=endpoint.protocol,
                                                                         host=endpoint.host,
                                                                         path=endpoint.path,
                                                                         query=endpoint.query,
                                                                         fragment=endpoint.fragment,
                                                                         product=t.engagement.product)
                            find.endpoints.add(ep)

                        if item.unsaved_tags is not None:
                            find.tags = item.unsaved_tags

                # calculate the difference
                to_mitigate = set(original_items) - set(new_items)
                for finding_id in to_mitigate:
                    finding = Finding.objects.get(id=finding_id)
                    finding.mitigated = datetime.combine(scan_date, timezone.now().time())
                    finding.mitigated_by = request.user
                    finding.active = False
                    finding.save()
                    note = Notes(entry="Mitigated by %s re-upload." % scan_type,
                                 author=request.user)
                    note.save()
                    finding.notes.add(note)
                    mitigated_count += 1
                messages.add_message(request,
                                     messages.SUCCESS,
                                     '%s processed, a total of ' % scan_type + message(finding_count, 'finding',
                                                                                       'processed'),
                                     extra_tags='alert-success')
                if finding_added_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(finding_added_count, 'finding',
                                                                 'added') + ' that are new to this scan.',
                                         extra_tags='alert-success')
                if reactivated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(reactivated_count, 'finding',
                                                                 'reactivated') + ' that are back in the scan results.',
                                         extra_tags='alert-success')
                if mitigated_count > 0:
                    messages.add_message(request,
                                         messages.SUCCESS,
                                         'A total of ' + message(mitigated_count, 'finding',
                                                                 'mitigated') + '. Please manually verify each one.',
                                         extra_tags='alert-success')

                create_notification(event='results_added', title='Results added', finding_count=finding_count, test=t, engagement=engagement, url=request.build_absolute_uri(reverse('view_test', args=(t.id,))))

                return HttpResponseRedirect(reverse('view_test', args=(t.id,)))
            except SyntaxError:
                messages.add_message(request,
                                     messages.ERROR,
                                     'There appears to be an error in the XML report, please check and try again.',
                                     extra_tags='alert-danger')

    add_breadcrumb(parent=t, title="Re-upload a %s" % scan_type, top_level=False, request=request)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'eid': engagement.id,
                   'additional_message': additional_message,
                   })
Example #52
0
    def manage_vulnerability(self,
                             dupes,
                             vulnerability,
                             ns,
                             bom_refs,
                             report_date,
                             component_name=None,
                             component_version=None):
        ref = vulnerability.attrib["ref"]
        vuln_id = vulnerability.findtext("v:id", namespaces=ns)

        severity = vulnerability.findtext("v:ratings/v:rating/v:severity",
                                          namespaces=ns)
        description = "\n".join([
            f"**Ref:** {ref}",
            f"**Id:** {vuln_id}",
            f"**Severity:** {severity}",
        ])

        if component_name is None:
            bom = bom_refs[ref]
            component_name = bom["name"]
            component_version = bom["version"]

        if severity is None:
            severity = "Medium"
        if "Unknown" == severity:
            severity = "Info"
        if "None" == severity:
            severity = "Info"
        references = ""
        for adv in vulnerability.findall("v:advisories/v:advisory",
                                         namespaces=ns):
            references += f"{adv.text}\n"

        finding = Finding(
            title=vuln_id,
            description=description,
            severity=severity,
            numerical_severity=Finding.get_numerical_severity(severity),
            references=references,
            component_name=component_name,
            component_version=component_version,
            unique_id_from_tool=vuln_id,
            nb_occurences=1,
        )
        if report_date:
            finding.date = report_date

        # manage if the ID is a CVE
        if re.fullmatch("CVE-[0-9]+-[0-9]+", vuln_id):
            finding.cve = vuln_id

        # manage CVSS
        cvssv3 = self._get_cvssv3(vulnerability, ns)
        if cvssv3:
            cvssv3.compute_base_score()
            finding.cvssv3 = cvssv3.clean_vector()
            finding.cvssv3_score = float(cvssv3.base_score)

        # if there is some CWE
        cwes = self.get_cwes(vulnerability, ns)
        if len(cwes) > 1:
            # FIXME support more than one CWE
            LOGGER.warning(
                f"more than one CWE for a finding {cwes}. NOT supported by parser API"
            )
        if len(cwes) > 0:
            finding.cwe = cwes[0]

        dupe_key = hashlib.sha256("|".join([
            "vulnerability",
            ref,
        ]).encode("utf-8")).hexdigest()

        if dupe_key in dupes:
            find = dupes[dupe_key]
            find.description += description
            find.nb_occurences += 1
        else:
            dupes[dupe_key] = finding
Example #53
0
def get_item(vulnerability, test):
    # Some items have multiple CVEs for some reason, so get the CVE with the highest CVSSv3 score.
    # Note: the xray v2 importer just took the first CVE in the list, that doesn't seem ideal though
    highestCvssV3Index = 0
    highestCvssV3Score = 0

    for thisCveIndex in range(len(vulnerability.get('cves', []))):
        # not all cves have cvssv3 scores, so skip these. If no v3 scores, we'll default to index 0
        if 'cvss_v3_score' in vulnerability['cves'][thisCveIndex]:
            thisCvssV3Score = vulnerability['cves'][thisCveIndex][
                'cvss_v3_score']
            if thisCvssV3Score > highestCvssV3Score:
                highestCvssV3Index = thisCveIndex
                highestCvssV3Score = thisCvssV3Score

    # Following the CVSS Scoring per https://nvd.nist.gov/vuln-metrics/cvss
    if 'severity' in vulnerability:
        if vulnerability['severity'] == 'Unknown':
            severity = "Info"
        else:
            severity = vulnerability['severity'].title()
    # TODO: Needs UNKNOWN new status in the model.
    else:
        severity = "Info"

    cveIndex = highestCvssV3Index

    cve = None
    cvss_v3 = "No CVSS v3 score."  # for justification field
    cvssv3 = None  # for actual cvssv3 field
    cvss_v2 = "No CVSS v2 score."
    mitigation = None
    extra_desc = ""

    cves = vulnerability.get('cves', [])
    if len(cves) > 0:
        worstCve = cves[cveIndex]
        if 'cve' in cves[cveIndex]:
            cve = worstCve['cve']
        if 'cvss_v3_vector' in worstCve:
            cvss_v3 = worstCve['cvss_v3_vector']
            cvssv3 = cvss_v3
        if 'cvss_v2_vector' in worstCve:
            cvss_v2 = worstCve['cvss_v2_vector']

    if 'fixed_versions' in vulnerability and len(
            vulnerability['fixed_versions']) > 0:
        mitigation = "Versions containing a fix:\n"
        mitigation = mitigation + "\n".join(vulnerability['fixed_versions'])

    if 'external_advisory_source' in vulnerability and 'external_advisory_severity' in vulnerability:
        extra_desc = vulnerability[
            'external_advisory_source'] + ": " + vulnerability[
                'external_advisory_severity']

    if vulnerability['issue_id']:
        title = vulnerability['issue_id'] + " - " + vulnerability['summary']
    else:
        title = vulnerability['summary']

    references = "\n".join(vulnerability['references'])

    scan_time = datetime.strptime(vulnerability['artifact_scan_time'],
                                  "%Y-%m-%dT%H:%M:%S%z")

    # component has several parts separated by colons. Last part is the version, everything else is the name
    splitComponent = vulnerability['vulnerable_component'].split(':')
    component_name = ":".join(splitComponent[:-1])
    component_version = splitComponent[-1:][0]
    # remove package type from component name
    component_name = component_name.split("://", 1)[1]

    tags = ["packagetype_" + vulnerability['package_type']]

    # create the finding object
    finding = Finding(
        title=title,
        cve=cve,
        test=test,
        severity=severity,
        description=(vulnerability['description'] + "\n\n" +
                     extra_desc).strip(),
        mitigation=mitigation,
        component_name=component_name,
        component_version=component_version,
        file_path=vulnerability['path'],
        severity_justification="CVSS v3 base score: {}\nCVSS v2 base score: {}"
        .format(cvss_v3, cvss_v2),
        static_finding=True,
        dynamic_finding=False,
        references=references,
        impact=severity,
        cvssv3=cvssv3,
        date=scan_time,
        unique_id_from_tool=vulnerability['issue_id'],
        tags=tags)

    return finding
Example #54
0
    def __init__(self, filename, test):
        tree = filename.read()
        try:
            data = json.loads(str(tree, 'utf-8'))
        except:
            data = json.loads(tree)
        find_date = datetime.now()
        dupes = {}
        test_description = ""
        if "name" in data:
            test_description = "**Info:**\n"
            if "packagename" in data:
                test_description = "%s  **Package Name:** %s\n" % (
                    test_description, data["packagename"])

            if "mainactivity" in data:
                test_description = "%s  **Main Activity:** %s\n" % (
                    test_description, data["mainactivity"])

            if "pltfm" in data:
                test_description = "%s  **Platform:** %s\n" % (
                    test_description, data["pltfm"])

            if "sdk" in data:
                test_description = "%s  **SDK:** %s\n" % (test_description,
                                                          data["sdk"])

            if "min" in data:
                test_description = "%s  **Min SDK:** %s\n" % (test_description,
                                                              data["min"])

            if "targetsdk" in data:
                test_description = "%s  **Target SDK:** %s\n" % (
                    test_description, data["targetsdk"])

            if "minsdk" in data:
                test_description = "%s  **Min SDK:** %s\n" % (test_description,
                                                              data["minsdk"])

            if "maxsdk" in data:
                test_description = "%s  **Max SDK:** %s\n" % (test_description,
                                                              data["maxsdk"])

            test_description = "%s\n**File Information:**\n" % (
                test_description)

            if "name" in data:
                test_description = "%s  **Name:** %s\n" % (test_description,
                                                           data["name"])

            if "md5" in data:
                test_description = "%s  **MD5:** %s\n" % (test_description,
                                                          data["md5"])

            if "sha1" in data:
                test_description = "%s  **SHA-1:** %s\n" % (test_description,
                                                            data["sha1"])

            if "sha256" in data:
                test_description = "%s  **SHA-256:** %s\n" % (test_description,
                                                              data["sha256"])

            if "size" in data:
                test_description = "%s  **Size:** %s\n" % (test_description,
                                                           data["size"])

            if "urls" in data:
                curl = ""
                for url in data["urls"]:
                    for curl in url["urls"]:
                        curl = "%s\n" % (curl)

                if curl:
                    test_description = "%s\n**URL's:**\n %s\n" % (
                        test_description, curl)

            if "bin_anal" in data:
                test_description = "%s  \n**Binary Analysis:** %s\n" % (
                    test_description, data["bin_anal"])

        test.description = strip_tags(test_description)

        mobsf_findings = []
        # Mobile Permissions
        if "permissions" in data:
            # for permission, details in data["permissions"].items():
            if type(data["permissions"]) is list:
                for details in data["permissions"]:
                    mobsf_item = {
                        "category":
                        "Mobile Permissions",
                        "title":
                        details.get("name", ""),
                        "severity":
                        self.getSeverityForPermission(details.get("status")),
                        "description":
                        "**Permission Type:** " + details.get("name", "") +
                        " (" + details.get("status", "") +
                        ")\n\n**Description:** " +
                        details.get("description", "") + "\n\n**Reason:** " +
                        details.get("reason", ""),
                        "file_path":
                        None
                    }
                    mobsf_findings.append(mobsf_item)
            else:
                for permission, details in list(data["permissions"].items()):
                    mobsf_item = {
                        "category":
                        "Mobile Permissions",
                        "title":
                        permission,
                        "severity":
                        self.getSeverityForPermission(details.get(
                            "status", "")),
                        "description":
                        "**Permission Type:** " + permission +
                        "\n\n**Description:** " +
                        details.get("description", ""),
                        "file_path":
                        None
                    }
                    mobsf_findings.append(mobsf_item)

        # Insecure Connections
        if "insecure_connections" in data:
            for details in data["insecure_connections"]:
                insecure_urls = ""
                for url in details.split(','):
                    insecure_urls = insecure_urls + url + "\n"

                mobsf_item = {
                    "category": None,
                    "title": "Insecure Connections",
                    "severity": "Low",
                    "description": insecure_urls,
                    "file_path": None
                }
                mobsf_findings.append(mobsf_item)

        # Binary Analysis
        if "binary_analysis" in data:
            if type(data["binary_analysis"]) is list:
                for details in data["binary_analysis"]:
                    for binary_analysis_type in details:
                        if "name" != binary_analysis_type:
                            mobsf_item = {
                                "category":
                                "Binary Analysis",
                                "title":
                                details[binary_analysis_type]
                                ["description"].split(".")[0],
                                "severity":
                                details[binary_analysis_type]
                                ["severity"].replace("warning", "low").title(),
                                "description":
                                details[binary_analysis_type]["description"],
                                "file_path":
                                details["name"]
                            }
                            mobsf_findings.append(mobsf_item)
            else:
                for binary_analysis_type, details in list(
                        data["binary_analysis"].items()):
                    # "Binary makes use of insecure API(s)":{
                    #     "detailed_desc":"The binary may contain the following insecure API(s) _vsprintf.",
                    #     "severity":"high",
                    #     "cvss":6,
                    #     "cwe":"CWE-676 - Use of Potentially Dangerous Function",
                    #     "owasp-mobile":"M7: Client Code Quality",
                    #     "masvs":"MSTG-CODE-8"
                    # }
                    mobsf_item = {
                        "category":
                        "Binary Analysis",
                        "title":
                        details["detailed_desc"],
                        "severity":
                        details["severity"].replace("good", "info").title(),
                        "description":
                        details["detailed_desc"],
                        "file_path":
                        None
                    }
                    mobsf_findings.append(mobsf_item)

        # specific node for Android reports
        if "android_api" in data:
            # "android_insecure_random": {
            #     "files": {
            #         "u/c/a/b/a/c.java": "9",
            #         "kotlinx/coroutines/repackaged/net/bytebuddy/utility/RandomString.java": "3",
            #         ...
            #         "hu/mycompany/vbnmqweq/gateway/msg/Response.java": "13"
            #     },
            #     "metadata": {
            #         "id": "android_insecure_random",
            #         "description": "The App uses an insecure Random Number Generator.",
            #         "type": "Regex",
            #         "pattern": "java\\.util\\.Random;",
            #         "severity": "high",
            #         "input_case": "exact",
            #         "cvss": 7.5,
            #         "cwe": "CWE-330 Use of Insufficiently Random Values",
            #         "owasp-mobile": "M5: Insufficient Cryptography",
            #         "masvs": "MSTG-CRYPTO-6"
            #     }
            # },
            for api, details in list(data["android_api"].items()):
                mobsf_item = {
                    "category":
                    "Android API",
                    "title":
                    details["metadata"]["description"],
                    "severity":
                    details["metadata"]["severity"].replace("warning",
                                                            "low").title(),
                    "description":
                    "**API:** " + api + "\n\n**Description:** " +
                    details["metadata"]["description"],
                    "file_path":
                    None
                }
                mobsf_findings.append(mobsf_item)

        # Manifest
        if "manifest" in data:
            for details in data["manifest"]:
                mobsf_item = {
                    "category": "Manifest",
                    "title": details["title"],
                    "severity": details["stat"],
                    "description": details["desc"],
                    "file_path": None
                }
                mobsf_findings.append(mobsf_item)

        # MobSF Findings
        if "findings" in data:
            for title, finding in list(data["findings"].items()):
                description = title
                file_path = None

                if "path" in finding:
                    description = description + "\n\n**Files:**\n"
                    for path in finding["path"]:
                        if file_path is None:
                            file_path = path
                        description = description + " * " + path + "\n"

                mobsf_item = {
                    "category": "Findings",
                    "title": title,
                    "severity": finding["level"],
                    "description": description,
                    "file_path": file_path
                }

                mobsf_findings.append(mobsf_item)

        for mobsf_finding in mobsf_findings:
            title = strip_tags(mobsf_finding["title"])
            sev = self.getCriticalityRating(mobsf_finding["severity"])
            description = ""
            file_path = None
            if mobsf_finding["category"]:
                description = "**Category:** " + mobsf_finding[
                    "category"] + "\n\n"
            description = description + strip_tags(
                mobsf_finding["description"])
            if mobsf_finding["file_path"]:
                file_path = mobsf_finding["file_path"]
            dupe_key = sev + title
            if dupe_key in dupes:
                find = dupes[dupe_key]
                if description is not None:
                    find.description += description
            else:
                find = Finding(
                    title=Truncator(title).words(5),
                    cwe=919,  # Weaknesses in Mobile Applications
                    test=test,
                    active=False,
                    verified=False,
                    description=description,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    references=None,
                    date=find_date,
                    file_path=file_path,
                    static_finding=True)
                dupes[dupe_key] = find
        self.items = list(dupes.values())
Example #55
0
def get_item(item_node, test):
    from dojo.models import Finding, Endpoint
    import html2text

    host_node = item_node.findall('host')[0]

    url = host_node.text
    rhost = re.search(
        "(http|https|ftp)\://([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&amp;%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.(com|edu|gov|int|mil|net|org|biz|arpa|info|name|pro|aero|coop|museum|[a-zA-Z]{2}))[\:]*([0-9]+)*([/]*($|[a-zA-Z0-9\.\,\?\'\\\+&amp;%\$#\=~_\-]+)).*?$",
        url)
    protocol = rhost.group(1)
    host = rhost.group(4)

    port = 80
    if protocol == 'https':
        port = 443

    if rhost.group(11) is not None:
        port = rhost.group(11)

    ip = host_node.get('ip')
    url = item_node.get('url')
    path = item_node.findall('path')[0].text
    location = item_node.findall('location')[0].text

    request = item_node.findall('./requestresponse/request')[0].text if len(
        item_node.findall('./requestresponse/request')) > 0 else ""
    response = item_node.findall('./requestresponse/response')[0].text if len(
        item_node.findall('./requestresponse/response')) > 0 else ""

    try:
        dupe_endpoint = Endpoint.objects.get(protocol=protocol,
                                             host=host + (":" + port) if port is not None else "",
                                             path=path,
                                             query=None,
                                             fragment=None,
                                             product=test.engagement.product)
    except:
        dupe_endpoint = None

    if not dupe_endpoint:
        endpoint = Endpoint(protocol=protocol,
                            host=host + (":" + port) if port is not None else "",
                            path=path,
                            query=None,
                            fragment=None,
                            product=test.engagement.product)
    else:
        endpoint = dupe_endpoint

    if ip:
        try:
            dupe_endpoint = Endpoint.objects.get(protocol=None,
                                                 host=ip,
                                                 path=None,
                                                 query=None,
                                                 fragment=None,
                                                 product=test.engagement.product)
        except:
            dupe_endpoint = None

        if not dupe_endpoint:
            endpoints = [endpoint, Endpoint(host=ip, product=test.engagement.product)]
        else:
            endpoints = [endpoint, dupe_endpoint]
    else:
        # no IP reported for the host: keep only the URL-derived endpoint
        endpoints = [endpoint]

    background = do_clean(item_node.findall('issueBackground'))
    if background:
        background = html2text.html2text(background)

    detail = do_clean(item_node.findall('issueDetail'))
    if detail:
        detail = html2text.html2text(detail)

    remediation = do_clean(item_node.findall('remediationBackground'))
    if remediation:
        remediation = html2text.html2text(remediation)

    references = do_clean(item_node.findall('references'))
    if references:
        references = html2text.html2text(references)

    severity = item_node.findall('severity')[0].text

    # Finding and Endpoint objects returned have not been saved to the database
    finding = Finding(title=item_node.findall('name')[0].text,
                      url=url,
                      test=test,
                      severity=severity,
                      description=background + "\n\n" + detail,
                      mitigation=remediation,
                      references=references,
                      is_template=False,
                      active=False,
                      verified=False,
                      false_p=False,
                      duplicate=False,
                      out_of_scope=False,
                      mitigated=None,
                      impact="No impact provided",
                      numerical_severity=Finding.get_numerical_severity(severity))
    finding.unsaved_endpoints = endpoints
    finding.unsaved_request = request
    finding.unsaved_response = response

    return finding
Example #56
0
    def process_result_detailed(self, dupes, findingdetail, query, result,
                                find_date):
        name, cwe, categories = self.getQueryElements(query)
        title = ''
        sev = result.get('Severity')
        title = query.get('name').replace('_', ' ')
        # Loop over <Path> (there should be only one)
        paths = result.findall('Path')
        if (len(paths)) > 1:
            logger.warning("Checkmarx scan: more than one path found: " +
                           str(len(paths)) +
                           ". Only the last one will be used")

        for path in paths:
            sourceFilename = ''
            sinkFilename = ''
            sourceLineNumber = None
            sinkLineNumber = None
            sourceObject = ''
            sinkObject = ''
            similarityId = str(path.get("SimilarityId"))
            path_id = str(path.get("PathId"))
            pathId = similarityId + path_id
            findingdetail = '{}-----\n'.format(findingdetail)
            # Loop over function calls / assignments in the data flow graph
            for pathnode in path.findall('PathNode'):
                findingdetail = self.get_description_detailed(
                    pathnode, findingdetail)
                nodeId = pathnode.find('NodeId').text
                if (nodeId == "1"):
                    sourceFilename, sourceLineNumber, sourceObject = self.get_pathnode_elements(
                        pathnode)
            # the last pathnode is the sink
            sinkFilename, sinkLineNumber, sinkObject = self.get_pathnode_elements(
                pathnode)
            # pathId is the unique id from tool which means that there is basically no aggregation except real duplicates
            aggregateKeys = "{}{}{}{}{}".format(categories, cwe, name,
                                                sinkFilename, pathId)
            if title and sinkFilename:
                title = "{} ({})".format(title, ntpath.basename(sinkFilename))

            find = Finding(title=title,
                           cwe=int(cwe),
                           test=self.test,
                           active=False,
                           verified=False,
                           false_p=result.get('FalsePositive') == "True",
                           description=findingdetail,
                           severity=sev,
                           mitigation=self.mitigation,
                           impact=self.impact,
                           references=self.references,
                           file_path=sinkFilename,
                           line=sinkLineNumber,
                           url='N/A',
                           date=find_date,
                           static_finding=True,
                           unique_id_from_tool=pathId,
                           sast_source_object=sourceObject,
                           sast_sink_object=sinkObject,
                           sast_source_line=sourceLineNumber,
                           sast_source_file_path=sourceFilename)
        dupes[aggregateKeys] = find
Example #57
0
def issue_r(raw_row, vuln):
    ret_rows = []
    issue_row = {}

    # IP ADDRESS
    issue_row['ip_address'] = raw_row.findtext('IP')

    # FQDN
    issue_row['fqdn'] = raw_row.findtext('DNS')
    fqdn_parts = urlparse(issue_row['fqdn'])
    ep = Endpoint(host=fqdn_parts.netloc, path=fqdn_parts.path, query=fqdn_parts.query, fragment=fqdn_parts.fragment)

    # OS NAME
    issue_row['os'] = raw_row.findtext('OPERATING_SYSTEM')

    # Scan details
    for vuln_details in raw_row.iterfind('VULN_INFO_LIST/VULN_INFO'):
        _temp = issue_row
        # Port
        _gid = vuln_details.find('QID').attrib['id']
        _port = vuln_details.findtext('PORT')
        _temp['port_status'] = _port

        search = "//GLOSSARY/VULN_DETAILS_LIST/VULN_DETAILS[@id='{}']".format(_gid)
        vuln_item = vuln.find(search)
        if vuln_item is not None:
            finding = Finding()
            # Vuln name
            _temp['vuln_name'] = vuln_item.findtext('TITLE')

            # Solution: strip any "Workaround(s):" heading from the solution text
            _temp['solution'] = re.sub('Workaround(s)?:.+\n', '', htmltext(vuln_item.findtext('SOLUTION')))

            # Vuln_description
            _temp['vuln_description'] = "\n".join([htmltext(vuln_item.findtext('THREAT')), htmltext(vuln_item.findtext('IMPACT'))])

            # CVSS
            _temp['CVSS_score'] = vuln_item.findtext('CVSS_SCORE/CVSS_BASE')

            # CVE and LINKS
            _temp_cve_details = list(vuln_item.iterfind('CVE_ID_LIST/CVE_ID'))  # materialise so the emptiness checks below are meaningful
            if _temp_cve_details:
                _cl = {cve_detail.findtext('ID'): cve_detail.findtext('URL') for cve_detail in _temp_cve_details}
                _temp['cve'] = "\n".join(_cl.keys())
                _temp['links'] = "\n".join(_cl.values())
            sev = 'Low'
            if 0.1 <= float(_temp['CVSS_score']) <= 3.9 :
                sev = 'Low'
            elif 4.0 <= float(_temp['CVSS_score']) <= 6.9:
                sev = 'Medium'
            elif 7.0 <= float(_temp['CVSS_score']) <= 8.9 :
                sev = 'High'
            else:
                sev = 'Critical'
            finding = None
            if _temp_cve_details:
                refs = "\n".join(_cl.values())
                finding = Finding(title=_temp['vuln_name'], mitigation=_temp['solution'],
                                  description=_temp['vuln_description'], severity=sev,
                                  references=refs)
            else:
                finding = Finding(title=_temp['vuln_name'], mitigation=_temp['solution'],
                                  description=_temp['vuln_description'], severity=sev)
            finding.unsaved_endpoints = list()
            finding.unsaved_endpoints.append(ep)
            ret_rows.append(finding)
    return ret_rows
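issue_r above buckets the CVSS base score into Low/Medium/High/Critical at the 3.9, 6.9 and 8.9 cut-points. The same bucketing, folded into a small helper for illustration (scores below 0.1 are treated as Low here, which simplifies the original's fall-through):

def cvss_to_severity(score):
    # 0.0-3.9 Low, 4.0-6.9 Medium, 7.0-8.9 High, anything higher Critical
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"
    return "Critical"

assert cvss_to_severity(3.9) == "Low"
assert cvss_to_severity(5.0) == "Medium"
assert cvss_to_severity(9.8) == "Critical"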
Example #58
0
    def get_findings(self, filename, test):

        tree = filename.read()
        try:
            data = json.loads(str(tree, "utf-8"))
        except:
            data = json.loads(tree)

        # When doing dictionary, we can detect duplications
        dupes = dict()

        # To be compatible with update in version
        try:
            vulnerability = data[next(iter(data.keys()))]["vulnerabilities"]
        except (KeyError, StopIteration):
            return list()

        # Early exit if empty
        if vulnerability is None:
            return list()

        for item in vulnerability:
            # Default = empty string
            title = ""
            sev = ""
            findingdetail = ""
            mitigation = ""
            impact = ""
            references = ""
            static_finding = True

            # Trying to add all possible details
            title = "{} - {} ({})".format(item["id"], item["package"],
                                          item["version"])
            sev = transpose_severity(item["severity"])
            findingdetail += item["description"]

            if item["fix_version"]:
                mitigation += "Upgrade {} to version {}\n\n".format(
                    item["package"], item["version"])

            if len(item["links"]) > 0:
                for link in item["links"]:
                    references += "{}\n".format(link)
                mitigation += "Reference: {}".format(item["links"][0])

            dupe_key = title

            if dupe_key in dupes:
                find = dupes[dupe_key]
            else:
                dupes[dupe_key] = True

                find = Finding(
                    title=title,
                    test=test,
                    active=False,
                    verified=False,
                    description=findingdetail,
                    severity=sev,
                    numerical_severity=Finding.get_numerical_severity(sev),
                    mitigation=mitigation,
                    impact=impact,
                    references=references,
                    file_path=filename,
                    url="N/A",
                    static_finding=True,
                )

                dupes[dupe_key] = find
                findingdetail = ""

        return list(dupes.values())
Example #59
0
    def __init__(self, filename, test):

        if "VulnerabilitiesSummary.xml" not in str(filename):
            raise NamespaceErr('Please ensure that you are uploading AppSpider\'s VulnerabilitiesSummary.xml file. '
                               'At this time it is the only file that is consumable by DefectDojo.')

        vscan = ElementTree.parse(filename)
        root = vscan.getroot()

        if "VulnSummary" not in str(root.tag):
            raise NamespaceErr('Please ensure that you are uploading AppSpider\'s VulnerabilitiesSummary.xml file. '
                               'At this time it is the only file that is consumable by DefectDojo.')

        dupes = dict()

        for finding in root.iter('Vuln'):

            severity = finding.find("AttackScore").text
            if severity == "0-Safe":
                severity = "Info"
            elif severity == "1-Informational":
                severity = "Low"
            elif severity == "2-Low":
                severity = "Medium"
            elif severity == "3-Medium":
                severity = "High"
            elif severity == "4-High":
                severity = "Critical"
            else:
                severity = "Info"

            title = finding.find("VulnType").text
            description = finding.find("Description").text
            mitigation = finding.find("Recommendation").text
            vuln_url = finding.find("VulnUrl").text

            parts = urlparse.urlparse(vuln_url)

            cwe = int(finding.find("CweId").text)

            dupe_key = severity + title
            unsaved_endpoints = list()
            unsaved_req_resp = list()

            if title is None:
                title = ''
            if description is None:
                description = ''
            if mitigation is None:
                mitigation = ''

            if dupe_key in dupes:
                find = dupes[dupe_key]

                unsaved_endpoints.append(find.unsaved_endpoints)
                unsaved_req_resp.append(find.unsaved_req_resp)

            else:
                find = Finding(title=title,
                               test=test,
                               active=False,
                               verified=False,
                               description=html2text.html2text(description),
                               severity=severity,
                               numerical_severity=Finding.get_numerical_severity(severity),
                               mitigation=html2text.html2text(mitigation),
                               impact="N/A",
                               references=None,
                               cwe=cwe)
                find.unsaved_endpoints = unsaved_endpoints
                find.unsaved_req_resp = unsaved_req_resp
                dupes[dupe_key] = find

            # record request/response pairs and the endpoint for new and duplicate findings alike
            for attack in finding.iter("AttackRequest"):
                req = attack.find("Request").text
                resp = attack.find("Response").text

                find.unsaved_req_resp.append({"req": req, "resp": resp})

            find.unsaved_endpoints.append(Endpoint(protocol=parts.scheme,
                                                   host=parts.netloc,
                                                   path=parts.path,
                                                   query=parts.query,
                                                   fragment=parts.fragment,
                                                   product=test.engagement.product))

        self.items = dupes.values()
Example #60
0
    def __init__(self, filename, test):
        cxscan = ElementTree.parse(filename)
        root = cxscan.getroot()

        dupes = dict()

        for query in root.findall('Query'):
            categories = ''
            language = ''
            mitigation = ''
            impact = ''
            references = ''
            findingdetail = ''
            title = ''
            group = ''
            status = ''

            find_date = root.get("ScanStart")
            name = query.get('name')
            cwe = query.get('cweId')

            if query.get('categories') is not None:
                categories = query.get('categories')

            if query.get('Language') is not None:
                language = query.get('Language')

            if query.get('group') is not None:
                group = query.get('group').replace('_', ' ')

            for result in query.findall('Result'):
                deeplink = result.get('DeepLink')

                if categories is not None:
                    findingdetail = 'Category: ' + categories + '\n'

                if language is not None:
                    findingdetail += 'Language: ' + language + '\n'

                if group is not None:
                    findingdetail += 'Group: ' + group + '\n'

                if result.get('Status') is not None:
                    findingdetail += 'Status: ' + result.get('Status') + '\n'

                findingdetail += 'Finding Link: ' + deeplink + '\n\n'

                dupe_key = categories + cwe + name + result.get(
                    'FileName') + result.get('Line')

                if dupe_key in dupes:
                    find = dupes[dupe_key]
                else:
                    dupes[dupe_key] = True

                    sev = result.get('Severity')
                    result.get('FileName')

                    for path in result.findall('Path'):
                        title = query.get('name').replace(
                            '_', ' ') + ' (' + path.get('PathId') + ')'
                        for pathnode in path.findall('PathNode'):
                            findingdetail += 'Source Object: %s\n' % (
                                pathnode.find('Name').text)
                            #findingdetail += 'Filename: %s\n' % (pathnode.find('FileName').text)
                            #findingdetail += 'Line Number: %s\n' % (pathnode.find('Line').text)
                            for codefragment in pathnode.findall(
                                    'Snippet/Line'):
                                findingdetail += 'Code: %s\n' % (
                                    codefragment.find('Code').text.strip())

                            findingdetail += '\n'

                    find = Finding(
                        title=title,
                        cwe=int(cwe),
                        test=test,
                        active=False,
                        verified=False,
                        description=findingdetail,
                        severity=sev,
                        numerical_severity=Finding.get_numerical_severity(sev),
                        mitigation=mitigation,
                        impact=impact,
                        references=references,
                        file_path=pathnode.find('FileName').text,
                        line=pathnode.find('Line').text,
                        url='N/A',
                        date=find_date)
                    dupes[dupe_key] = find
                    findingdetail = ''

        self.items = dupes.values()