Example no. 1
    def setUp(self):
        p = Product()
        p.Name = 'Test Product'
        p.Description = 'Product for Testing Endpoint functionality'
        p.save()

        e = Endpoint()
        e.product = p
        e.host = '127.0.0.1'
        e.save()
Example no. 2
    def setUp(self):
        p = Product()
        p.Name = 'Test Product'
        p.Description = 'Product for Testing Endpoint functionality'
        p.save()

        e = Endpoint()
        e.product = p
        e.host = '127.0.0.1'
        e.save()
Example no. 3
    def setUp(self):
        p = Product()
        p.Name = 'Test Product'
        p.Description = 'Product for Testing Endpoint functionality'
        p.save()

        e = Endpoint()
        e.product = p
        e.host = '127.0.0.1'
        e.save()

        call_command('loaddata', 'dojo/fixtures/system_settings', verbosity=0)
Example no. 4
    def setUp(self):
        p = Product()
        p.Name = 'Test Product'
        p.Description = 'Product for Testing Endpoint functionality'
        p.save()

        e = Endpoint()
        e.product = p
        e.host = '127.0.0.1'
        e.save()

        EndpointMetaDataTestUtil.save_custom_field(e, 'TestField', 'TestValue')
        EndpointMetaDataTestUtil.save_custom_field(p, 'TestProductField', 'TestProductValue')
Example no. 5
    def setUp(self):
        p = Product()
        p.Name = 'Test Product'
        p.Description = 'Product for Testing Endpoint functionality'
        p.save()

        e = Endpoint()
        e.product = p
        e.host = '127.0.0.1'
        e.save()

        EndpointMetaDataTestUtil.save_custom_field(e, 'TestField', 'TestValue')
        EndpointMetaDataTestUtil.save_custom_field(p, 'TestProductField', 'TestProductValue')

        call_command('loaddata', 'dojo/fixtures/system_settings', verbosity=0)
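These setUp variants all follow the same pattern: create a Product, attach an Endpoint to it, and optionally add custom metadata fields and load the default system settings fixture. Below is a minimal sketch of what the surrounding test class might look like; it assumes Product and Endpoint come from dojo.models and call_command from django.core.management, while the class name, the test method, and the lowercase name/description assignments are illustrative (the snippets above assign Name/Description attributes instead). EndpointMetaDataTestUtil is a helper defined elsewhere in the test suite and is not reproduced here.

from django.core.management import call_command
from django.test import TestCase

from dojo.models import Endpoint, Product


class EndpointTest(TestCase):  # illustrative class name
    def setUp(self):
        # Create a product to own the endpoint (the examples above use
        # p.Name / p.Description; lowercase model fields are assumed here).
        p = Product()
        p.name = 'Test Product'
        p.description = 'Product for Testing Endpoint functionality'
        p.save()

        # Attach an endpoint to the product.
        e = Endpoint()
        e.product = p
        e.host = '127.0.0.1'
        e.save()

        # Load the default system settings fixture used by the later examples.
        call_command('loaddata', 'dojo/fixtures/system_settings', verbosity=0)

    def test_endpoint_created(self):
        # Sanity check that setUp created the objects.
        self.assertEqual(1, Endpoint.objects.count())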
Example no. 6
    def get_findings(self, filename, test: Test):
        content = filename.read()
        if type(content) is bytes:
            content = content.decode('utf-8')
        csv.field_size_limit(int(sys.maxsize / 10))  # the request/resp are big
        reader = csv.DictReader(io.StringIO(content))
        dupes = dict()
        for row in reader:
            # manage severity from two possible columns 'Severity' and 'Risk'
            severity = 'Info'
            if 'Severity' in row:
                severity = self._convert_severity(row.get('Severity'))
            elif 'Risk' in row:
                severity = self._convert_severity(row.get('Risk'))
            # manage title from two possible columns 'Name' and 'Plugin Name'
            title = row.get('Name')
            if title is None and 'Plugin Name' in row:
                title = row.get('Plugin Name')
            # special case to skip empty titles
            if not title:
                continue
            description = row.get('Synopsis')
            mitigation = str(row.get('Solution'))
            impact = row.get('Description', 'N/A')
            references = row.get('See Also', 'N/A')

            dupe_key = severity + title + row.get('Host', 'No host') + str(
                row.get('Port', 'No port')) + row.get('Synopsis',
                                                      'No synopsis')

            detected_cve = self._format_cve(str(row.get('CVE')))
            cve = None
            if detected_cve:
                # FIXME support more than one CVE in Nessus CSV parser
                cve = detected_cve[0]
                if len(detected_cve) > 1:
                    LOGGER.warning(
                        "more than one CVE for a finding. NOT supported by Nessus CSV parser"
                    )

            if dupe_key in dupes:
                find = dupes[dupe_key]
                if 'Plugin Output' in row:
                    find.description += row.get('Plugin Output')
            else:
                if 'Plugin Output' in row:
                    description = description + str(row.get('Plugin Output'))
                find = Finding(title=title,
                               test=test,
                               cve=cve,
                               description=description,
                               severity=severity,
                               mitigation=mitigation,
                               impact=impact,
                               references=references)

                # manage CVSS vector (only v3.x for now)
                if 'CVSS V3 Vector' in row and '' != row.get('CVSS V3 Vector'):
                    find.cvssv3 = CVSS3('CVSS:3.0/' +
                                        str(row.get('CVSS V3 Vector'))
                                        ).clean_vector(output_prefix=False)
                # manage CPE data
                detected_cpe = self._format_cpe(str(row.get('CPE')))
                if detected_cpe:
                    # FIXME support more than one CPE in Nessus CSV parser
                    if len(detected_cpe) > 1:
                        LOGGER.warning(
                            "more than one CPE for a finding. NOT supported by Nessus CSV parser"
                        )
                    cpe_decoded = CPE(detected_cpe[0])
                    find.component_name = cpe_decoded.get_product()[0] if len(
                        cpe_decoded.get_product()) > 0 else None
                    find.component_version = cpe_decoded.get_version(
                    )[0] if len(cpe_decoded.get_version()) > 0 else None

                find.unsaved_endpoints = list()
                dupes[dupe_key] = find
            # manage endpoints
            endpoint = Endpoint(host='localhost')
            if 'Host' in row:
                endpoint.host = row.get('Host')
            elif 'IP Address' in row:
                endpoint.host = row.get('IP Address')
            endpoint.port = row.get('Port')
            if 'Protocol' in row:
                endpoint.protocol = row.get('Protocol').lower()
            find.unsaved_endpoints.append(endpoint)
        return list(dupes.values())
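A quick sketch of how get_findings() might be driven, assuming the method belongs to the Nessus CSV parser class shipped with DefectDojo (the NessusCSVParser name and import path below are assumptions, not confirmed by the snippet) and that test is the dojo.models.Test instance the findings should be attached to:

from dojo.models import Test

# Assumed import path and class name for the parser that defines get_findings().
from dojo.tools.nessus.parser import NessusCSVParser


def import_nessus_csv(report_path: str, test: Test):
    # get_findings() accepts a file-like object and decodes bytes itself,
    # so opening the report in binary mode is fine.
    with open(report_path, 'rb') as report:
        findings = NessusCSVParser().get_findings(report, test)

    for finding in findings:
        # Endpoints are only staged on finding.unsaved_endpoints at this point;
        # persisting findings and endpoints is left to the caller/importer.
        print(finding.title, finding.severity, len(finding.unsaved_endpoints))

    return findings

Note that rows sharing the same severity, title, host, port and synopsis are merged into a single Finding by the dupe_key logic, with any additional 'Plugin Output' appended to that finding's description.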