Example #1
    def create_or_update(vulnerabilities: dict,
                         scanned_hosts: list,
                         config=None) -> None:
        index = VulnerabilityDocument.get_index(config)
        docs = []
        all_vulnerability_docs = VulnerabilityDocument.search(
            index=index).filter(~Q('match', tags=VulnerabilityStatus.FIXED))
        for current_vuln in all_vulnerability_docs.scan():
            vuln_id = current_vuln.id
            if vuln_id in vulnerabilities:
                if current_vuln.has_changed(vulnerabilities[vuln_id]):
                    c = current_vuln.update(vulnerabilities[vuln_id],
                                            index=index,
                                            weak=True)
                    docs.append(c.to_dict(include_meta=True))
                del vulnerabilities[vuln_id]
            elif vuln_id not in vulnerabilities and current_vuln.asset.ip_address in scanned_hosts:
                current_vuln.tags.append(VulnerabilityStatus.FIXED)
                c = current_vuln.save(index=index, weak=True)
                docs.append(c.to_dict(include_meta=True))

            if len(docs) > 500:
                async_bulk(docs, index=index)
                docs = []

        docs.extend(
            list(
                map(lambda x: x.save(weak=True).to_dict(),
                    vulnerabilities.values())))
        async_bulk(docs, index=index)
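
Most of the snippets in this listing follow the same batching idiom: serialize each elasticsearch-dsl document with to_dict(include_meta=True), buffer the resulting actions, flush once the buffer passes a threshold, and flush the remainder after the loop. The async_bulk call looks like a project-specific wrapper, so the sketch below shows the same pattern with the standard elasticsearch.helpers.bulk helper; the flush_in_batches name, the client argument, and the flush_threshold parameter are illustrative assumptions, not part of the original code.

    from elasticsearch.helpers import bulk

    def flush_in_batches(client, documents, index, flush_threshold=500):
        """Serialize documents and index them in batches, as the examples here do."""
        actions = []
        for doc in documents:
            # include_meta=True keeps _index/_id so the bulk helper can route the action
            actions.append(doc.to_dict(include_meta=True))
            if len(actions) > flush_threshold:
                bulk(client, actions, index=index)
                actions = []
        # flush whatever is left over after the loop
        bulk(client, actions, index=index)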
Example #2
    def _update_existing_assets(assets: dict, index):
        if assets:
            updated = []
            assets_search = AssetDocument.search(
                index=index).filter(~Q('match', tags=AssetStatus.DISCOVERED))
            current_assets = [a for a in assets_search.scan()]
            for current_asset in current_assets:
                asset_id = current_asset.id
                if asset_id in assets:
                    if current_asset.has_changed(assets[asset_id]):
                        current_asset.update(assets[asset_id],
                                             index=index,
                                             weak=True)
                        updated.append(
                            current_asset.to_dict(include_meta=True))
                    del assets[asset_id]
                elif asset_id not in assets and AssetStatus.DELETED not in current_asset.tags:
                    current_asset.tags.append(AssetStatus.DELETED)
                    updated.append(
                        current_asset.save(
                            index=index, weak=True).to_dict(include_meta=True))

                if len(updated) > 500:
                    async_bulk(updated)
                    updated = []

            async_bulk(updated)

        return assets
Example #3
    def create_or_update(assets: dict, config=None) -> None:
        index = AssetDocument.get_index(config)
        assets = AssetDocument._update_existing_assets(assets, index)
        assets = AssetDocument._update_discovered_assets(assets, index)

        async_bulk(
            list(map(lambda x: x.save(weak=True).to_dict(), assets.values())),
            index)
Example #4
    def process(handle):
        docs = []
        data = json.loads(handle)

        for key, value in data['cves'].items():
            doc = ExploitFactory.create(key, value)
            if doc:
                docs.append(doc.to_dict(include_meta=True))

        async_bulk(docs, index=CveDocument.Index.name)
Example #5
    def update_gone_discovered_assets(targets, scanned_hosts, discovered_assets, config=None):
        index = AssetDocument.get_index(config)
        # FIXME: update by query
        scanned_ips = [x.ip_address for x in scanned_hosts]

        for asset in discovered_assets.scan():
            if asset.ip_address in targets and asset.ip_address not in scanned_ips:
                asset.tags.append(AssetStatus.DELETED)
                asset.save(index=index)

        async_bulk(
            list(
                map(lambda x: x.save(weak=True).to_dict(include_meta=True),
                    scanned_hosts)), index)
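
The FIXME in this snippet already points at the fix: replace the per-document save loop with an update-by-query. Below is a hedged sketch of what that could look like with elasticsearch_dsl.UpdateByQuery; the tag_gone_assets_by_query name, the terms filter on ip_address, and the Painless script are illustrative assumptions, not the project's actual implementation.

    from elasticsearch_dsl import Q, UpdateByQuery

    def tag_gone_assets_by_query(client, index, gone_ips, deleted_tag):
        """Append a tag to every asset whose IP was targeted but not seen in the scan."""
        ubq = (
            UpdateByQuery(using=client, index=index)
            .query(Q('terms', ip_address=gone_ips))
            .script(
                source='if (!ctx._source.tags.contains(params.tag)) '
                       '{ ctx._source.tags.add(params.tag); }',
                params={'tag': deleted_tag}))
        return ubq.execute()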
Example #6
    def process(handle):
        docs = []
        for obj in iter_elements_by_name(handle, 'Weakness'):
            cwe = CWEFactory.create(obj)

            if cwe:
                docs.append(cwe.to_dict(include_meta=True))

            if len(docs) > 500:
                async_bulk(docs, CweDocument.Index.name)
                docs = []

        async_bulk(docs, CweDocument.Index.name)
Example #7
    def process(handle):
        data = json.load(handle)
        docs = []
        for obj in data['CVE_Items']:
            cve = CveFactory.create(obj)

            if cve:
                docs.append(cve.to_dict(include_meta=True))

            if len(docs) > 500:
                async_bulk(docs, CveDocument.Index.name)
                docs = []

        async_bulk(docs, CveDocument.Index.name)
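
Examples #6 and #7 count to 500 by hand before each flush. The standard helpers can also consume a generator and chunk requests internally, which keeps memory flat without the explicit counter; a minimal sketch assuming elasticsearch.helpers.streaming_bulk and an illustrative make_doc factory callable (standing in for CveFactory.create / CWEFactory.create).

    import logging

    from elasticsearch.helpers import streaming_bulk

    LOGGER = logging.getLogger(__name__)

    def index_stream(client, items, make_doc, index):
        """Stream bulk actions from a generator instead of batching by hand."""
        def actions():
            for item in items:
                doc = make_doc(item)  # factory returning a Document or None
                if doc:
                    yield doc.to_dict(include_meta=True)

        # streaming_bulk lazily pulls from the generator and sends chunks of 500 actions;
        # iterating the results drives the indexing and yields one (ok, item) per action
        for ok, item in streaming_bulk(client, actions(), index=index,
                                       chunk_size=500, raise_on_error=False):
            if not ok:
                LOGGER.warning('failed to index %s', item)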
Example #8
def _processing(idx, slices_count, assets_count, vulnerability_index):
    docs = []
    try:
        vuln_search = VulnerabilityDocument.search(
            index=vulnerability_index).filter(
                ~Q('match', tags=VulnerabilityStatus.FIXED)
                & ~Q('match', asset__tags=AssetStatus.DELETED))

        LOGGER.debug(
            F'Calculation for {vulnerability_index} and {idx}, {slices_count} started'
        )

        if slices_count > 1:
            vuln_search = vuln_search.extra(slice={
                "id": idx,
                "max": slices_count
            }).params(scroll="60m")

        # List comprehension used for better performance
        vulns = [vuln for vuln in vuln_search.scan()]
        LOGGER.debug(F'all vulns for slice {idx} downloaded')

        for vuln in vulns:
            score, vector = calculate_environmental_score_v3(vuln)
            vuln.environmental_score_vector_v3 = vector
            vuln.environmental_score_v3 = score

            vuln_count = get_cve_count(vulnerability_index, vuln.cve.id)
            score, vector = calculate_environmental_score_v2(
                vuln, vuln_count, assets_count)
            vuln.environmental_score_vector_v2 = vector
            vuln.environmental_score_v2 = score

            docs.append(vuln.to_dict(include_meta=True))

            if len(docs) > 10000:
                async_bulk(docs, vulnerability_index)
                docs = []

        async_bulk(docs, vulnerability_index)
    except Exception as ex:
        LOGGER.error(F'Unknown processing exception {ex}')
    finally:
        thread_pool_executor.wait_for_all()

    LOGGER.debug(
        F'Calculation for {vulnerability_index} and {idx}, {slices_count} done'
    )
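
The slice={'id': idx, 'max': slices_count} extra above uses Elasticsearch sliced scrolling: each value of idx scans a disjoint partition of the scroll, so several workers can walk the index in parallel without overlapping. A minimal sketch of how such a worker could be fanned out, assuming a callable with the same (idx, slices_count, ...) shape as _processing; the thread pool here is the standard library's, not the project's thread_pool_executor.

    from concurrent.futures import ThreadPoolExecutor

    def run_sliced(process_slice, slices_count=4):
        """Run one worker per scroll slice; together the slices cover the whole index."""
        with ThreadPoolExecutor(max_workers=slices_count) as pool:
            futures = [pool.submit(process_slice, idx, slices_count)
                       for idx in range(slices_count)]
            for future in futures:
                future.result()  # re-raise any exception from a worker

    # Usage sketch (illustrative):
    # run_sliced(lambda idx, n: _processing(idx, n, assets_count, index), slices_count=4)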
Example #9
    def update_related(self, sender, new_version, old_version, **kwargs):
        if old_version:
            updates = []
            for document in self._get_related_doc(sender):
                for field_name in document.get_fields_name():
                    field_type = document._doc_type.mapping[field_name]

                    if isinstance(field_type, Object) and issubclass(
                            sender, field_type._doc_class):

                        for index in self._get_indexes(new_version,
                                                       field_type._doc_class):
                            search = document.search(index=index).filter(
                                'term',
                                **{F'{field_name}__id': old_version.id})
                            count = search.count()

                            if count > 0:

                                has_related_documents = getattr(
                                    document, 'related_documents', False)
                                if not has_related_documents:
                                    thread_pool_executor.submit(
                                        self._update_by_query, index
                                        if index else document.Index.name,
                                        field_name, old_version, new_version)
                                else:

                                    if count > 10000:
                                        result = search.scan()
                                    elif count > 0:
                                        result = search[0:count].execute()
                                    else:
                                        result = []

                                    for hit in result:
                                        setattr(hit, field_name, new_version)
                                        updates.append(
                                            hit.save(index=index,
                                                     weak=True).to_dict(
                                                         include_meta=True))

                                    if len(updates) > 500:
                                        async_bulk(updates)
                                        updates = []

            async_bulk(updates)
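
The count > 10000 branch above works around Elasticsearch's default index.max_result_window of 10,000: an ordinary paginated execute() cannot reach past that window, so larger result sets fall back to scan(), which scrolls instead. A hedged sketch of that selection logic in isolation; the fetch_all_hits name and window_limit parameter are illustrative.

    def fetch_all_hits(search, window_limit=10000):
        """Return every hit of a Search, scrolling only when the result set is large."""
        count = search.count()
        if count > window_limit:
            # scan() uses the scroll API and is not capped by index.max_result_window
            return search.scan()
        if count > 0:
            # a single paginated request is cheaper for small result sets
            return search[0:count].execute()
        return []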
Example #10
    def _update_discovered_assets(assets: dict, index):
        if assets:
            updated = []
            assets = {a.ip_address: a for a in assets.values()}
            assets_search = AssetDocument.search(
                index=index).filter(Q('match', tags=AssetStatus.DISCOVERED))
            discovered_assets = [a for a in assets_search.scan()]
            for discovered_asset in discovered_assets:
                if discovered_asset.ip_address in assets:
                    discovered_asset.update(assets[discovered_asset.ip_address],
                                            index=index,
                                            weak=True)
                    updated.append(discovered_asset.to_dict(include_meta=True))
                    del assets[discovered_asset.ip_address]

                if len(updated) > 500:
                    async_bulk(updated)
                    updated = []

            async_bulk(updated)

        return assets
Example #11
    def _reopen(vulnerabilities: dict, index: str):
        prev_fixed = VulnerabilityDocument.search(index=index).filter(
            Q('match', tags=VulnerabilityStatus.FIXED))
        docs = []
        for prev_fixed_vuln in prev_fixed.scan():
            vuln_id = prev_fixed_vuln.id
            if vuln_id in vulnerabilities:
                c = prev_fixed_vuln.update(vulnerabilities[vuln_id],
                                           index=index,
                                           weak=True)
                c.tags.append(VulnerabilityStatus.REOPEN)
                if VulnerabilityStatus.FIXED in c.tags:
                    c.tags.remove(VulnerabilityStatus.FIXED)
                docs.append(c.to_dict(include_meta=True))
                del vulnerabilities[vuln_id]

            if len(docs) > 500:
                async_bulk(docs, index=index)
                docs = []

        async_bulk(docs, index=index)
        return vulnerabilities