def test_asset_updated(self):
    """Changes to an asset's requirements must propagate to every vulnerability document of that asset."""
    self.asset_2 = create_asset('10.10.10.11')
    create_vulnerability(self.asset, self.cve)
    create_vulnerability(self.asset_2, self.cve)
    self.cve_2 = create_cve('CVE-2017-0003')
    create_vulnerability(self.asset, self.cve_2)
    create_vulnerability(self.asset_2, self.cve_2)
    self.assertEqual(Search().index(VulnerabilityDocument.Index.name).count(), 4)

    # Modify only the first asset; the second acts as a control.
    self.asset.confidentiality_requirement = AssetImpact.HIGH
    self.asset.integrity_requirement = AssetImpact.HIGH
    self.asset.save()
    thread_pool_executor.wait_for_all()

    # No new documents were created, existing ones were updated in place.
    self.assertEqual(Search().index(VulnerabilityDocument.Index.name).count(), 4)
    for expected_asset in (self.asset, self.asset_2):
        response = VulnerabilityDocument.search().filter(
            'term', asset__ip_address=expected_asset.ip_address).execute()
        self.assertEqual(len(response.hits), 2)
        for hit in response.hits:
            self.assertEqual(hit.asset.confidentiality_requirement,
                             expected_asset.confidentiality_requirement)
            self.assertEqual(hit.asset.integrity_requirement,
                             expected_asset.integrity_requirement)
def test_cve_updated(self):
    """Changes to a CVE must propagate to every vulnerability document referencing it."""
    self.asset_2 = create_asset('10.10.10.11')
    self.cve_2 = create_cve('CVE-2017-0003')
    for asset in (self.asset, self.asset_2):
        create_vulnerability(asset, self.cve)
        create_vulnerability(asset, self.cve_2)
    self.assertEqual(Search().index(VulnerabilityDocument.Index.name).count(), 4)

    # Modify only the first CVE; the second acts as a control.
    self.cve.access_vector_v2 = metrics.AccessVectorV2.LOCAL
    self.cve.save()
    thread_pool_executor.wait_for_all()

    # Same document count, updated in place.
    self.assertEqual(Search().index(VulnerabilityDocument.Index.name).count(), 4)
    for cve in (self.cve, self.cve_2):
        response = VulnerabilityDocument.search().filter(
            'term', cve__id=cve.id).execute()
        self.assertEqual(len(response.hits), 2)
        for hit in response.hits:
            self.assertEqual(hit.cve.access_vector_v2, cve.access_vector_v2)
def _update_scans(config_pk: int):
    """Download, store and parse all scans for the given scanner Config.

    For each scan id reported by the scanner client this downloads the XML
    report, schedules the raw file save in the background, records a Scan row,
    parses vulnerabilities and targets, and updates the asset and vulnerability
    indices. Status transitions: IN_PROGRESS -> SUCCESS, or ERROR on any
    exception. Returns None when the config does not exist.
    """
    LOGGER.debug(F'Starting update scans: {config_pk}')
    config = Config.objects.filter(pk=config_pk)
    if config.exists():
        config = config.first()
    else:
        LOGGER.error(F'Config: {config_pk} not exist!')
        return None
    try:
        config.set_status(Config.Status.IN_PROGRESS)
        manager = scanners_registry.get(config)
        client = manager.get_client()
        parser = manager.get_parser()
        # Timestamp taken before downloading, so a scan finishing mid-update
        # is not missed by the next pull.
        now_date = now()
        LOGGER.info(F'Trying to download scan lists')
        scan_list = client.get_scans()
        scan_list = parser.get_scans_ids(scan_list)
        LOGGER.info(F'scan list downloaded')
        LOGGER.debug(F'Scan list: {scan_list}')
        for scan_id in scan_list:
            LOGGER.info(F'Trying to download report form {config.name}')
            file = client.download_scan(scan_id, client.ReportFormat.XML)
            path = _get_save_path(config)
            file_name = '{}-{}.zip'.format(config.scanner, now().strftime('%H-%M-%S'))
            full_file_path = Path(path) / file_name
            LOGGER.info(F"Saving file: {full_file_path}")
            # Persisting the raw report happens in the background; parsing
            # continues with the in-memory object.
            thread_pool_executor.submit(save_scan, client, scan_id, file, full_file_path)
            saved_scan = Scan.objects.create(config=config, file=str(full_file_path))
            file_url = F"{getattr(settings, 'ABSOLUTE_URI', '')}{reverse('download_scan', args=[saved_scan.file_id])}"
            # Copy taken before parsing — presumably parse() consumes the
            # stream, so get_targets() needs its own copy. NOTE(review): confirm.
            targets = copy.deepcopy(file)
            LOGGER.info(F'Retrieving discovered assets for {config.name}')
            discovered_assets = AssetDocument.get_assets_with_tag(tag=AssetStatus.DISCOVERED, config=config)
            LOGGER.info(F'Trying to parse scan file {scan_id}')
            vulns, scanned_hosts = parser.parse(file, file_url)
            LOGGER.info(F'File parsed: {scan_id}')
            LOGGER.info(F'Trying to parse targets from file {scan_id}')
            targets = parser.get_targets(targets)
            LOGGER.info(F'Targets parsed: {scan_id}')
            if targets:
                LOGGER.info(F'Attempting to update discovered assets in {config.name}')
                AssetDocument.update_gone_discovered_assets(targets=targets,
                                                            scanned_hosts=scanned_hosts,
                                                            discovered_assets=discovered_assets,
                                                            config=config)
            LOGGER.info(F'Attempting to update vulns data in {config.name}')
            VulnerabilityDocument.create_or_update(vulns, scanned_hosts, config)
        config.last_scans_pull = now_date
        config.set_status(Config.Status.SUCCESS)
        config.save(update_fields=['last_scans_pull'])
    except Exception as e:
        config.set_status(status=Config.Status.ERROR, error_description=e)
        LOGGER.error(F'Error while loading vulnerability data {e}')
    finally:
        # Ensure background save/update jobs finish before the task returns.
        thread_pool_executor.wait_for_all()
def _create(self, item: dict):
    """Build a VulnerabilityDocument from a parsed report item and register it by id."""
    document = VulnerabilityDocument()
    for name in VulnerabilityDocument.get_fields_name():
        if name not in item:
            continue
        try:
            setattr(document, name, item[name])
        except (KeyError, IndexError):
            # Fall back to a placeholder when the raw value cannot be assigned.
            setattr(document, name, 'UNKNOWN')
    document.source = 'Nessus'
    self.__parsed[document.id] = document
def test_reopen_vulnerability(self):
    """A vulnerability that disappears is tagged FIXED, and REOPEN when reported again."""
    vulnerability = create_vulnerability(self.asset, self.cve)
    self.assertEqual(VulnerabilityDocument.search().count(), 1)

    def sync_and_fetch(report):
        # Push the report, wait for async indexing, and return the asset's hits.
        VulnerabilityDocument.create_or_update(report, [self.asset.ip_address], ConfigMock())
        thread_pool_executor.wait_for_all()
        self.assertEqual(VulnerabilityDocument.search().count(), 1)
        return VulnerabilityDocument.search().filter(
            'term', asset__ip_address=self.asset.ip_address).execute()

    result = sync_and_fetch({})
    self.assertEqual(result.hits[0].tags, ['test', VulnerabilityStatus.FIXED])

    result = sync_and_fetch({vulnerability.id: vulnerability})
    self.assertEqual(result.hits[0].tags, ['test', VulnerabilityStatus.REOPEN])
def create_vulnerability(asset, cve, save=True, index=None):
    """Factory for a test VulnerabilityDocument; optionally persists it to *index*."""
    document = VulnerabilityDocument(
        id=F"{asset.id}-{cve.id}",
        asset=asset,
        cve=cve,
        description='description',
        solution='solution',
        port=22,
        svc_name='ssh',
        protocol='tcp',
        tags=['test'],
    )
    if save:
        document.save(index=index)
    return document
def test_not_updated_existing_vulnerability(self):
    """An identical clone must not create a new document nor change existing fields."""
    original = create_vulnerability(self.asset, self.cve)
    self.assertEqual(VulnerabilityDocument.search().count(), 1)

    clone = original.clone()
    VulnerabilityDocument.create_or_update({clone.id: clone}, [], ConfigMock())
    thread_pool_executor.wait_for_all()

    self.assertEqual(VulnerabilityDocument.search().count(), 1)
    latest = VulnerabilityDocument.search().filter(
        'term', asset__ip_address=self.asset.ip_address).sort('-modified_date').filter(
        'term', cve__id=self.cve.id).execute()
    self.assertEqual(latest.hits[0].description, 'description')
def parse(self, report) -> [Dict, Dict]:
    """Parse an OpenVAS XML report into vulnerability documents.

    Returns (parsed, scanned_hosts): a dict mapping vulnerability uid to
    VulnerabilityDocument, and the list of scanned host IP addresses.
    NOTE(review): the annotation should really be Tuple[Dict, List]; kept
    unchanged for interface compatibility.
    """
    for result in report.findall('.//results/result'):
        # Results with a zero CVSS base score are informational; skip them.
        if float(result.find('nvt//cvss_base').text) <= 0:
            continue
        ip_address = result.find('./host').text
        self.__scanned_host.append(ip_address)
        asset = AssetDocument.get_or_create(ip_address, self._config)
        tags = self.parse_tags(result.find('./nvt/tags').text)
        # FIX: port, protocol, oid and uid are per-result values — compute them
        # once instead of re-splitting './port' for every CVE. partition() also
        # tolerates a port field without a '/' (the old split()[1] raised
        # IndexError there).
        port, _, protocol = result.find('./port').text.partition('/')
        if port == 'general':
            port = None
            protocol = None
        oid = result.find('./nvt').attrib.get('oid')
        uid = self._vuln_id(ip_address, port, oid)
        for cve_id in result.find('./nvt//cve').text.split(','):
            cve = self.get_cve(cve_id, oid, tags)
            # NOTE(review): uid does not include the CVE id, so multiple CVEs
            # on one result overwrite each other — confirm this is intended.
            self.__parsed[uid] = VulnerabilityDocument(
                id=uid,
                port=port,
                protocol=protocol,
                description=result.find('./description').text,
                solution=tags['solution'],
                cve=cve,
                asset=asset,
                source='OpenVas'
            )
    return self.__parsed, self.__scanned_host
def test_document_fields(self):
    """Every CVE and asset attribute must be carried over into the indexed document."""
    create_vulnerability(self.asset, self.cve)
    response = VulnerabilityDocument.search().filter('term', port=22).execute()
    self.assertEqual(len(response.hits), 1)
    uut = response.hits[0]

    cve_fields = (
        'id', 'base_score_v2', 'base_score_v3', 'summary',
        'access_vector_v2', 'access_complexity_v2', 'authentication_v2',
        'confidentiality_impact_v2', 'integrity_impact_v2', 'availability_impact_v2',
        'attack_vector_v3', 'attack_complexity_v3', 'privileges_required_v3',
        'user_interaction_v3', 'scope_v3',
        'confidentiality_impact_v3', 'integrity_impact_v3', 'availability_impact_v3',
    )
    for field in cve_fields:
        self.assertEqual(getattr(uut.cve, field), getattr(self.cve, field))

    asset_fields = (
        'ip_address', 'mac_address', 'os',
        'confidentiality_requirement', 'integrity_requirement',
        'availability_requirement',
    )
    for field in asset_fields:
        self.assertEqual(getattr(uut.asset, field), getattr(self.asset, field))

    self.assertEqual(uut.port, 22)
    self.assertEqual(uut.svc_name, 'ssh')
    self.assertEqual(uut.protocol, 'tcp')
def test_call(self):
    """Processing the same 'fixed' task-log event twice must add the tag exactly once."""
    vuln = create_vulnerability(create_asset(), create_cve())
    task = Task.objects.create(task_id=15, document_id=vuln.meta.id)

    def make_event():
        # Fresh dict per call, exactly as the original duplicated literals did.
        return {
            'operation': 'create',
            'objectType': 'case_task_log',
            'object': {
                'message': 'fixed',
                'case_task': {'id': task.task_id},
            },
        }

    process_task_log(make_event())
    process_task_log(make_event())

    vulns = VulnerabilityDocument.search().filter('match', id=vuln.id).execute()
    self.assertEqual(len(vulns.hits), 1)
    self.assertEqual(vulns.hits[0].tags, ['test', 'FIXED'])
def test_update_discovered_asset(self):
    """Updating a discovered asset must refresh the asset data embedded in its vulnerabilities."""
    asset_tenant_1 = self.create_asset(self.config_tenant_1.name)
    discovered = AssetDocument.get_or_create(asset_tenant_1.ip_address)
    create_vulnerability(discovered, create_cve())
    self.assertEqual(1, Search().index(AssetDocument.Index.name).count())

    AssetDocument.create_or_update({asset_tenant_1.id: asset_tenant_1})
    thread_pool_executor.wait_for_all()

    # Still one asset and one vulnerability — updated in place.
    self.assertEqual(1, Search().index(AssetDocument.Index.name).count())
    self.assertEqual(1, Search().index(VulnerabilityDocument.Index.name).count())
    hit = VulnerabilityDocument.search().filter(
        'term', cve__id='CVE-2017-0002').execute().hits[0]
    for field in ('id', 'ip_address', 'confidentiality_requirement',
                  'availability_requirement'):
        self.assertEqual(getattr(hit.asset, field), getattr(asset_tenant_1, field))
def start_processing_per_tenant(vulnerability_index: str, asset_index: str):
    """Fan out sliced environmental-score calculation for one tenant's indices.

    Counts active (non-deleted) assets and active (non-fixed, non-deleted)
    vulnerabilities, primes the per-CVE count cache, then launches a celery
    group of _processing slices chained with _end_processing. Exceptions are
    logged, never raised.
    """
    LOGGER.info(
        F'Calculation for {vulnerability_index} and {asset_index} started')
    try:
        assets_count = AssetDocument.search(index=asset_index).filter(
            ~Q('match', tags=AssetStatus.DELETED)).count()
        vuln_search = VulnerabilityDocument.search(
            index=vulnerability_index).filter(
            ~Q('match', tags=VulnerabilityStatus.FIXED)
            & ~Q('match', asset__tags=AssetStatus.DELETED))
        prepare(vulnerability_index)
        workers_count = get_workers_count()
        vuln_count = vuln_search.count()
        # Small datasets run as a single slice; larger ones are split, capped
        # at one slice per worker. BUG FIX: clamp to at least 1 — previously
        # vuln_count // workers_count could be 0 (workers > vuln_count > 500),
        # producing an empty task group so nothing was processed.
        slices_count = 1
        if vuln_count > 500:
            slices_count = max(
                1, min(vuln_count // workers_count, workers_count))
        (group(
            _processing.si(idx, slices_count, assets_count,
                           vulnerability_index)
            for idx in range(slices_count))
         | _end_processing.si(vulnerability_index, asset_index))()
    except Exception as ex:
        LOGGER.error(F'Unknown processing exception {ex}')
def prepare(vulnerability_index):
    """Cache per-CVE document counts for active (non-fixed, non-deleted) vulnerabilities."""
    search = VulnerabilityDocument.search(index=vulnerability_index).filter(
        ~Q('match', tags=VulnerabilityStatus.FIXED)
        & ~Q('match', asset__tags=AssetStatus.DELETED))
    # One terms aggregation over every CVE id in the index.
    search.aggs.bucket('cves', 'terms', field='cve.id', size=10000000)
    response = search.execute()
    for bucket in response.aggregations.cves.buckets:
        cache_key = '{}-{}'.format(vulnerability_index, bucket['key'])
        cache.set(cache_key, bucket['doc_count'], LOCK_EXPIRE)
def test__update_call_nessus_parser(self):
    """_update_scans must download scan 2 once and index both parsed vulnerabilities."""
    scanners_registry.register('test-scanner', self.client, NessusReportParser)
    client_mock = self.client()
    client_mock.get_scans.return_value = {'scans': [{'id': 2, 'folder_id': 2}]}
    fixture = Path(__file__).parent / "nessus/fixtures/internal.xml"
    client_mock.download_scan.return_value = BytesIO(fixture.read_bytes())

    _update_scans(self.config.pk)

    client_mock.download_scan.assert_called_once_with(2)
    self.assertEqual(VulnerabilityDocument.search().count(), 2)
def _update_scans(config_pk: int):
    """Pull new scans for the given Config and update asset/vulnerability indices.

    Downloads each scan modified since the last pull, parses vulnerabilities
    and targets, and feeds the documents to Elasticsearch. Config status goes
    IN_PROGRESS -> SUCCESS, or ERROR on any exception; background jobs are
    always awaited before returning. Returns None when the config pk is
    unknown.
    """
    config = Config.objects.filter(pk=config_pk)
    if config.exists():
        config = config.first()
    else:
        # BUG FIX: previously a missing config fell through with `config`
        # still a QuerySet; set_status() inside the try block then raised an
        # AttributeError that the broad except swallowed. Bail out early,
        # mirroring the sibling _update_scans implementation.
        LOGGER.error(F'Config: {config_pk} not exist!')
        return None
    try:
        config.set_status(Config.Status.IN_PROGRESS)
        client, parser = scanners_registry.get(config)
        # Timestamp taken before downloading so a scan finishing mid-update is
        # not missed by the next pull.
        now_date = now()
        scan_list = client.get_scans(last_modification_date=config.last_scans_pull)
        scan_list = parser.get_scans_ids(scan_list)
        for scan_id in scan_list:
            LOGGER.info(F'Trying to download report form {config.name}')
            file = client.download_scan(scan_id)
            # Copy taken before parsing — presumably parse() consumes the
            # stream, so target extraction needs its own copy.
            targets = copy.deepcopy(file)
            LOGGER.info(F'Retrieving discovered assets for {config.name}')
            discovered_assets = AssetDocument.get_assets_with_tag(tag=AssetStatus.DISCOVERED, config=config)
            LOGGER.info(F'Trying to parse scan file {scan_id}')
            vulns, scanned_hosts = parser.parse(file)
            LOGGER.info(F'File parsed: {scan_id}')
            LOGGER.info(F'Trying to parse targets from file {scan_id}')
            # Some parsers know how to extract targets; otherwise defer to the
            # scanner client.
            if hasattr(parser, "get_targets"):
                targets = parser.get_targets(targets)
            else:
                targets = client.get_targets(targets)
            LOGGER.info(F'Targets parsed: {scan_id}')
            if targets:
                LOGGER.info(F'Attempting to update discovered assets in {config.name}')
                AssetDocument.update_gone_discovered_assets(targets=targets,
                                                            scanned_hosts=scanned_hosts,
                                                            discovered_assets=discovered_assets,
                                                            config=config)
            LOGGER.info(F'Attempting to update vulns data in {config.name}')
            VulnerabilityDocument.create_or_update(vulns, scanned_hosts, config)
        config.last_scans_pull = now_date
        config.set_status(Config.Status.SUCCESS)
        config.save(update_fields=['last_scans_pull'])
    except Exception as e:
        config.set_status(status=Config.Status.ERROR, error_description=e)
        LOGGER.error(F'Error while loading vulnerability data {e}')
    finally:
        # Ensure background update jobs finish before the task returns.
        thread_pool_executor.wait_for_all()
def test__update_call_nessus_parser(self):
    """_update_scans must fetch the report in both XML and PRETTY formats and index the vulns."""
    self.manager().get_parser.return_value = NessusReportParser(self.config)
    scanners_registry.register('test-scanner', self.manager)
    self.client.get_scans.return_value = {
        'scans': [{'id': 2, 'folder_id': 2}],
        'folders': [{'type': 'custom', 'id': 2, 'name': 'test'}],
    }
    fixture = Path(__file__).parent / "nessus/fixtures/internal.xml"
    self.client.download_scan.return_value = BytesIO(fixture.read_bytes())

    _update_scans(self.config.pk)

    expected_calls = [call(2, self.client.ReportFormat.XML),
                      call(2, self.client.ReportFormat.PRETTY)]
    self.client.download_scan.assert_has_calls(expected_calls)
    self.assertEqual(VulnerabilityDocument.search().count(), 2)
def search_vulnerabilities(request):
    """Return up to 100 non-fixed vulnerabilities for a valid IPv4 address.

    Optional 'tenant' GET parameter selects the tenant's index (404 when the
    tenant name is unknown). Raises NotFound when ip_address is missing or
    not a valid IPv4 address.
    """
    tenant = request.GET.get('tenant', None)
    if tenant:
        tenant = get_object_or_404(Tenant, name=tenant)
    ip_address = request.GET.get('ip_address', None)
    if ip_address and netaddr.valid_ipv4(ip_address):
        index = registry.get_index_for_tenant(tenant, VulnerabilityDocument)
        # BUG FIX: the [0:100] slice must be applied to the Search object so
        # it becomes the query size. Previously it sliced the already-executed
        # response, which only trimmed the default 10-hit page.
        result = VulnerabilityDocument.search(index=index).filter(
            Q('term', asset__ip_address=ip_address)
            & ~Q('match', tags=VulnerabilityStatus.FIXED))[0:100].execute()
        return Response(
            VulnerabilityDocumentSerializer(result, many=True).data)
    raise NotFound()
def _processing(idx, slices_count, assets_count, vulnerability_index):
    """Recalculate environmental scores (v2 and v3) for one slice of the index.

    idx / slices_count identify the scroll slice; scored documents are
    bulk-saved in batches of ~10k. Exceptions are logged and swallowed —
    this is a celery task body that must not raise.
    """
    docs = []
    try:
        vuln_search = VulnerabilityDocument.search(
            index=vulnerability_index).filter(
            ~Q('match', tags=VulnerabilityStatus.FIXED)
            & ~Q('match', asset__tags=AssetStatus.DELETED))
        LOGGER.debug(
            F'Calculation for {vulnerability_index} and {idx}, {slices_count} started'
        )
        if slices_count > 1:
            vuln_search = vuln_search.extra(slice={
                "id": idx,
                "max": slices_count
            }).params(scroll="60m")
        # Materialize the scroll up front so its context is not held open
        # while the (slow) score calculation mutates the documents.
        # FIX: list() instead of a pass-through list comprehension.
        vulns = list(vuln_search.scan())
        LOGGER.debug(F'all vulns for slice {idx} downloaded')
        for vuln in vulns:
            score, vector = calculate_environmental_score_v3(vuln)
            vuln.environmental_score_vector_v3 = vector
            vuln.environmental_score_v3 = score
            vuln_count = get_cve_count(vulnerability_index, vuln.cve.id)
            score, vector = calculate_environmental_score_v2(
                vuln, vuln_count, assets_count)
            vuln.environmental_score_vector_v2 = vector
            vuln.environmental_score_v2 = score
            docs.append(vuln.to_dict(include_meta=True))
            if len(docs) > 10000:
                async_bulk(docs, vulnerability_index)
                docs = []
        # FIX: flush only the final partial batch; skip the pointless call
        # with an empty list.
        if docs:
            async_bulk(docs, vulnerability_index)
    except Exception as ex:
        LOGGER.error(F'Unknown processing exception {ex}')
    finally:
        thread_pool_executor.wait_for_all()
        LOGGER.debug(
            F'Calculation for {vulnerability_index} and {idx}, {slices_count} done'
        )
def test_start_processing_per_tenant(self):
    """One processing slice must score exactly 100 of the 300 generated vulnerabilities."""
    self.generate_assets()
    self.generate_vulns()
    vuln_search = VulnerabilityDocument.search()
    score_fields = ('environmental_score_v2', 'environmental_score_vector_v2',
                    'environmental_score_v3', 'environmental_score_vector_v3')

    # Before processing: no document carries any environmental score field.
    self.assertEqual(vuln_search.count(), 300)
    for field in score_fields:
        self.assertEqual(vuln_search.filter('exists', field=field).count(), 0)

    tasks._processing(0, 1, 1000, VulnerabilityDocument.Index.name)

    # After processing: document count unchanged, 100 documents scored.
    self.assertEqual(vuln_search.count(), 300)
    for field in score_fields:
        self.assertEqual(vuln_search.filter('exists', field=field).count(), 100)
def process_task_log(event):
    """Handle a TheHive task-log webhook event.

    Maps the log message to a tag via TheHive4LogConverter and attaches that
    tag to the vulnerability document linked to the task (tenant-aware index
    lookup with a default-index fallback). Idempotent: a tag already present
    is not added again. Errors are logged and swallowed — webhook processing
    must never raise.
    """
    try:
        LOGGER.debug(event)
        if 'object' not in event:
            return
        message = event['object']['message']
        task_id = event['object']['case_task']['id']
        tasks = Task.objects.filter(task_id=task_id)
        converter = TheHive4LogConverter.objects.filter(log_message=message)
        LOGGER.debug(F'Task id {task_id}, found {tasks}')
        LOGGER.debug(F'Converter found: {converter}')
        if not (tasks.exists() and converter.exists()):
            return
        task = tasks.first()
        tag = converter.first().tag
        try:
            tenant = Tenant.objects.get(name=task.tenant)
            index = registry.get_index_for_tenant(
                tenant, VulnerabilityDocument)
        except Tenant.DoesNotExist:
            # Task without a known tenant falls back to the default index.
            index = VulnerabilityDocument.Index.name
        doc = VulnerabilityDocument.get(task.document_id, index=index)
        LOGGER.debug(F'Documents found')
        # FIX: deduplicated the log+save logic that was repeated in both
        # branches of the original hasattr() check.
        if not hasattr(doc, 'tags'):
            doc.tags = [tag]
        elif tag not in doc.tags:
            doc.tags.append(tag)
        else:
            return  # tag already present — nothing to save
        LOGGER.debug(F'Saved')
        doc.save()
    except Exception as ex:
        LOGGER.error(ex)
def test_calculate_environmental_score_v2(self, cr, ir, ar, expected):
    """Parametrized check of the CVSS v2 environmental score calculation."""
    self.prepare_asset(cr, ir, ar)
    vuln = VulnerabilityDocument(cve=self.cve, asset=self.asset)
    actual = tasks.calculate_environmental_score_v2(vuln, 100, 100)
    self.assertEqual(actual, expected)
def test_environmental_score_v3(self, scope, cr, ir, ar, expected):
    """Parametrized check of the CVSS v3 environmental score calculation."""
    self.prepare_asset(cr, ir, ar)
    self.change_scope(scope)
    vuln = VulnerabilityDocument(cve=self.cve, asset=self.asset)
    actual = tasks.calculate_environmental_score_v3(vuln)
    self.assertEqual(actual, expected)
def generate_vulns(asset_count, asset_search, cve_sets):
    """Generate random test vulnerabilities until the index averages ~30 vulns per asset.

    Picks random CVEs (matched to each asset's OS) per asset, bulk-indexing the
    documents via a small thread pool in batches of ~10k. Assets that have
    exhausted their CVE set are stamped with last_scan_date and retired from
    the pool. Progress is printed every 10 vulns-per-asset.
    """
    bulk_executor = concurrent.futures.ThreadPoolExecutor(max_workers=4)
    bulk_pool, vulns, assets = [], [], []
    asset_vuln = {}  # ip_address -> CVEs already attached to that asset
    vuln_count = 0
    step = 10  # next vulns-per-asset threshold at which to print progress
    for a in asset_search.scan():
        asset_vuln[a.ip_address] = []
        assets.append(a)
    while vuln_count / asset_count < 30:
        vuln_count = 0
        if len(assets) == 0:
            break  # every asset has exhausted its CVE set
        for asset in assets:
            cve_set = cve_sets[asset.os]
            if len(cve_sets[asset.os]) == len(asset_vuln[asset.ip_address]):
                # Asset has every CVE for its OS: retire it. The immediate
                # break makes the remove() safe despite iterating `assets`.
                asset.last_scan_date = datetime.now()
                assets.remove(asset)
                asset.save()
                break
            cve = random.choice(cve_set)
            # NOTE(review): this compares cve.id against a list of CVE
            # *objects* (appended below), so the membership test may never
            # match — confirm CVE equality semantics.
            if cve.id not in asset_vuln[asset.ip_address]:
                asset_vuln[asset.ip_address].append(cve)
                vulns.append(
                    VulnerabilityDocument(
                        id=F'{asset.id}-{cve.id}',
                        asset=asset,
                        cve=cve,
                        name=F'Detected {cve.id} for {asset.ip_address}',
                        description=cve.summary,
                        solution='Sample solution received from scanner',
                        protocol='tcp',
                        created_date=datetime.now(),
                        modified_date=datetime.now(),
                        source='Generate Vulns Script',
                        scan_file_url='http://generated_from_script').to_dict(
                        ))
            if len(vulns) > 10000:
                # Hand a full batch to the bulk-indexing pool.
                bulk_pool.append(bulk_executor.submit(_bulk, vulns))
                vulns = []
        for ip in asset_vuln:
            vuln_count += len(asset_vuln[ip])
        if vuln_count / asset_count > step:
            print(F'Generated vulns: {vuln_count}, '
                  F'assets count: {asset_count}, '
                  F'ratio: {round(vuln_count / asset_count, 2)}')
            step += 10
    if vulns:
        # Flush the final partial batch.
        bulk_pool.append(bulk_executor.submit(_bulk, vulns))
    concurrent.futures.wait(bulk_pool)
    print(F'Generated vulns: {vuln_count}, '
          F'assets count: {asset_count}, '
          F'ratio: {round(vuln_count / asset_count, 2)}')