def _create_asset_on_import(asset_value, scan, asset_type='unknown'):
    """Create an Asset for a value discovered during a findings import.

    When the scan already tracks an ip-subnet/ip-range asset containing
    `asset_value`, the new asset inherits criticity and owner from that
    parent; otherwise the type is guessed from the value and defaults
    are applied.

    NOTE(review): this definition is shadowed by a later
    `_create_asset_on_import` (4-parameter variant) in this module.

    Args:
        asset_value: value of the asset (IP, domain, URL, ...).
        scan: Scan the asset was found by (may be None).
        asset_type: type hint; only 'ip' triggers the parent lookup.

    Returns:
        The newly created (saved) Asset.
    """
    Event.objects.create(
        message="[EngineTasks/_create_asset_on_import()] create: '{}/{}'.".format(asset_value, asset_type),
        type="DEBUG", severity="INFO", scan=scan)

    # create assets if data_type is ip-subnet or ip-range
    if scan and asset_type == 'ip':
        # Search parent asset (a subnet/range of this scan containing the IP)
        parent_asset = None
        for pa in scan.assets.filter(type__in=['ip-subnet', 'ip-range']):
            if net.is_ip_in_ipset(ip=asset_value, ipset=pa.value):
                parent_asset = pa
                break
        if parent_asset:
            name = "{} (from '{}')".format(asset_value, parent_asset.name)
            criticity = parent_asset.criticity
            owner = parent_asset.owner
        else:
            name = asset_value
            criticity = 'medium'
            owner = User.objects.filter(username='******').first()
    else:
        # Guess the asset type from the value itself.
        if net.is_valid_ip(asset_value):
            asset_type = "ip"
        elif net._is_valid_domain(asset_value):
            asset_type = "domain"
        elif net._is_valid_url(asset_value):
            asset_type = "url"
        else:
            asset_type = "fqdn"  # default :/
        name = asset_value
        criticity = 'medium'
        owner = User.objects.filter(username='******').first()

    # Create the new asset ...
    asset_args = {
        'value': asset_value,
        'name': name,
        'type': asset_type,
        'criticity': criticity,
        'description': "Asset dynamically created",
        'owner': owner
    }
    asset = Asset(**asset_args)
    asset.save()
    # Bug fix: the original dereferenced `scan.assets` unconditionally and
    # crashed with AttributeError when scan is None — a case the
    # `if scan and ...` branch above explicitly allows.
    if scan:
        scan.assets.add(asset)

    # Then add the asset to every asset group holding a subnet/range that
    # contains it.
    for ag in AssetGroup.objects.filter(assets__type__in=['ip-subnet', 'ip-range']):
        for aga in ag.assets.all():
            if net.is_ip_in_ipset(ip=asset_value, ipset=aga.value):
                ag.assets.add(asset)
                ag.save()
                ag.calc_risk_grade()
                ag.save()
                # Group already updated; testing further members would only
                # repeat the same (idempotent) add + risk recalculation.
                break
    return asset
def _create_asset_on_import(asset_value, scan, asset_type='unknown', parent=None):
    """Create an Asset for a value discovered during a findings import.

    Behaviour:
      * inherits criticity/owner from a matching ip-subnet/ip-range parent
        asset of the scan when `asset_value` is an IP;
      * otherwise guesses the asset type from the value;
      * attaches the asset to every AssetGroup containing a matching
        subnet/range;
      * when `parent` is given, also files the asset into a
        "<parent> assets" group (created on demand);
      * when the scan policy defines a 'new_assets_group' option, also
        files the asset into that group (created on demand).

    Args:
        asset_value: value of the asset (IP, domain, URL, keyword...).
        scan: Scan the asset was found by (may be None).
        asset_type: type hint for the new asset.
        parent: optional name of a parent entity used to group assets.

    Returns:
        The newly created (saved) Asset.
    """
    evt_prefix = "[EngineTasks/_create_asset_on_import()]"
    Event.objects.create(
        message="{} Create: '{}/{} from parent {}'.".format(
            evt_prefix, asset_value, asset_type, parent),
        type="DEBUG", severity="INFO", scan=scan)

    # create assets if data_type is ip-subnet or ip-range
    if scan and net.is_valid_ip(asset_value):
        asset_type = "ip"
        # Search parent asset (a subnet/range of this scan containing the IP)
        parent_asset = None
        for pa in scan.assets.filter(type__in=['ip-subnet', 'ip-range']):
            if net.is_ip_in_ipset(ip=asset_value, ipset=pa.value):
                parent_asset = pa
                break
        name = asset_value
        if parent_asset:
            criticity = parent_asset.criticity
            owner = parent_asset.owner
        else:
            criticity = 'medium'
            owner = get_user_model().objects.filter(username='******').first()
    else:
        # Guess the asset type from the value itself.
        if net.is_valid_ip(asset_value):
            asset_type = "ip"
        elif net._is_valid_domain(asset_value):
            asset_type = "domain"
        elif net._is_valid_url(asset_value):
            asset_type = "url"
        else:
            asset_type = "keyword"  # default :/
        name = asset_value
        criticity = 'medium'
        owner = get_user_model().objects.filter(username='******').first()

    # Create the new asset ...
    asset_args = {
        'value': asset_value,
        'name': name,
        'type': asset_type,
        'criticity': criticity,
        'description': "Asset dynamically created",
        'owner': owner
    }
    asset = Asset(**asset_args)
    asset.save()
    # Bug fix: the original dereferenced `scan.assets` unconditionally and
    # crashed with AttributeError when scan is None — a case the
    # `if scan and ...` branch above explicitly allows.
    if scan:
        scan.assets.add(asset)

    # Then add the asset to every asset group holding a subnet/range that
    # contains it.
    for ag in AssetGroup.objects.filter(assets__type__in=['ip-subnet', 'ip-range']):
        for aga in ag.assets.all():
            if net.is_ip_in_ipset(ip=asset_value, ipset=aga.value):
                ag.assets.add(asset)
                ag.save()
                ag.calc_risk_grade()
                ag.save()

    # Creation/Update of the AssetGroup named after the parent entity.
    if parent is not None:
        Event.objects.create(
            message="{} Looking for a group named : {}".format(evt_prefix, parent),
            type="DEBUG", severity="INFO", scan=scan)
        asset_group = AssetGroup.objects.filter(
            name="{} assets".format(parent)).first()
        if asset_group is None:
            # Create an asset group dynamically
            Event.objects.create(
                message="{} Create a group named : {}".format(evt_prefix, parent),
                type="DEBUG", severity="INFO", scan=scan)
            assetgroup_args = {
                'name': "{} assets".format(parent),
                'criticity': criticity,
                'description': "AssetGroup dynamically created",
                'owner': owner
            }
            asset_group = AssetGroup(**assetgroup_args)
            asset_group.save()
        Event.objects.create(
            message="{} Add {} in group {}".format(evt_prefix, asset, parent),
            type="DEBUG", severity="INFO", scan=scan)
        # Add the asset to the new group
        asset_group.assets.add(asset)
        asset_group.save()
        # Caculate the risk grade
        asset_group.calc_risk_grade()
        asset_group.save()

    # Policy-driven grouping: the engine policy may name a group that
    # collects every dynamically created asset.
    # (Guarded on `scan` — the original crashed here when scan was None.)
    if scan is not None:
        options = scan.scan_definition.engine_policy.options
        # Idiom fix: `key in dict` instead of `key in dict.keys()`.
        if "new_assets_group" in options and options["new_assets_group"] not in ["", None]:
            asset_groupname = str(options["new_assets_group"])
            Event.objects.create(
                message="{} Looking for a group named : {}".format(
                    evt_prefix, asset_groupname),
                type="DEBUG", severity="INFO", scan=scan)
            asset_group = AssetGroup.objects.filter(name=asset_groupname).first()
            if asset_group is None:
                assetgroup_args = {
                    'name': asset_groupname,
                    'criticity': criticity,
                    'description': "AssetGroup dynamically created by policy",
                    'owner': owner
                }
                asset_group = AssetGroup(**assetgroup_args)
                asset_group.save()
            Event.objects.create(
                message="{} Add {} in group {}".format(
                    evt_prefix, asset, asset_groupname),
                type="DEBUG", severity="INFO", scan=scan)
            # Add the asset to the group
            asset_group.assets.add(asset)
            asset_group.save()
            # Caculate the risk grade
            asset_group.calc_risk_grade()
            asset_group.save()
    return asset
def importfindings_task(self, report_filename, owner_id, engine, min_level):
    """Celery task: import findings from an external report into a Scan.

    NOTE(review): this definition is shadowed by a later
    `importfindings_task` variant in this module.

    Args:
        report_filename: path to the report file (Nessus XML when
            engine == 'nessus', otherwise a JSON file with an 'issues' list).
        owner_id: id of the User that will own the created Scan.
        engine: source engine name; only 'nessus' gets dedicated parsing.
        min_level: minimum severity name ('info'..'critical'); findings
            below this level are discarded.

    Returns:
        True on success, False on any parse/import error (the error is
        recorded as an Event).
    """
    Event.objects.create(
        message="[EngineTasks/importfindings_task/{}] Task started with engine {}.".format(
            self.request.id, engine),
        type="INFO", severity="INFO")
    level_to_value = {'info': 0, 'low': 1, 'medium': 2, 'high': 3, 'critical': 4}
    # Py3 fix: dict.iteritems() was removed in Python 3 -> use items().
    value_to_level = {v: k for k, v in level_to_value.items()}
    min_level = level_to_value.get(min_level, 0)

    if engine == 'nessus':
        summary = {
            "info": 0, "low": 0, "medium": 0, "high": 0, "critical": 0,
            "missing": 0, "new": 0, "total": 0
        }
        Event.objects.create(
            message='[EngineTasks/importfindings_task()] engine: nessus',
            type="INFO", severity="INFO")
        try:
            # Py3 fix: cElementTree was removed in Python 3.9; plain
            # ElementTree transparently uses the C accelerator when available.
            import xml.etree.ElementTree as ET
        except ImportError:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Unable to import xml parser.",
                type="ERROR", severity="ERROR")
            return False

        # parse nessus file
        data = list()
        try:
            # Resource fix: the original leaked the open file handle;
            # 'with' guarantees it is closed.
            with open(report_filename, "r") as report_file:
                root = ET.parse(report_file).getroot()
        except Exception as e:
            # Py3 fix: Exception.message no longer exists -> format e itself.
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Unable to open and parse report file.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False

        # Tags whose text is copied verbatim into
        # finding['metadata']['risk'] under the same key.
        risk_tags = (
            'cvss_vector', 'cvss_base_score',
            'cvss_temporal_vector', 'cvss_temporal_score',
            'cvss3_vector', 'cvss3_base_score',
            'cvss3_temporal_vector', 'cvss3_temporal_score',
            'exploit_available', 'exploitability_ease',
            'exploited_by_nessus', 'patch_publication_date',
        )
        try:
            for block in root:
                if block.tag != 'Report':
                    continue
                for report_host in block:
                    asset = dict()
                    asset['name'] = report_host.attrib['name']
                    for report_item in report_host:
                        if report_item.tag == 'HostProperties':
                            for tag in report_item:
                                asset[tag.attrib['name']] = tag.text
                        if not net.is_valid_ip(asset.get('host-ip', asset.get('name'))):
                            Event.objects.create(
                                message="[EngineTasks/importfindings_task()] finding not added.",
                                type="DEBUG", severity="INFO",
                                description="No ip address for asset {} found".format(asset.get('name')))
                            summary['missing'] += 1
                            continue
                        if 'pluginName' not in report_item.attrib:
                            continue
                        summary['total'] += 1
                        finding = {
                            "target": {
                                "addr": [asset.get('host-ip', asset.get('name'))]
                            },
                            "metadata": {
                                "risk": {"cvss_base_score": "0.0"},
                                "vuln_refs": {},
                                "links": list(),
                                "tags": list()
                            },
                            "title": report_item.attrib['pluginName'],
                            "type": "Vuln",
                            "confidence": "3",
                            "severity": "info",
                            "description": "n/a",
                            "solution": "n/a",
                            "raw": None
                        }
                        if int(report_item.attrib['severity']) < min_level:
                            # if below min level descard finding
                            summary['missing'] += 1
                            continue
                        finding['severity'] = value_to_level.get(
                            int(report_item.attrib['severity']), 'info')
                        summary[finding['severity']] += 1
                        for param in report_item:
                            if param.tag in risk_tags:
                                finding['metadata']['risk'][param.tag] = param.text
                            elif param.tag == 'vuln_publication_date':
                                finding['metadata']['vuln_publication_date'] = param.text
                            elif param.tag == 'solution':
                                finding['solution'] = param.text
                            elif param.tag == 'description':
                                finding['description'] = param.text
                            elif param.tag in ('cve', 'bid'):
                                finding['metadata']['vuln_refs'][param.tag] = param.text
                            elif param.tag == 'xref':
                                # e.g. "OSVDB:12345" -> {'OSVDB': '12345'}
                                parts = param.text.split(':')
                                finding['metadata']['vuln_refs'][parts[0]] = parts[1]
                            elif param.tag == 'see_also':
                                for link in param.text.split('\n'):
                                    finding['metadata']['links'].append(link)
                            elif param.tag == 'plugin_output':
                                finding['raw'] = param.text
                        data.append(finding)
        except Exception as e:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Error parsing nessus file.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False

        try:
            nessus_engine = Engine.objects.filter(name='NESSUS').first()
            nessus_import_policy = EnginePolicy.objects.filter(id=17).first()
            scan_definition = ScanDefinition.objects.filter(
                title='Nessus import').first()
            if scan_definition is None:
                scan_definition = ScanDefinition.objects.create(
                    title='Nessus import',
                    scan_type='single',
                    description='Scan definition for nessus imports',
                    engine_type=nessus_engine,
                    engine_policy=nessus_import_policy)
            scan = Scan.objects.create(
                title='nessus_' + datetime.date.today().isoformat(),
                status='finished',
                summary=summary,
                engine_type=nessus_engine,
                engine_policy=nessus_import_policy,
                owner=User.objects.filter(id=owner_id).first(),
                scan_definition=scan_definition)
            scan.save()
            _import_findings(findings=data, scan=scan)
        except Exception as e:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Error importing findings.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False
    else:
        # has to be json
        with open(report_filename) as data_file:
            data = json.load(data_file)
        try:
            _import_findings(findings=data['issues'],
                             scan=Scan.objects.filter(title='test').first())
        except Exception as e:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Error importing findings.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False
    return True
def importfindings_task(self, report_filename, owner_id, engine, min_level):
    """Celery task: import findings from an external report into a new Scan.

    Extended variant: also extracts host categories (OS), per-finding
    service/port tags, exploit-framework metadata, and the overall scan
    start/end timestamps from the Nessus HOST_START/HOST_END properties.

    Args:
        report_filename: path to the report file (Nessus XML when
            engine == 'nessus', otherwise a JSON file with an 'issues' list).
        owner_id: id of the User that will own the created Scan.
        engine: source engine name; only 'nessus' gets dedicated parsing.
        min_level: minimum severity name ('info'..'critical'); findings
            below this level are discarded.

    Returns:
        True on success, False on any parse/import error (the error is
        recorded as an Event).
    """
    Event.objects.create(
        message="[EngineTasks/importfindings_task/{}] Task started with engine {}.".format(
            self.request.id, engine),
        type="INFO", severity="INFO")
    level_to_value = {'info': 0, 'low': 1, 'medium': 2, 'high': 3, 'critical': 4}
    # Py3 fix: dict.iteritems() was removed in Python 3 -> use items().
    value_to_level = {v: k for k, v in level_to_value.items()}
    min_level = level_to_value.get(min_level, 0)

    if engine == 'nessus':
        summary = {
            "info": 0, "low": 0, "medium": 0, "high": 0, "critical": 0,
            "missing": 0, "new": 0, "total": 0
        }
        Event.objects.create(
            message='[EngineTasks/importfindings_task()] engine: nessus',
            type="INFO", severity="INFO")
        try:
            # Py3 fix: cElementTree was removed in Python 3.9; plain
            # ElementTree transparently uses the C accelerator when available.
            import xml.etree.ElementTree as ET
        except ImportError:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Unable to import xml parser.",
                type="ERROR", severity="ERROR")
            return False

        # parse nessus file
        data = list()
        try:
            # Resource fix: the original leaked the open file handle;
            # 'with' guarantees it is closed.
            with open(report_filename, "r") as report_file:
                root = ET.parse(report_file).getroot()
        except Exception as e:
            # Py3 fix: Exception.message no longer exists -> format e itself.
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Unable to open and parse report file.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False

        # Nessus HOST_START/HOST_END timestamp format.
        nessus_time_format = '%a %b %d %H:%M:%S %Y'
        # Tags whose text is copied verbatim into
        # finding['metadata']['risk'] under the same key.
        risk_tags = (
            'cvss_vector', 'cvss_base_score',
            'cvss_temporal_vector', 'cvss_temporal_score',
            'cvss3_vector', 'cvss3_base_score',
            'cvss3_temporal_vector', 'cvss3_temporal_score',
            'exploit_available', 'exploitability_ease',
            'exploited_by_nessus', 'exploited_by_malware',
            'metasploit_name', 'patch_publication_date',
        )
        try:
            start = None  # earliest HOST_START over all hosts
            end = None    # latest HOST_END over all hosts
            for block in root:
                if block.tag != 'Report':
                    continue
                for report_host in block:
                    asset = dict()
                    asset['name'] = report_host.attrib['name']
                    asset['categories'] = list()
                    asset['description'] = None
                    for report_item in report_host:
                        if report_item.tag == 'HostProperties':
                            for tag in report_item:
                                asset[tag.attrib['name']] = tag.text
                            if asset.get('HOST_START') is not None:
                                host_start = datetime.datetime.strptime(
                                    asset.get('HOST_START'), nessus_time_format)
                                if start is None or host_start < start:
                                    start = host_start
                            if asset.get('HOST_END') is not None:
                                host_end = datetime.datetime.strptime(
                                    asset.get('HOST_END'), nessus_time_format)
                                if end is None or host_end > end:
                                    end = host_end
                            # Resolve the asset identity: prefer FQDN,
                            # then a valid host-ip, then the report name.
                            if asset.get('host-fqdn') is not None:
                                asset['type'] = 'fqdn'
                                asset['value'] = asset.get('host-fqdn')
                            elif asset.get('host-ip') is not None and net.is_valid_ip(asset.get('host-ip')):
                                asset['type'] = 'ip'
                                asset['value'] = asset.get('host-ip')
                            else:
                                if net.is_valid_ip(asset['name']):
                                    asset['type'] = 'ip'
                                else:
                                    asset['type'] = 'domain'
                                asset['value'] = asset['name']
                            # OS information feeds the asset categories.
                            if asset.get('operating-system') is not None:
                                asset['categories'].extend(
                                    asset.get('operating-system').split('\n'))
                            elif asset.get('os') is not None:
                                asset['categories'].append(asset.get('os'))
                        if 'pluginName' not in report_item.attrib:
                            continue
                        summary['total'] += 1
                        finding = {
                            "target": {"addr": [asset['value']]},
                            "metadata": {
                                "risk": {"cvss_base_score": "0.0"},
                                "vuln_refs": {},
                                "links": list(),
                                "tags": list()
                            },
                            "title": report_item.attrib['pluginName'],
                            "type": "Vuln",
                            "confidence": "3",
                            "severity": "info",
                            "description": "n/a",
                            "solution": "n/a",
                            "raw": None
                        }
                        if int(report_item.attrib['severity']) < min_level:
                            # if below min level descard finding
                            summary['missing'] += 1
                            continue
                        finding['severity'] = value_to_level.get(
                            int(report_item.attrib['severity']), 'info')
                        summary[finding['severity']] += 1
                        # Tag the service and port when a real service was
                        # identified. NOTE(review): attrib values are strings,
                        # so the `!= 0` comparison only filters out a missing
                        # 'port' attribute ('0' != 0 is always True) — confirm
                        # the intended semantics.
                        if report_item.attrib.get('port', 0) != 0 and report_item.attrib.get('svc_name', 'general') != 'general':
                            finding['metadata']['tags'].append(
                                report_item.attrib.get('svc_name'))
                            if report_item.attrib.get('protocol') is not None:
                                finding['metadata']['tags'].append(
                                    'port=' + str(report_item.attrib.get('port')) + '/' + report_item.attrib.get('protocol'))
                            else:
                                finding['metadata']['tags'].append(
                                    'port=' + str(report_item.attrib.get('port')))
                        for param in report_item:
                            if param.tag in risk_tags:
                                finding['metadata']['risk'][param.tag] = param.text
                            # Regex fix: raw string; '\_' is an invalid escape.
                            elif re.match(r'exploit_framework_.*', param.tag):
                                finding['metadata']['risk'][param.tag] = param.text
                            elif param.tag == 'vuln_publication_date':
                                finding['metadata']['vuln_publication_date'] = param.text
                            elif param.tag == 'solution':
                                finding['solution'] = param.text
                            elif param.tag == 'description':
                                finding['description'] = param.text
                            elif param.tag in ('cve', 'bid'):
                                finding['metadata']['vuln_refs'][param.tag] = param.text
                            elif param.tag == 'xref':
                                # e.g. "OSVDB:12345" -> {'OSVDB': '12345'}
                                parts = param.text.split(':')
                                finding['metadata']['vuln_refs'][parts[0]] = parts[1]
                            elif param.tag == 'see_also':
                                for link in param.text.split('\n'):
                                    finding['metadata']['links'].append(link)
                            elif param.tag == 'plugin_output':
                                finding['raw'] = param.text
                        data.append(finding)
        except Exception as e:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Error parsing nessus file.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False

        try:
            nessus_engine = Engine.objects.filter(name='NESSUS').first()
            nessus_import_policy = EnginePolicy.objects.filter(id=17).first()
            scan_definition = ScanDefinition.objects.filter(
                title='Nessus import').first()
            if scan_definition is None:
                scan_definition = ScanDefinition.objects.create(
                    title='Nessus import',
                    scan_type='single',
                    description='Scan definition for nessus imports',
                    engine_type=nessus_engine,
                    engine_policy=nessus_import_policy)
            scan = Scan.objects.create(
                title='nessus_import_' + datetime.datetime.now().strftime('%Y/%m/%d-%H:%M:%S'),
                status='started',
                summary=summary,
                engine_type=nessus_engine,
                engine_policy=nessus_import_policy,
                owner=User.objects.filter(id=owner_id).first(),
                scan_definition=scan_definition)
            # Propagate the real scan window extracted from the report.
            if start is not None and end is not None:
                scan.started_at = start
                scan.finished_at = end
            scan.save()
            _import_findings(findings=data, scan=scan)
            scan.status = 'finished'
            scan.save()
        except Exception as e:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Error importing findings.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False
    else:
        # has to be json
        with open(report_filename) as data_file:
            data = json.load(data_file)
        try:
            _import_findings(findings=data['issues'],
                             scan=Scan.objects.filter(title='test').first())
        except Exception as e:
            Event.objects.create(
                message="[EngineTasks/importfindings_task()] Error importing findings.",
                description="{}".format(e),
                type="ERROR", severity="ERROR")
            return False
    return True