def virustotal_passive(self, indicator, indicator_type):
    current_time = datetime.datetime.utcnow()
    scraper = VirusTotalScraper()
    scraper.run(indicator)
    passive = scraper.parse_passive()
    source = "VirusTotal"

    if passive:
        # Delete old entries before inserting new ones - not ideal solution but will work for now
        HostRecord.objects.filter(query_keyword=indicator, resolution_source=source).delete()

        if indicator_type == "ip":
            ip_location = geolocate_ip(indicator)
            HostRecord.objects.bulk_create([
                HostRecord(domain_name=record[1],
                           ip_address=indicator,
                           ip_location=ip_location,
                           resolution_date=record[0],
                           resolution_source=source,
                           query_keyword=indicator,
                           query_date=current_time)
                for record in passive
            ])

        elif indicator_type == "domain":
            HostRecord.objects.bulk_create([
                HostRecord(domain_name=indicator,
                           ip_address=record[1],
                           ip_location=geolocate_ip(record[1]),
                           resolution_date=record[0],
                           resolution_source=source,
                           query_keyword=indicator,
                           query_date=current_time)
                for record in passive
            ])
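For context, a minimal sketch of how a task like this gets queued. The `self` parameter and the `deliver_email.delay(...)` calls elsewhere in this section suggest these are bound Celery tasks; the decorator and the indicator values below are assumptions, not taken from this code.

from celery import shared_task

@shared_task(bind=True)  # assumption: how the bound `self` parameter arises
def virustotal_passive(self, indicator, indicator_type):
    ...

# Queued asynchronously; arguments are illustrative:
virustotal_passive.delay("example.com", "domain")
virustotal_passive.delay("93.184.216.34", "ip")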
def host_data(self, indicator, request):
    host_data = []

    try:
        host_records = IndicatorRecord.objects.historical_hosts(indicator, request)
        # host_count = len(host_records)
        # LOGGER.warn("host_count for indicator '%s': '%s' ", indicator, host_count)

        # Set dataset limit if it's too large
        # if host_count > 1000:
        #     recordsdisplay = host_records.order_by('-created')[:500]
        # else:
        #     recordsdisplay = host_records

        # We must look up the country for each IP address for use in the template.
        # We do this outside the task because we don't know the IP addresses
        # until the task completes.
        for record in host_records.iterator():
            info = record['info']
            record['location'] = geolocate_ip(info['ip'])
            if record['info_source'] == "PT":
                record['firstseen'] = dateutil.parser.parse(info['firstseen'])
                record['lastseen'] = dateutil.parser.parse(info['lastseen'])
            host_data.append(record)
    except Exception as err:
        LOGGER.error("Historical processing failed for indicator '%s': %s", indicator, str(err))

    return host_data
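From the way this loop consumes the queryset, each row is a dict carrying at least 'info' and 'info_source', with PassiveTotal ("PT") rows also carrying parseable timestamps. A hypothetical row for orientation; the values are invented:

record = {
    "info_source": "PT",
    "info": {
        "ip": "93.184.216.34",
        "domain": "example.com",
        "firstseen": "2015-01-01T00:00:00",
        "lastseen": "2015-06-01T00:00:00",
    },
}
# host_data adds record["location"] and, for "PT" rows only, replaces the
# firstseen/lastseen strings with datetime objects via dateutil.parser.parse.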
def domain_hosts(domain):
    try:
        hosts = resolve_domain(domain)
    except LookupException as e:
        # Exceptions have no .message attribute in Python 3; format the exception itself
        logger.error("Error performing domain resolution for domain '%s': %s", domain, e)
        return

    if isinstance(hosts, list):
        record_type = RecordType.HR
        record_source = RecordSource.DNS
        for host in hosts:
            ip_location = geolocate_ip(host)
            https_cert = lookup_ip_censys_https(host)
            info = OrderedDict({"geo_location": ip_location,
                                "https_cert": https_cert,
                                "ip": host,
                                "domain": domain})
            try:
                save_record(record_type, record_source, info, domain)
            except Exception:
                logger.exception("Error saving %s (%s) record from %s",
                                 record_type.name, record_type.title, record_source.title)
def create_rows(self, record):
    if record is not None:
        if record.info_source == 'PDS':
            for result in record.info['results']:
                new_record = {
                    'domain': result['domain'],
                    'ip': result['ip'],
                    'firstseen': dateutil.parser.parse(result['firstseen']),
                    'lastseen': dateutil.parser.parse(result['lastseen']),
                    'info_date': record.info_date,
                    'geo_location': geolocate_ip(result['ip'])['country'],
                }
                yield [record.info_date, record.info_source,
                       new_record["domain"], new_record["ip"],
                       new_record["geo_location"],
                       new_record["firstseen"], new_record["lastseen"]]
        else:
            new_record = {
                'domain': record.info["domain"],
                'ip': record.info["ip"],
                'geo_location': geolocate_ip(record.info["ip"])['country'],
                'firstseen': record.info_date,
                'lastseen': '',
            }
            yield [record.info_date, record.info_source,
                   new_record["domain"], new_record["ip"],
                   new_record["geo_location"],
                   new_record["firstseen"], new_record["lastseen"]]
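Since create_rows yields flat lists in a fixed column order, it drops straight into a CSV writer. A minimal consumption sketch; the header labels are inferred from the row contents, and `exporter` and `records` are placeholders for the class defining create_rows and an iterable of IndicatorRecord-like objects:

import csv
import sys

writer = csv.writer(sys.stdout)
writer.writerow(["Info Date", "Source", "Domain", "IP",
                 "Country", "First Seen", "Last Seen"])
for record in records:
    for row in exporter.create_rows(record):
        writer.writerow(row)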
def domain_hosts(self, domain):
    current_time = datetime.datetime.utcnow()
    hosts = resolve_domain(domain)
    source = "DNS Query"

    if hosts:
        HostRecord.objects.bulk_create([
            HostRecord(domain_name=domain,
                       ip_address=host,
                       ip_location=geolocate_ip(host),
                       resolution_date=current_time,
                       resolution_source=source,
                       query_keyword=domain,
                       query_date=current_time)
            for host in hosts
        ])
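resolve_domain is not defined in this section. Several callers guard its result with a list check, which suggests it returns a list of addresses on success and something else (likely an error string) on failure. A plausible sketch under that assumption, using dnspython; the library choice is also an assumption:

import dns.exception
import dns.resolver

def resolve_domain(domain):
    # Returns a list of A-record addresses on success, or an error string,
    # matching the isinstance(hosts, list) checks used by the callers here.
    try:
        answers = dns.resolver.resolve(domain, "A")
        return [answer.to_text() for answer in answers]
    except dns.exception.DNSException as err:
        return "Error: %s" % err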
def ip_hosts(self, ip_address):
    current_time = datetime.datetime.utcnow()
    scraper = RobtexScraper()
    hosts = scraper.run(ip_address)
    ip_location = geolocate_ip(ip_address)

    if isinstance(hosts, list):
        for host in hosts:
            try:
                record_entry = IndicatorRecord(record_type="HR",
                                               info_source="REX",
                                               info_date=current_time,
                                               info=OrderedDict({"geo_location": ip_location,
                                                                 "ip": ip_address,
                                                                 "domain": host}))
                record_entry.save()
            except Exception as e:
                print(e)
def ip_hosts(self, ip_address):
    current_time = datetime.datetime.utcnow()
    scraper = RobtexScraper()
    hosts = scraper.run(ip_address)
    ip_location = geolocate_ip(ip_address)
    source = "Robtex"

    if hosts:
        HostRecord.objects.bulk_create([
            HostRecord(domain_name=host,
                       ip_address=ip_address,
                       ip_location=ip_location,
                       resolution_date=current_time,
                       resolution_source=source,
                       query_keyword=ip_address,
                       query_date=current_time)
            for host in hosts
        ])
def domain_hosts(self, domain):
    current_time = datetime.datetime.utcnow()
    hosts = resolve_domain(domain)

    if isinstance(hosts, list):
        for host in hosts:
            ip_location = geolocate_ip(host)
            try:
                record_entry = IndicatorRecord(record_type="HR",
                                               info_source="DNS",
                                               info_date=current_time,
                                               info=OrderedDict({"geo_location": ip_location,
                                                                 "ip": host,
                                                                 "domain": domain}))
                record_entry.save()
            except Exception as e:
                print(e)
def internet_identity(self, indicator):
    current_time = datetime.datetime.utcnow()
    source = "InternetIdentity"
    scraper = InternetIdentityScraper()
    passive = scraper.run(indicator)  # returns table of data rows [Date, IP, Domain]

    if passive:
        # Delete old entries before inserting new ones - not ideal solution but will work for now
        HostRecord.objects.filter(query_keyword=indicator, resolution_source=source).delete()

        HostRecord.objects.bulk_create([
            HostRecord(domain_name=record[2],
                       ip_address=record[1],
                       ip_location=geolocate_ip(record[1]),
                       resolution_date=record[0],
                       resolution_source=source,
                       query_keyword=indicator,
                       query_date=current_time)
            for record in passive
        ])
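The shape of the rows internet_identity expects from the scraper, per the "[Date, IP, Domain]" comment above; the values here are invented for illustration:

passive = [
    ["2015-03-01 00:00:00", "93.184.216.34", "example.com"],
    ["2015-04-15 00:00:00", "198.51.100.7", "example.org"],
]
# record[0] -> resolution date, record[1] -> IP address, record[2] -> domain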
def domain_hosts(domain):
    hosts = resolve_domain(domain)

    if isinstance(hosts, list):
        record_type = RecordType.HR
        record_source = RecordSource.DNS
        for host in hosts:
            ip_location = geolocate_ip(host)
            https_cert = lookup_ip_censys_https(host)
            info = OrderedDict({"geo_location": ip_location,
                                "https_cert": https_cert,
                                "ip": host,
                                "domain": domain})
            try:
                save_record(record_type, record_source, info)
            except Exception:
                logger.exception("Error saving %s (%s) record from %s",
                                 record_type.name, record_type.title, record_source.title)
def pds_data(self, indicator, request):
    pds_data = []

    try:
        pds_records = IndicatorRecord.objects.pds_hosts(indicator, request)
        # pds_count = len(pds_records)
        # LOGGER.warn("pds_count for indicator '%s': '%s' ", indicator, pds_count)

        # We must look up the country for each IP address for use in the template.
        # We do this outside the task because we don't know the IP addresses
        # until the task completes.
        for record in pds_records.iterator():
            info = record['info']
            # resultcount = len(info['results'])
            # LOGGER.warn("pds_records.info count for indicator & date '%s' on '%s': '%s' ",
            #             indicator, record.info_date, resultcount)

            # Set dataset limit if it's too large
            # if resultcount > 1000:
            #     displaylist = info['results'][:500]
            # else:
            #     displaylist = info['results']

            for result in info['results']:
                result['location'] = geolocate_ip(result['ip'])
                result['firstseen'] = dateutil.parser.parse(result['firstseen'])
                result['lastseen'] = dateutil.parser.parse(result['lastseen'])
                result['info_source'] = record['info_source']
                pds_data.append(result)
    except Exception as err:
        LOGGER.error("Historical PDS processing failed for indicator '%s': %s ", indicator, str(err))

    return pds_data
def ip_hosts(ip_address):
    scraper = RobtexScraper()
    hosts = scraper.run(ip_address)
    ip_location = geolocate_ip(ip_address)
    https_cert = lookup_ip_censys_https(ip_address)

    if isinstance(hosts, list):
        record_type = RecordType.HR
        record_source = RecordSource.REX
        for host in hosts:
            try:
                info = OrderedDict({"geo_location": ip_location,
                                    "https_cert": https_cert,
                                    "ip": ip_address,
                                    "domain": host})
                save_record(record_type, record_source, info)
            except Exception:
                logger.exception("Error saving %s (%s) record from %s",
                                 record_type.name, record_type.title, record_source.title)
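save_record is likewise not defined in this section. One caller above passes a fourth argument (the queried domain) and the others do not, so a sketch needs an optional parameter; the body below simply mirrors the IndicatorRecord construction used by the older ip_hosts/domain_hosts variants, and everything beyond that is assumed:

import datetime

def save_record(record_type, record_source, info, keyword=None):
    # Sketch only: RecordType.HR.name / RecordSource.REX.name yield the same
    # "HR" / "REX" strings the older variants hard-code; the optional keyword
    # matches the four-argument caller and is unused here.
    record_entry = IndicatorRecord(record_type=record_type.name,
                                   info_source=record_source.name,
                                   info_date=datetime.datetime.utcnow(),
                                   info=info)
    record_entry.save()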
def pto_data(self, indicator, request):
    pto_data = []

    try:
        pto_records = IndicatorRecord.objects.pto_hosts(indicator, request)
        # pto_count = len(pto_records)
        # LOGGER.warn("pto_count for indicator '%s': '%s' ", indicator, pto_count)

        # We must look up the country for each IP address for use in the template.
        # We do this outside the task because we don't know the IP addresses
        # until the task completes.
        for record in pto_records.iterator():
            info = record['info']
            record['location'] = geolocate_ip(info['ip'])
            # record['firstseen'] = dateutil.parser.parse(info['firstseen'])
            # record['lastseen'] = dateutil.parser.parse(info['lastseen'])
            pto_data.append(record)
    except Exception as err:
        LOGGER.error("Historical PassiveTotal processing failed for indicator '%s': %s",
                     indicator, str(err))

    return pto_data
def post(self, request):
    task = request.POST['task_id']
    res = GroupResult.restore(task)

    if res and not res.ready():
        return HttpResponse(json.dumps({"status": "loading"}),
                            content_type="application/json")

    # Task completion allows for origin information to be pulled
    try:
        task_origin = TaskTracker.objects.get(group_id=task)
        record_type = task_origin.type
        indicator = task_origin.keyword
    except MultipleObjectsReturned:
        task_origin = TaskTracker.objects.filter(group_id=task).latest('date')
        record_type = task_origin.type
        indicator = task_origin.keyword
    except ObjectDoesNotExist:
        record_type = None
        indicator = None

    # Pull data according to the record type
    if record_type == "Recent":
        self.template_name = "pivoteer/RecentRecords.html"

        # Current hosting records
        host_record = IndicatorRecord.objects.recent_hosts(indicator)

        # We must look up the country for each IP address for use in the template.
        # We do this outside the task because we don't know the IP addresses
        # until the task completes.
        host_records_complete = []
        for record in host_record:
            info = getattr(record, 'info')
            record.location = geolocate_ip(info['ip'])
            host_records_complete.append(record)

        self.template_vars["current_hosts"] = host_records_complete

        # Current WHOIS record
        whois_record = IndicatorRecord.objects.recent_whois(indicator)
        self.template_vars["current_whois"] = whois_record

        # Current ThreatCrowd record
        tc_info = IndicatorRecord.objects.recent_tc(indicator)
        self.template_vars["tc_info"] = tc_info

        cert_info = IndicatorRecord.objects.recent_cert(indicator)
        self.template_vars["cert_info"] = cert_info

    elif record_type == "Historical":
        self.template_name = "pivoteer/HistoricalRecords.html"

        # Historical hosting records
        host_records = IndicatorRecord.objects.historical_hosts(indicator, request)

        # We must look up the country for each IP address for use in the template.
        # We do this outside the task because we don't know the IP addresses
        # until the task completes.
        host_records_complete = []
        for record in host_records:
            info = getattr(record, 'info')
            record.location = geolocate_ip(info['ip'])
            host_records_complete.append(record)

        self.template_vars["hosting_records"] = host_records_complete

        # Historical WHOIS records
        whois_record = IndicatorRecord.objects.historical_whois(indicator)
        self.template_vars["historical_whois"] = whois_record

    elif record_type == "Malware":
        self.template_name = "pivoteer/MalwareRecords.html"
        malware_records = IndicatorRecord.objects.malware_records(indicator)
        self.template_vars["malware_records"] = malware_records
        self.template_vars["origin"] = indicator

    elif record_type == "SafeBrowsing":
        safebrowsing_records = IndicatorRecord.objects.safebrowsing_record(indicator)
        self.template_name = "pivoteer/Google.html"
        self.template_vars["records"] = safebrowsing_records
        self.template_vars["google_url"] = settings.GOOGLE_SAFEBROWSING_URL + indicator
        self.template_vars["origin"] = indicator

    elif record_type == "Search":
        self.template_name = "pivoteer/SearchRecords.html"
        search_records = IndicatorRecord.objects.get_search_records(indicator)
        self.template_vars["search_records"] = search_records

    elif record_type == "External":
        self.template_name = "pivoteer/ExternalRecords.html"
        self.template_vars['indicator'] = indicator
        self.template_vars['type'] = discover_type(indicator)

    return render(request, self.template_name, self.template_vars)
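This view is designed to be polled: it answers {"status": "loading"} as JSON while the Celery group is still running, and only renders the records template once results are ready. A client-side polling sketch from that behavior; the endpoint URL and CSRF/session handling are illustrative, not part of this code:

import time
import requests

def poll_records(url, task_id, interval=2.0, attempts=30):
    # Poll until the server stops replying with the JSON "loading" status
    # and returns the rendered HTML fragment instead.
    session = requests.Session()
    for _ in range(attempts):
        response = session.post(url, data={"task_id": task_id})
        if response.headers.get("Content-Type", "").startswith("application/json"):
            time.sleep(interval)  # still {"status": "loading"}
            continue
        return response.text
    raise TimeoutError("task %s did not complete in time" % task_id)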
def run(self, **kwargs):
    # Truncate the current time to the minute to define a one-minute lookup window
    start_timestamp = datetime.datetime.utcnow()
    minute_timestamp = start_timestamp.strftime('%Y-%m-%d %H:%M')
    current_time = datetime.datetime.strptime(minute_timestamp, '%Y-%m-%d %H:%M')
    desired_time = current_time + datetime.timedelta(minutes=1)

    # Check for overdue domain monitors
    overdue_domains = DomainMonitor.objects.filter(next_lookup__lt=current_time)
    for overdue_domain in overdue_domains:
        overdue_domain.next_lookup = current_time + datetime.timedelta(minutes=5)
        overdue_domain.save()

    # Check for overdue IP address monitors
    overdue_ips = IpMonitor.objects.filter(next_lookup__lt=current_time)
    for overdue_ip in overdue_ips:
        overdue_ip.next_lookup = current_time + datetime.timedelta(minutes=5)
        overdue_ip.save()

    # Compile list of domains to resolve based on lookup time
    domain_lookups = DomainMonitor.objects.filter(next_lookup__gte=current_time,
                                                  next_lookup__lte=desired_time)

    # Compile list of IP addresses to resolve based on lookup time
    ip_lookups = IpMonitor.objects.filter(next_lookup__gte=current_time,
                                          next_lookup__lte=desired_time)

    # Lookup domain resolutions
    for domain_lookup in domain_lookups:
        owner = domain_lookup.owner
        last_hosts = domain_lookup.last_hosts
        domain_resolutions = resolve_domain(domain_lookup.domain_name)

        if isinstance(domain_resolutions, list):
            for host in domain_resolutions:
                ip_location = geolocate_ip(host)
                try:
                    record_entry = IndicatorRecord(record_type="HR",
                                                   info_source="DNS",
                                                   info_date=current_time,
                                                   info={"geo_location": ip_location,
                                                         "ip": host,
                                                         "domain": domain_lookup.domain_name})
                    record_entry.save()
                except Exception:
                    pass

        if domain_resolutions and last_hosts:
            # Check for new or missing hosts since last lookup
            missing_hosts = list(set(last_hosts).difference(domain_resolutions))
            new_hosts = list(set(domain_resolutions).difference(last_hosts))

            # Sanitize domain name for safe email content
            sanitized_domain = domain_lookup.domain_name.replace('.', '[.]')
            email_recipient = [owner.email]

            # Compose alert and email content for hosting changes
            if missing_hosts and new_hosts:
                sanitized_missing = [host.replace('.', '[.]') for host in missing_hosts]
                sanitized_new = [host.replace('.', '[.]') for host in new_hosts]

                alert_text = 'Removed hosts: %s' % ', '.join(missing_hosts)
                self.create_alert(domain_lookup.domain_name, alert_text, owner)
                alert_text = 'New hosts: %s' % ', '.join(new_hosts)
                self.create_alert(domain_lookup.domain_name, alert_text, owner)

                email_subject = 'IP Address Changes for ' + sanitized_domain
                email_body = """DNS lookup performed at %s indicates that the tracked domain %s
has dropped the following IP addresses: %s
and has added the following IP addresses: %s""" % (str(current_time), sanitized_domain,
                                                    sanitized_missing, sanitized_new)
                deliver_email.delay(email_subject, email_body, email_recipient)

            elif missing_hosts:
                sanitized_missing = [host.replace('.', '[.]') for host in missing_hosts]
                alert_text = 'Removed hosts: %s' % ', '.join(missing_hosts)
                self.create_alert(domain_lookup.domain_name, alert_text, owner)

                email_subject = 'IP Address Drops for ' + sanitized_domain
                email_body = """DNS lookup performed at %s indicates that the tracked domain %s
has dropped the following IP addresses: %s""" % (str(current_time), sanitized_domain,
                                                  sanitized_missing)
                deliver_email.delay(email_subject, email_body, email_recipient)

            elif new_hosts:
                sanitized_new = [host.replace('.', '[.]') for host in new_hosts]
                alert_text = 'New hosts: %s' % ', '.join(new_hosts)
                self.create_alert(domain_lookup.domain_name, alert_text, owner)

                email_subject = 'IP Address Additions for ' + sanitized_domain
                email_body = """DNS lookup performed at %s indicates that the tracked domain %s
has changed to the following IP addresses: %s""" % (str(current_time), sanitized_domain,
                                                     sanitized_new)
                deliver_email.delay(email_subject, email_body, email_recipient)

        else:
            alert_text = domain_resolutions
            self.create_alert(domain_lookup.domain_name, alert_text, owner)

        # Update entry information
        domain_lookup.last_hosts = domain_resolutions
        domain_lookup.next_lookup = current_time + datetime.timedelta(hours=domain_lookup.lookup_interval)
        domain_lookup.save()

    # Lookup IP address resolutions
    scraper = RobtexScraper()
    for ip_lookup in ip_lookups:
        owner = ip_lookup.owner
        last_hosts = ip_lookup.last_hosts
        ip_resolutions = scraper.run(ip_lookup.ip_address)
        ip_location = geolocate_ip(ip_lookup.ip_address)

        if isinstance(ip_resolutions, list):
            for host in ip_resolutions:
                try:
                    record_entry = IndicatorRecord(record_type="HR",
                                                   info_source="REX",
                                                   info_date=current_time,
                                                   info={"geo_location": ip_location,
                                                         "ip": ip_lookup.ip_address,
                                                         "domain": host})
                    record_entry.save()
                except Exception:
                    pass

        if ip_resolutions and last_hosts:
            # Check for new or missing hosts since last lookup
            missing_hosts = list(set(last_hosts).difference(ip_resolutions))
            new_hosts = list(set(ip_resolutions).difference(last_hosts))

            # Sanitize IP address for safe email content
            sanitized_ip = ip_lookup.ip_address.replace('.', '[.]')
            email_recipient = [owner.email]

            # Compose alert and email content for hosting changes
            if missing_hosts and new_hosts:
                sanitized_missing = [host.replace('.', '[.]') for host in missing_hosts]
                sanitized_new = [host.replace('.', '[.]') for host in new_hosts]

                alert_text = 'Removed hosts: %s' % ', '.join(missing_hosts)
                self.create_alert(ip_lookup.ip_address, alert_text, owner)
                alert_text = 'New hosts: %s' % ', '.join(new_hosts)
                self.create_alert(ip_lookup.ip_address, alert_text, owner)

                email_subject = 'Domain Changes for ' + sanitized_ip
                email_body = """IP lookup performed at %s indicates that the tracked IP address %s
has dropped the following domains: %s
and has added the following domains: %s""" % (str(current_time), sanitized_ip,
                                               sanitized_missing, sanitized_new)
                deliver_email.delay(email_subject, email_body, email_recipient)

            elif missing_hosts:
                sanitized_missing = [host.replace('.', '[.]') for host in missing_hosts]
                alert_text = 'Removed hosts: %s' % ', '.join(missing_hosts)
                self.create_alert(ip_lookup.ip_address, alert_text, owner)

                email_subject = 'Domain Drops for ' + sanitized_ip
                email_body = """IP lookup performed at %s indicates that the tracked IP address %s
has dropped the following domains: %s""" % (str(current_time), sanitized_ip, sanitized_missing)
                deliver_email.delay(email_subject, email_body, email_recipient)

            elif new_hosts:
                sanitized_new = [host.replace('.', '[.]') for host in new_hosts]
                alert_text = 'New hosts: %s' % ', '.join(new_hosts)
                self.create_alert(ip_lookup.ip_address, alert_text, owner)

                email_subject = 'Domain Additions for ' + sanitized_ip
                email_body = """IP lookup performed at %s indicates that the tracked IP address %s
has added the following domains: %s""" % (str(current_time), sanitized_ip, sanitized_new)
                deliver_email.delay(email_subject, email_body, email_recipient)

        else:
            alert_text = ip_resolutions
            self.create_alert(ip_lookup.ip_address, alert_text, owner)

        # Update entry information
        ip_lookup.last_hosts = ip_resolutions
        ip_lookup.next_lookup = current_time + datetime.timedelta(hours=ip_lookup.lookup_interval)
        ip_lookup.save()
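The add/drop detection above reduces to two set differences. A small helper sketch (the name is hypothetical) that captures the same logic, with a worked example:

def host_changes(last_hosts, current_hosts):
    # Returns (missing, new): hosts that disappeared since the last lookup
    # and hosts that appeared in the current lookup.
    missing = sorted(set(last_hosts) - set(current_hosts))
    new = sorted(set(current_hosts) - set(last_hosts))
    return missing, new

missing, new = host_changes(["1.2.3.4", "5.6.7.8"], ["5.6.7.8", "9.9.9.9"])
# missing == ["1.2.3.4"], new == ["9.9.9.9"]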
def passivetotal_resolutions(self, indicator, indicator_type):
    current_time = datetime.datetime.utcnow()
    collector = PassiveTotal(settings.PASSIVE_TOTAL_API)
    response = collector.search(indicator)
    source = "PassiveTotal"

    try:
        # Attempt to gather resolutions from query
        resolutions = response['results']['resolutions']
    except Exception as unexpected_error:
        print(unexpected_error)
        resolutions = None

    if resolutions:
        cleaned = []

        # Cleanup and de-duplicate resolution results
        for entry in resolutions:
            if entry['firstSeen'] != 'None':
                if indicator_type == "domain":
                    cleaned.append((entry['value'], entry['firstSeen'], entry['country']))
                else:
                    cleaned.append((entry['value'], entry['firstSeen']))

            if entry['lastSeen'] != 'None' and entry['lastSeen'] != entry['firstSeen']:
                if indicator_type == "domain":
                    cleaned.append((entry['value'], entry['lastSeen'], entry['country']))
                else:
                    cleaned.append((entry['value'], entry['lastSeen']))

        # Delete old entries before inserting new ones - not ideal solution but will work for now
        HostRecord.objects.filter(query_keyword=indicator, resolution_source=source).delete()

        if indicator_type == "ip":
            ip_location = geolocate_ip(indicator)
            HostRecord.objects.bulk_create([
                HostRecord(domain_name=entry[0],
                           ip_address=indicator,
                           ip_location=ip_location,
                           resolution_date=entry[1],
                           resolution_source=source,
                           query_keyword=indicator,
                           query_date=current_time)
                for entry in cleaned
            ])

        elif indicator_type == "domain":
            HostRecord.objects.bulk_create([
                HostRecord(domain_name=indicator,
                           ip_address=entry[0],
                           ip_location=[entry[2]],
                           resolution_date=entry[1],
                           resolution_source=source,
                           query_keyword=indicator,
                           query_date=current_time)
                for entry in cleaned
            ])
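To make the cleanup step concrete, a hypothetical PassiveTotal resolution entry and the tuples the loop emits for a domain indicator; the field values are invented for illustration:

entry = {"value": "93.184.216.34", "firstSeen": "2015-01-01",
         "lastSeen": "2015-06-01", "country": "US"}
# For indicator_type == "domain" the cleanup loop appends:
#   ("93.184.216.34", "2015-01-01", "US")   # firstSeen row
#   ("93.184.216.34", "2015-06-01", "US")   # lastSeen row, kept because it
#                                           # differs from firstSeen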