def create_IOCs(self, save=True):
    """Create domain and IP IOCs for this weblog (only if it has none yet).

    Args:
        save: passed through to ``IOC.objects.create_IOC_from_weblog``;
            when False the IOC is presumably built but not persisted —
            TODO confirm against the manager implementation.

    Returns:
        ``(ioc_domain, ioc_ip)`` — either element may be ``None`` if its
        creation failed (the error is logged, not raised). Returns
        implicit ``None`` (not a tuple) when IOCs already exist; kept
        as-is for backward compatibility with existing callers.
    """
    if not self.ioc_set.all():
        key_url = AnalysisSession.INFO_ATTRIBUTES[self.analysis_session.type_file]['url']
        if key_url in self.attributes_obj:
            url = self.attributes_obj[key_url]
        else:
            # No URL attribute for this file type: nothing to extract.
            return None, None
        ioc_domain = None
        ioc_ip = None
        try:
            d_type, domain = get_data_from_url(url)
            if not domain:
                raise Exception("Domain value cannot be None")
            else:
                ioc_domain = IOC.objects.create_IOC_from_weblog(domain, d_type, self, save)
        except Exception as ex:
            logger.error("Error creating domain IOC , weblog-id " + str(self.id) + " | " + str(ex))
        try:
            ip = self.ip
            if not ip:
                raise Exception("IP value cannot be None")
            else:
                ioc_ip = IOC.objects.create_IOC_from_weblog(ip, 'ip', self, save)
        # BUG FIX: this was a bare ``except:`` that logged ``ex`` — the
        # exception variable of the PREVIOUS try block. In Python 3 that
        # name is unbound after its handler exits, so a failure here
        # raised NameError (or logged the wrong error). Bind our own.
        except Exception as ex:
            logger.error("Error creating IP IOC , weblog-id " + str(self.id) + " | " + str(ex))
        return ioc_domain, ioc_ip
def migrate_weblogs(apps, schema_editor):
    """Data migration: back-fill IOC rows and weblog↔IOC relations.

    For every analysis session, parses each weblog's JSON ``attributes``,
    extracts the URL-derived domain and the destination IP, creates the
    corresponding IOCs (if missing) and links them to the weblog.
    Sessions with an empty ``type_file`` are skipped with an error log.
    Runs inside a single transaction.
    """
    with transaction.atomic():
        with connection.cursor() as cursor:
            cursor.execute("SELECT id,type_file FROM manati_analysis_sessions")
            analysis_sessions = dictfetchall(cursor)
            for analysis_session in analysis_sessions:
                type_file = analysis_session['type_file']
                if type_file == '':
                    logger.error("ERROR TYPE_FILE EMPTY AS_ID " + str(analysis_session['id']))
                    continue
                weblogs = get_all_weblogs(analysis_session_id=analysis_session['id'])
                # Attribute keys differ per input file type (cisco, bro, ...).
                key_url = AnalysisSession.INFO_ATTRIBUTES[type_file]['url']
                key_ip = AnalysisSession.INFO_ATTRIBUTES[type_file]['ip_dist']
                for weblog in weblogs:
                    weblog_id = weblog['id']
                    attributes = json.loads(weblog['attributes'])
                    try:
                        url = attributes[key_url]
                        ip = attributes[key_ip]
                        d_type, domain = get_data_from_url(url)
                    except Exception as ex:
                        # BUG FIX: ``weblog_id`` is an int from the DB; the
                        # original concatenated it to a str, so the error
                        # handler itself raised TypeError. Wrap in str().
                        logger.error("Error processing weblog id " + str(weblog_id) + " | " + str(ex))
                        continue
                    ioc_id1 = check_and_create_ioc(domain, d_type)
                    ioc_id2 = check_and_create_ioc(ip, 'ip')
                    create_relation_weblog_ioc(weblog_id, ioc_id1, ioc_id2)
def domain(self):
    """Return the domain extracted from this weblog's URL attribute.

    Side effect: if the owning analysis session has an empty
    ``type_file``, it is defaulted to the cisco file type and saved.
    """
    session = self.analysis_session
    if session.type_file == '':
        # Legacy rows may lack a file type; default and persist it.
        session.type_file = AnalysisSession.TYPE_FILES.cisco_file
        session.save()
    url_key = AnalysisSession.INFO_ATTRIBUTES[session.type_file]['url']
    _, extracted_domain = get_data_from_url(self.attributes_obj[url_key])
    return extracted_domain
def get_domain_name():
    """Extract the registered domain name from the raw whois output.

    Scans the enclosing scope's ``raw`` lines for the first
    ``Domain Name:`` entry; if none yields a value, falls back to
    deriving the domain from ``self.query_node``. Returns '' when
    nothing can be determined.
    """
    pattern = r'^.*Domain Name:.*$'
    indices = [i for i, x in enumerate(raw) if re.search(pattern, x)]
    fields = str(raw[indices[0]]).split(':') if len(indices) > 0 else []
    # BUG FIX: guarding ``fields[1]`` with ``len(fields) > 0`` could
    # IndexError on a single-element split; index 1 needs len > 1.
    domain_name = fields[1].strip() if len(fields) > 1 else ''
    if not domain_name:
        # No usable whois field: derive the domain from the queried node.
        _, domain_name = get_data_from_url(self.query_node)
        domain_name = domain_name if domain_name else ''
    return domain_name
def get_features_info(content_object, url_or_ip):
    """Return whois feature info for a URL or IP.

    Domains are looked up through ``WhoisConsult``; IPs are not yet
    supported and yield an empty dict. An unparseable input also yields
    an empty dict; an unrecognized node type returns ``None``
    (preserved from the original fall-through).
    """
    node_type, node = get_data_from_url(url_or_ip)
    if not node:
        return {}
    if node_type == 'domain':
        return WhoisConsult.get_features_info_by_domain(content_object, node)
    if node_type == 'ip':
        # TO-DO IP version
        return {}
    # Unknown node type: implicit None, matching the original behavior.
def get_features_info_by_set_url(content_object, urls_or_ips):
    """Return whois feature info for a collection of URLs/IPs.

    Splits inputs into domains and IPs, reuses existing ``WhoisConsult``
    rows for known domains, bulk-creates the missing ones, and maps each
    query node to its feature-info dict. IPs are not yet supported and
    map to ``{}``.

    Returns:
        dict mapping query node -> feature-info dict.
    """
    query_ips = []
    query_domains = []
    result = {}
    for url_or_ip in urls_or_ips:
        query_type, query_node = get_data_from_url(url_or_ip)
        if query_type == 'ip':
            query_ips.append(query_node)
        elif query_type == 'domain':
            query_domains.append(query_node)
            result[query_node] = {}
    with transaction.atomic():
        # Existing domain consults: reuse their cached feature info.
        existing_objs = WhoisConsult.objects.filter(query_node__in=query_domains,
                                                    query_type='domain')
        # Set (was a list): O(1) membership instead of O(n) per domain.
        already_consulted = set()
        for whois_obj in existing_objs:
            result[whois_obj.query_node] = whois_obj.check_features_info()
            already_consulted.add(whois_obj.query_node)
        new_objs = [WhoisConsult(query_node=query_node,
                                 query_type='domain',
                                 content_object=content_object)
                    for query_node in query_domains
                    if query_node not in already_consulted]
        # NOTE(review): on backends without RETURNING, bulk_create may not
        # set primary keys on new_objs — confirm check_features_info does
        # not depend on pk (preserved from the original implementation).
        WhoisConsult.objects.bulk_create(new_objs)
        for whois_obj in new_objs:
            result[whois_obj.query_node] = whois_obj.check_features_info()
    # ip TO-DO by IP — IPs are currently reported with empty info.
    for query_node in query_ips:
        result[query_node] = {}
    return result