def feeder(message, count=0):
    """Dispatch an export message ("tag;item_id") to TheHive and/or MISP.

    :param message: str, "tag;item_id" pair taken from the export queue
    :param count: int, number of times this message has already been retried;
                  after 10 failed existence checks the message is dropped
    :return: 0 when the message is deferred or dropped, otherwise None
    """
    # Nothing to do unless at least one export backend is enabled.
    if flag_the_hive or flag_misp:
        tag, item_id = message.split(';')
        ## FIXME: remove it
        # Item not (yet) saved on disk: retry later up to 10 times, then drop.
        if not item_basic.exist_item(item_id):
            if count < 10:
                # NOTE(review): redis-py >= 3.0 expects zincrby(name, amount, value);
                # this call uses the legacy (name, value, amount) order — confirm
                # the redis client version in use.
                r_serv_db.zincrby('mess_not_saved_export', message, 1)
                return 0
            else:
                # Give up: remove from the retry set and log the failure.
                r_serv_db.zrem('mess_not_saved_export', message)
                print('Error: {} do not exist, tag= {}'.format(item_id, tag))
                return 0
        source = item_basic.get_source(item_id)

        # TheHive export: only when the API is configured, auto-alerts are
        # enabled, and the tag is whitelisted.
        if HiveApi != False:
            if int(r_serv_db.get('hive:auto-alerts')) == 1:
                if r_serv_db.sismember('whitelist_hive', tag):
                    create_the_hive_alert(source, item_id, tag)
            else:
                print('hive, auto alerts creation disable')
        # MISP export: same gating pattern (auto-events flag + tag whitelist).
        if flag_misp:
            if int(r_serv_db.get('misp:auto-events')) == 1:
                if r_serv_db.sismember('whitelist_misp', tag):
                    misp_wrapper.pushToMISP(uuid_ail, item_id, tag)
            else:
                print('misp, auto events creation disable')
def get_retro_hunt_task_nb_src_done(task_uuid, sources=None):
    """Return how many sources of a retro-hunt task have been fully analyzed.

    The position of the last analyzed item's source inside the (sorted)
    source list is used as the count of completed sources.

    :param task_uuid: retro-hunt task identifier
    :param sources: optional iterable of sources; when empty/None the task's
                    own sorted source list is fetched
    :return: int, index of the last analyzed source (0 when nothing was
             analyzed yet or the source is not in the list)
    """
    # Fix: the original used a mutable default argument (sources=[]);
    # use a None sentinel instead (behavior is unchanged — [] is falsy).
    if not sources:
        sources = list(get_retro_hunt_task_sources(task_uuid, r_sort=True))
    else:
        sources = list(sources)
    last_id = get_retro_hunt_last_analyzed(task_uuid)
    if last_id:
        last_source = item_basic.get_source(last_id)
        try:
            nb_src_done = sources.index(last_source)
        except ValueError:
            # Last source no longer present in the list: treat as no progress.
            nb_src_done = 0
    else:
        nb_src_done = 0
    return nb_src_done
def get_retro_hunt_dir_day_to_analyze(task_uuid, date, filter_last=False, sources=None):
    """Return the set of per-source directories to analyze for a given day.

    :param task_uuid: retro-hunt task identifier
    :param date: day as a YYYYMMDD string
    :param filter_last: when True, skip sources that precede the source of the
                        last analyzed item (resume support)
    :param sources: optional iterable of sources; when empty/None the task's
                    own sorted source list is fetched
    :return: set of "source/YYYY/MM/DD" directory paths
    """
    # Fix: the original used a mutable default argument (sources=[]);
    # use a None sentinel instead (behavior is unchanged — [] is falsy).
    if not sources:
        sources = get_retro_hunt_task_sources(task_uuid, r_sort=True)

    # filter last: drop every source that comes before the one currently
    # being processed, so a resumed task does not redo finished sources.
    if filter_last:
        last = get_retro_hunt_last_analyzed(task_uuid)
        if last:
            curr_source = item_basic.get_source(last)
            # remove processed sources (everything before curr_source;
            # if curr_source is absent, all sources end up removed)
            set_sources = sources.copy()
            for source in sources:
                if source != curr_source:
                    set_sources.remove(source)
                else:
                    break
            sources = set_sources

    # return all dirs by day
    date = f'{date[0:4]}/{date[4:6]}/{date[6:8]}'
    dirs = set()
    for source in sources:
        dirs.add(os.path.join(source, date))
    return dirs
def get_source(item_id):
    """Return the source of *item_id* (thin convenience wrapper around item_basic)."""
    item_source = item_basic.get_source(item_id)
    return item_source
def main():
    """Run the DomClassifier worker loop.

    Pops item ids from the input queue, extracts and validates domains from
    text items, forwards DNS records when passive DNS is enabled, and emits
    warnings for domains located in the configured country / ccTLD.
    Runs forever; idles 1s when the queue is empty.
    """
    publisher.port = 6380
    publisher.channel = "Script"
    config_section = 'DomClassifier'
    p = Process(config_section)
    addr_dns = p.config.get("DomClassifier", "dns")

    publisher.info("""ZMQ DomainClassifier is Running""")
    c = DomainClassifier.domainclassifier.Extract(rawtext="", nameservers=[addr_dns])

    cc = p.config.get("DomClassifier", "cc")
    cc_tld = p.config.get("DomClassifier", "cc_tld")

    # Fix: pre-bind the names referenced in the IOError handler so a failure
    # on the very first statements of the try block cannot raise NameError
    # inside the handler (masking the original IOError).
    item_id = None
    item_basename = None
    item_source = None
    item_date = None

    while True:
        try:
            item_id = p.get_from_set()
            if item_id is None:
                publisher.debug("Script DomClassifier is idling 1s")
                time.sleep(1)
                continue

            item_content = item_basic.get_item_content(item_id)
            mimetype = item_basic.get_item_mimetype(item_id)
            item_basename = item_basic.get_basename(item_id)
            item_source = item_basic.get_source(item_id)
            item_date = item_basic.get_item_date(item_id)

            # Only text items are classified.
            if mimetype.split('/')[0] == "text":
                c.text(rawtext=item_content)
                c.potentialdomain()
                c.validdomain(passive_dns=True, extended=False)
                print(c.vdomain)

                # Forward validated DNS records when passive DNS export is on.
                if c.vdomain and d4.is_passive_dns_enabled():
                    for dns_record in c.vdomain:
                        p.populate_set_out(dns_record)

                # Domains under the configured ccTLD.
                localizeddomains = c.include(expression=cc_tld)
                if localizeddomains:
                    print(localizeddomains)
                    publisher.warning(
                        f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {cc_tld};{item_id}"
                    )
                # Domains geolocated in the configured country code.
                localizeddomains = c.localizedomain(cc=cc)
                if localizeddomains:
                    print(localizeddomains)
                    publisher.warning(
                        f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {cc};{item_id}"
                    )
        except IOError:
            print("CRC Checksum Failed on :", item_id)
            publisher.error(
                f"Duplicate;{item_source};{item_date};{item_basename};CRC Checksum Failed"
            )