def get_retro_hunt_task_current_date(task_uuid):
    """Return the date the retro hunt task should currently process.

    Resumes from the date of the last analyzed item when one exists;
    otherwise falls back to the task's configured start date.
    """
    last_analyzed = get_retro_hunt_last_analyzed(task_uuid)
    if last_analyzed:
        return item_basic.get_item_date(last_analyzed)
    return get_retro_hunt_task_date_from(task_uuid)
def remove_tracked_item(item_id):
    """Detach an item from every tracker that matched it.

    Removes both directions of the item<->tracker set membership and,
    when the item was actually present, decrements the tracker's
    per-date match counter.
    """
    item_date = item_basic.get_item_date(item_id)
    for tracker_uuid in get_item_all_trackers_uuid(item_id):
        r_serv_tracker.srem(f'obj:trackers:item:{item_id}', tracker_uuid)
        was_tracked = r_serv_tracker.srem(f'tracker:item:{tracker_uuid}:{item_date}', item_id)
        # Only adjust the daily stat if the item was really in the set
        if was_tracked:
            r_serv_tracker.zincrby(f'tracker:stat:{tracker_uuid}', int(item_date), -1)
def process_json_meta(self, process, item_id):
    """Process the feeder JSON 'meta' field for a twitter item.

    Maps the tweet id onto the item and records a username correlation
    dated with the item's date.
    """
    meta = self.json_item['meta']
    tweet_id = str(meta['twitter:tweet_id'])
    item_basic.add_map_obj_id_item_id(tweet_id, item_id, 'twitter_id')
    item_date = item_basic.get_item_date(item_id)
    Username.save_item_correlation('twitter', str(meta['twitter:id']), item_id, item_date)
    return None
def add_tracked_item(tracker_uuid, item_id):
    """Register a tracker match on an item and maintain daily stats.

    Adds both directions of the item<->tracker set membership; the
    first time this item is recorded for the tracker/date pair, the
    per-date counter is bumped, and the first item of a new date widens
    the tracker's date range.
    """
    item_date = item_basic.get_item_date(item_id)
    # Item side of the relation
    r_serv_tracker.sadd(f'obj:trackers:item:{item_id}', tracker_uuid)
    is_new = r_serv_tracker.sadd(f'tracker:item:{tracker_uuid}:{item_date}', item_id)
    # Count items per date only once per item
    if is_new == 1:
        nb_items = r_serv_tracker.zincrby(f'tracker:stat:{tracker_uuid}', int(item_date), 1)
        if nb_items == 1:
            update_tracker_daterange(tracker_uuid, item_date)
def yara_rules_match(data):
    """Yara match callback: record the hit, push tags and send mail alerts.

    NOTE(review): relies on module-level names (`item_id`, `p`,
    `mail_body_template`, `full_item_url`) set by the surrounding
    module before the scan — confirm they are always bound.
    """
    tracker_uuid = data['namespace']
    item_date = item_basic.get_item_date(item_id)
    Tracker.add_tracked_item(tracker_uuid, item_id, item_date)
    # Propagate the tracker's tags to the Tags queue
    for tag in Tracker.get_tracker_tags(tracker_uuid):
        p.populate_set_out('{};{}'.format(tag, item_id), 'Tags')
    # Email notifications, when addresses are configured for this tracker
    recipients = Tracker.get_tracker_mails(tracker_uuid)
    if recipients:
        subject = Tracker.get_email_subject(tracker_uuid)
        body = mail_body_template.format(data['rule'], item_id, full_item_url, item_id)
        for recipient in recipients:
            NotificationHelper.sendEmailNotification(recipient, subject, body)
    return yara.CALLBACK_CONTINUE
def delete_obj_relationship(self, subtype, obj_id, obj2_type, obj2_id):
    """Remove the correlation between this object and a domain or item.

    Other object types are ignored.
    """
    if obj2_type == 'domain':
        self.delete_domain_correlation(obj2_id, subtype, obj_id)
    elif obj2_type == 'item':
        # Item correlations are stored per item date
        obj2_date = item_basic.get_item_date(obj2_id)
        self.delete_item_correlation(subtype, obj_id, obj2_id, obj2_date)
def get_date(self, separator=False):
    """Return this item's date, optionally formatted with separators."""
    return item_basic.get_item_date(self.id, add_separator=separator)
def get_item_date(item_id, add_separator=False):
    """Return the date of an item; thin wrapper around item_basic."""
    return item_basic.get_item_date(item_id, add_separator=add_separator)
def main():
    """DomClassifier worker loop: classify domains found in text items.

    Pulls item ids from the module's input queue, extracts and validates
    domains from text content, optionally forwards DNS records to the
    passive-DNS output, and emits warnings for domains localized to the
    configured country code / ccTLD.
    """
    # Publisher (logging) setup
    publisher.port = 6380
    publisher.channel = "Script"
    config_section = 'DomClassifier'
    p = Process(config_section)
    # DNS server used by the domain classifier for validation
    addr_dns = p.config.get("DomClassifier", "dns")
    publisher.info("""ZMQ DomainClassifier is Running""")
    c = DomainClassifier.domainclassifier.Extract(rawtext="", nameservers=[addr_dns])
    # Country code and ccTLD expression used to flag localized domains
    cc = p.config.get("DomClassifier", "cc")
    cc_tld = p.config.get("DomClassifier", "cc_tld")
    while True:
        try:
            item_id = p.get_from_set()
            if item_id is None:
                # Empty queue: idle briefly before polling again
                publisher.debug("Script DomClassifier is idling 1s")
                time.sleep(1)
                continue
            item_content = item_basic.get_item_content(item_id)
            mimetype = item_basic.get_item_mimetype(item_id)
            item_basename = item_basic.get_basename(item_id)
            item_source = item_basic.get_source(item_id)
            item_date = item_basic.get_item_date(item_id)
            # Only text items are scanned for domains
            if mimetype.split('/')[0] == "text":
                c.text(rawtext=item_content)
                c.potentialdomain()
                c.validdomain(passive_dns=True, extended=False)
                print(c.vdomain)
                # Forward validated DNS records when passive DNS is enabled
                if c.vdomain and d4.is_passive_dns_enabled():
                    for dns_record in c.vdomain:
                        p.populate_set_out(dns_record)
                # Domains matching the configured ccTLD expression
                localizeddomains = c.include(expression=cc_tld)
                if localizeddomains:
                    print(localizeddomains)
                    publisher.warning(
                        f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {cc_tld};{item_id}"
                    )
                # Domains localized to the configured country code
                localizeddomains = c.localizedomain(cc=cc)
                if localizeddomains:
                    print(localizeddomains)
                    publisher.warning(
                        f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {cc};{item_id}"
                    )
        except IOError:
            # NOTE(review): if the IOError fires before item_source /
            # item_date / item_basename are assigned in this iteration,
            # these references raise NameError (or reuse stale values
            # from a previous iteration) — confirm and harden.
            print("CRC Checksum Failed on :", item_id)
            publisher.error(
                f"Duplicate;{item_source};{item_date};{item_basename};CRC Checksum Failed"
            )
def get_obj_date(object_type, object_id):
    """Return the date of an object as an int, or None for non-item types."""
    if object_type != "item":
        return None
    return int(item_basic.get_item_date(object_id))
def save_retro_hunt_match(task_uuid, id, object_type='item'):
    """Record a retro hunt match for a task, bucketed by the item's date.

    NOTE: the parameter name `id` shadows the builtin but is kept for
    interface compatibility with existing callers.
    """
    item_date = item_basic.get_item_date(id)
    is_new = r_serv_tracker.sadd(f'tracker:retro_hunt:task:item:{task_uuid}:{item_date}', id)
    # First time this item is recorded for the task/date: bump the counter
    if is_new == 1:
        r_serv_tracker.zincrby(f'tracker:retro_hunt:task:stat:{task_uuid}', int(item_date), 1)