def analyze(self, line):
    line = line.strip()
    sline = line.split()
    try:
        if line[0] != '#' and len(sline) > 2:  # ignore comments and entries with no clear reference
            if sline[0].isdigit():
                del sline[0]  # remove the useless first field
            _hostname = Hostname(hostname=sline[0])
            evil = {}
            evil['source'] = self.name
            evil['id'] = md5.new(sline[0] + sline[1]).hexdigest()
            evil['description'] = sline[1]  # malware, EK, etc.
            evil['reference'] = sline[2]  # GG safe browsing, blog, other blacklist, etc.
            if len(sline) > 3:  # add the last date of inclusion in the feed (guard against short lines)
                if sline[3] == 'relisted' and len(sline) > 4:
                    evil['date_added'] = datetime.datetime.strptime(sline[4], "%Y%m%d")
                else:
                    evil['date_added'] = datetime.datetime.strptime(sline[3], "%Y%m%d")
            _hostname.add_evil(evil)
            _hostname.seen(first=evil.get('date_added'))  # may be None when no date was parsed
            self.commit_to_db(_hostname)
    except Exception, e:
        toolbox.debug_output(str(e), type='error')
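# Illustrative lines in the format analyze() expects (hypothetical entries, not
# from the real feed): whitespace-separated fields with an optional numeric
# first field, then hostname, description, reference, and an optional YYYYMMDD
# date that may be preceded by 'relisted'.
sample_lines = [
    "evil.example.com exploitkit http://blog.example.org/writeup 20140302",
    "17 bad.example.net malware http://lists.example.com/bl relisted 20140215",
]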
def content(self):
    # Check if the session packets are set to 0 (i.e. session packets are not loaded in memory)
    if not self.dns_requests:
        # Try to load results from database
        debug_output("Loading entry from DB")
        self.dns_requests = self.load_entry()
        if not self.dns_requests:
            debug_output("No results in DB, processing PCAP")
            filename = self.session.pcap_filename
            self.session.pkts = sniff(stopper=self.session.stop_sniffing,
                                      filter=self.session.filter,
                                      prn=self.on_packet,
                                      stopperTimeout=1,
                                      offline=self.session.engine.setup['SNIFFER_DIR'] + "/" + filename)
            # now that everything has been processed, save the results to DB
            self.save_entry(self.dns_requests)

    content = "<table class='table table-condensed'><tr><th>Query</th><th>Answers</th><th>Count</th></tr>"
    for q in self.dns_requests:
        content += "<tr><td>{}</td><td>{}</td><td>{}</td></tr>".format(
            q, ", ".join(self.dns_requests[q]['answers']), self.dns_requests[q]['count'])
    content += "</table>"
    return content
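# Shape of self.dns_requests assumed by the HTML table above (illustrative
# values): each query maps to its resolved answers and a hit count.
dns_requests = {
    "example.com": {"answers": ["93.184.216.34"], "count": 3},
    "ns.example.net": {"answers": ["192.0.2.7", "192.0.2.8"], "count": 1},
}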
def __init__(self, analytics_instance):
    super(AnalyticsMessenger, self).__init__()
    self.name = 'analytics'
    self.analytics_instance = analytics_instance
    self.subscribe_channel('analytics', self.message_handler)
    # self.status_update()
    debug_output("[+] Analytics Messenger started")
def analytics(self):
    debug_output("(host analytics for %s)" % self.value)
    new = []
    # only resolve A and CNAME records for subdomains
    if toolbox.is_subdomain(self.value):
        dns_info = toolbox.dns_get_records(self.value, ['A', 'CNAME'])
    else:
        dns_info = toolbox.dns_get_records(self.value)

    for rtype in dns_info:
        for entry in dns_info[rtype]:
            art = toolbox.find_artifacts(entry)
            for t in art:
                for findings in art[t]:
                    if t == 'hostnames':
                        new.append((rtype, Hostname(findings)))
                    if t == 'urls':
                        new.append((rtype, Url(findings)))
                    if t == 'ips':
                        new.append((rtype, Ip(findings)))

    # is this hostname a subdomain?
    if len(self.value.split(".")) > 2:
        domain = toolbox.is_subdomain(self.value)
        if domain:
            new.append(('domain', Hostname(domain)))

    self['last_analysis'] = datetime.datetime.utcnow()
    self['next_analysis'] = self['last_analysis'] + datetime.timedelta(seconds=self['refresh_period'])
    return new
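# Shapes assumed by the loops above (illustrative values, not pinned down by
# the original source): dns_get_records returns records keyed by type, and
# find_artifacts buckets each entry's strings into hostnames / urls / ips.
dns_info = {'A': ['192.0.2.1'], 'CNAME': ['alias.example.com']}
art = {'ips': ['192.0.2.1'], 'hostnames': [], 'urls': []}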
def __init__(self):
    super(SnifferMessenger, self).__init__()
    self.name = 'sniffer'
    self.snifferengine = None
    self.subscribe_channel('sniffer-commands', self.command_handler)
    self.command_lock = threading.Lock()
    debug_output("[+] Sniffer Messenger started")
def analytics(self):
    debug_output("(url analytics for %s)" % self['value'])
    new = []

    # link with hostname: find path, scheme and hostname
    path, scheme, hostname = toolbox.split_url(self['value'])
    self['path'] = path
    self['scheme'] = scheme
    self['hostname'] = hostname

    if toolbox.is_ip(self['hostname']):
        new.append(('host', Ip(toolbox.is_ip(self['hostname']))))
    elif toolbox.is_hostname(self['hostname']):
        new.append(('host', Hostname(toolbox.is_hostname(self['hostname']))))
    else:
        debug_output("No hostname found for %s" % self['value'], type='error')
        return []  # return an empty list rather than None so callers can iterate

    self['last_analysis'] = datetime.datetime.utcnow()
    return new
def get_pcap(self):
    debug_output("Generating PCAP (length: %s)" % len(self.pkts))
    if len(self.pkts) == 0:
        return ""
    wrpcap("/tmp/temp.cap", self.pkts)
    pcap = open("/tmp/temp.cap", "rb").read()  # read the capture back as binary
    return pcap
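# A collision-safe variant of the same idea (a sketch, not the original code):
# write each capture to a unique temporary file instead of a fixed /tmp path,
# so concurrent sessions cannot clobber each other's output.
import os
import tempfile
from scapy.all import wrpcap

def pkts_to_pcap_bytes(pkts):
    fd, path = tempfile.mkstemp(suffix=".cap")
    os.close(fd)
    try:
        wrpcap(path, pkts)
        return open(path, "rb").read()
    finally:
        os.remove(path)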
def generate_pcap(self):
    if len(self.pkts) > 0:
        debug_output("Generating PCAP for %s (length: %s)" % (self.name, len(self.pkts)))
        filename = Malcom.config['SNIFFER_DIR'] + "/" + self.pcap_filename
        wrpcap(filename, self.pkts)
        debug_output("Saving session to DB")
        self.analytics.data.save_sniffer_session(self)
def send_nodes(self, elts=[], edges=[]):
    data = {"querya": {}, "nodes": elts, "edges": edges, "type": "nodeupdate"}
    try:
        if (len(elts) > 0 or len(edges) > 0) and self.ws:
            self.ws.send(dumps(data))
    except Exception, e:
        debug_output("Could not send nodes: %s" % e)
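# Wire shape of the nodeupdate message sent above (illustrative node value;
# json.dumps is used here for illustration, whatever dumps() the module binds):
import json
payload = json.dumps({"querya": {}, "nodes": [{"value": "example.com"}],
                      "edges": [], "type": "nodeupdate"})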
def __init__(self, feedengine_instance):
    super(FeedsMessenger, self).__init__()
    self.name = "feeds"
    self.feedengine_instance = feedengine_instance
    debug_output("[+] Feed messenger started")
    self.subscribe_channel('feeds', self.message_handler)
def subscribe_channel(self, channel, callback):
    debug_output("[+] Subscribing to %s" % channel)
    t = threading.Thread(target=self.__listen_on_channel, args=(channel, callback))
    t.setDaemon(True)
    t.start()
    return t
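# A plausible sketch of the __listen_on_channel worker referenced above
# (hypothetical reconstruction, not the original body; assumes self.r is a
# redis.Redis() client): block on a pubsub channel and hand every message
# payload to the callback.
def __listen_on_channel(self, channel, callback):
    pubsub = self.r.pubsub()
    pubsub.subscribe(channel)
    for item in pubsub.listen():
        if item['type'] == 'message':
            callback(item['data'])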
def analytics(self):
    debug_output("(url analytics for %s)" % self['value'])
    new = []

    # link with hostname: find path, scheme and hostname
    path, scheme, hostname = toolbox.split_url(self['value'])
    self['path'] = path
    self['scheme'] = scheme
    self['hostname'] = hostname

    if toolbox.is_ip(self['hostname']):
        new.append(('host', Ip(toolbox.is_ip(self['hostname']))))
    elif toolbox.is_hostname(self['hostname']):
        new.append(('host', Hostname(toolbox.is_hostname(self['hostname']))))
    else:
        debug_output("No hostname found for %s" % self['value'], type='error')
        return []

    self['last_analysis'] = datetime.datetime.utcnow()
    # this information is constant and does not change through time;
    # we'll have to change this when we check for URL availability
    self['next_analysis'] = None
    return new
def run_scheduled_feeds(self):
    for feed_name in [f for f in self.feeds
                      if self.feeds[f].next_run < datetime.utcnow() and self.feeds[f].enabled]:
        debug_output('Starting thread for feed %s...' % feed_name)
        self.run_feed(feed_name)

    for t in self.threads:
        if self.threads[t].is_alive():
            self.threads[t].join()
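# Scheduling model assumed above (next_run = last_run + run_every, as set in
# load_feeds below): a feed becomes due once next_run falls behind utcnow().
from datetime import datetime, timedelta
last_run = datetime.utcnow() - timedelta(hours=13)
next_run = last_run + timedelta(hours=12)  # a run_every of 12 hours
print next_run < datetime.utcnow()         # True -> feed is due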
def save_pcap(self):
    if self.packet_count > 0 and not self.pcap:
        debug_output("Generating PCAP for %s (length: %s)" % (self.name, len(self.pkts)))
        filename = self.engine.setup['SNIFFER_DIR'] + "/" + self.pcap_filename
        wrpcap(filename, self.pkts)
        self.pcap = True
def load_yara_rules(self, path): debug_output("Compiling YARA rules from %s" % path) if path[-1] != '/': path += '/' # add trailing slash if not present filepaths = {} for file in os.listdir(path): filepaths[file] = path + file debug_output("Loaded %s YARA rule files in %s" % (len(filepaths), path)) return yara.compile(filepaths=filepaths)
def load_pcap(self):
    filename = self.pcap_filename
    debug_output("Loading PCAP from %s " % filename)
    self.pkts += self.sniff(stopper=self.stop_sniffing,
                            filter=self.filter,
                            prn=self.handlePacket,
                            stopperTimeout=1,
                            offline=Malcom.config['SNIFFER_DIR'] + "/" + filename)
    debug_output("Loaded %s packets from file." % len(self.pkts))
    return True
def broadcast(self, msg, channel, type="bcast"):
    queryid = str(random.random())
    message = json.dumps({'msg': msg, 'queryid': queryid, 'src': self.name, 'type': type})
    try:
        # print "broadcast [%s] : %s" % (channel, type)
        self.r.publish(channel, message)
    except Exception, e:
        debug_output("Could not broadcast: %s" % (e), 'error')
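# Minimal consumer sketch for the envelope published above (assumes a local
# redis instance; the 'feeds' channel name is illustrative):
import json
import redis

r = redis.Redis()
pubsub = r.pubsub()
pubsub.subscribe('feeds')
for item in pubsub.listen():
    if item['type'] == 'message':
        envelope = json.loads(item['data'])
        print envelope['src'], envelope['type'], envelope['msg']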
def send_flow_statistics(self, flow):
    data = {}
    data['flow'] = flow.get_statistics()
    data['type'] = 'flow_statistics_update'
    if self.ws:
        try:
            self.ws.send(dumps(data))
        except Exception, e:
            debug_output("Could not send flow statistics: %s" % e)
def run_all_feeds(self):
    debug_output("Running all feeds")
    for feed_name in [f for f in self.feeds if self.feeds[f].enabled]:
        debug_output("Starting thread for feed %s..." % feed_name)
        self.run_feed(feed_name)

    for t in self.threads:
        if self.threads[t].is_alive():
            self.threads[t].join()
def load_feeds(self, activated_feeds):
    globals_, locals_ = globals(), locals()
    feeds_dir = self.configuration['FEEDS_DIR']
    package_name = 'feeds'
    debug_output("Loading feeds in %s" % feeds_dir)

    export_names = []    # initialized outside the loop so exports accumulate across files
    export_classes = []
    for filename in os.listdir(feeds_dir):
        modulename, ext = os.path.splitext(filename)
        if modulename[0] != "_" and ext in ['.py']:
            subpackage = 'Malcom.%s.%s' % (package_name, modulename)
            module = __import__(subpackage, globals_, locals_, [modulename])
            modict = module.__dict__
            names = [name for name in modict if name[0] != '_']
            for n in names:
                if n == 'Feed' or n.lower() not in activated_feeds:
                    continue
                class_n = modict.get(n)
                if issubclass(class_n, Feed) and class_n not in globals_:
                    new_feed = class_n(n)  # create new feed object
                    new_feed.model = self.model  # attach model instance to feed
                    self.feeds[n] = new_feed
                    # always True here since inactive feeds were skipped above;
                    # this may be for show for now
                    self.feeds[n].enabled = n.lower() in activated_feeds
                    export_names.append(n)
                    export_classes.append(class_n)
                    sys.stderr.write(" + Loaded %s...\n" % n)

    # now that feeds are loaded, check their state in the db
    feed_status = self.model.get_feed_progress([f for f in self.feeds])
    for status in feed_status:
        name = status['name']
        self.feeds[name].last_run = status['last_run']
        self.feeds[name].next_run = status['last_run'] + self.feeds[name].run_every

    globals_.update((export_names[i], c) for i, c in enumerate(export_classes))
    return export_names, export_classes
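# A minimal feed this loader would discover (hypothetical skeleton; assumes
# the Feed base class provides the run/commit plumbing, and that per-line
# parsing happens in an analyze() hook as in the analyze() method above):
class ExampleBlocklist(Feed):
    def __init__(self, name):
        super(ExampleBlocklist, self).__init__(name)
        self.description = "Hypothetical example blocklist"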
def load_yara_rules(self, path): debug_output("Compiling YARA rules from %s" % path) if path[-1] != '/': path += '/' # add trailing slash if not present filepaths = {} for file in os.listdir(path): if file.endswith('.yar'): print file filepaths[file] = path + file debug_output("Loaded %s YARA rule files in %s" % (len(filepaths), path)) return yara.compile(filepaths=filepaths)
def __init__(self):
    super(WebMessenger, self).__init__()
    self.name = 'web'
    debug_output("[+] WebMessenger started")
    self.subscribe_channel('analytics', self.analytics_handler)
    self.subscribe_channel('sniffer-data', self.sniffer_data_handler)
    self.websocket_for_session = {}
    self.analytics_ws = None
def send_nodes(self, elts=[], edges=[]):
    for e in elts:
        e['fields'] = e.default_fields
    data = {'querya': {}, 'nodes': elts, 'edges': edges,
            'type': 'nodeupdate', 'session_name': self.name}
    try:
        if len(elts) > 0 or len(edges) > 0:
            self.engine.messenger.broadcast(bson_dumps(data), 'sniffer-data', 'nodeupdate')
    except Exception, e:
        debug_output("Could not send nodes: {}".format(e), 'error')
def load_pcap(self):
    filename = self.pcap_filename
    debug_output("Loading PCAP from %s " % filename)
    self.sniff(stopper=self.stop_sniffing,
               filter=self.filter,
               prn=self.handlePacket,
               stopperTimeout=1,
               offline=self.engine.setup['SNIFFER_DIR'] + "/" + filename)
    debug_output("Loaded %s packets from file." % len(self.pkts))
    return True
def load_modules(self):
    modules_directory = self.engine.setup['MODULES_DIR']
    module_activated = self.engine.setup['ACTIVATED_MODULES']
    modules = []
    for modulename in os.listdir(modules_directory):
        if '.' not in modulename and modulename in module_activated:
            full_filename = "{}/{}/{}.py".format(modules_directory, modulename, modulename)
            debug_output("Loading sniffer module: {}".format(modulename))
            module = imp.load_source(modulename, full_filename)
            modules.append(module.__dict__.get(module.classname)(self))
    return modules
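# A module skeleton this loader can pick up (hypothetical; the contract assumed
# by the loader is a module-level 'classname' attribute naming a class whose
# constructor takes the sniffer session, as the Suricata module below does):
classname = "ExampleModule"

class ExampleModule(object):
    def __init__(self, session):
        self.session = session
        self.name = "examplemodule"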
def run(self):
    self.messenger = FeedsMessenger(self)
    self.shutdown = False
    while not self.shutdown:
        try:
            debug_output("FeedEngine heartbeat")
            if self.scheduler:
                self.run_scheduled_feeds()
            time.sleep(self.period)  # check for due feeds every period seconds
        except KeyboardInterrupt:
            self.shutdown = True
def load_yara_rules(self, path): debug_output("Compiling YARA rules from {}".format(path)) if not path.endswith('/'): path += '/' # add trailing slash if not present filepaths = {} for filename in os.listdir(path): if filename.endswith('.yar'): print filename filepaths[filename] = path + filename debug_output("Loaded {} YARA rule files in {}".format(len(filepaths), path)) return yara.compile(filepaths=filepaths)
def send_nodes(self, elts=[], edges=[]):
    for e in elts:
        e['fields'] = e.display_fields
    data = {'querya': {}, 'nodes': elts, 'edges': edges, 'type': 'nodeupdate'}
    try:
        if (len(elts) > 0 or len(edges) > 0) and self.ws:
            self.ws.send(dumps(data))
    except Exception, e:
        debug_output("Could not send nodes: %s" % e)
def load_feeds(self, activated_feeds):
    globals_, locals_ = globals(), locals()
    feeds_dir = self.configuration['FEEDS_DIR']
    package_name = 'feeds'
    debug_output("Loading feeds in %s" % feeds_dir)

    export_names = []    # initialized outside the loop so exports accumulate across files
    export_classes = []
    for filename in os.listdir(feeds_dir):
        modulename, ext = os.path.splitext(filename)
        if modulename[0] != "_" and ext in ['.py']:
            subpackage = 'Malcom.%s.%s' % (package_name, modulename)
            module = __import__(subpackage, globals_, locals_, [modulename])
            modict = module.__dict__
            names = [name for name in modict if name[0] != '_']
            for n in names:
                if n == 'Feed' or n.lower() not in activated_feeds:
                    continue
                class_n = modict.get(n)
                if issubclass(class_n, Feed) and class_n not in globals_:
                    new_feed = class_n(n)  # create new feed object
                    new_feed.model = self.model  # attach model instance to feed
                    new_feed.engine = self
                    self.feeds[n] = new_feed
                    # always True here since inactive feeds were skipped above;
                    # this may be for show for now
                    self.feeds[n].enabled = n.lower() in activated_feeds
                    export_names.append(n)
                    export_classes.append(class_n)
                    sys.stderr.write(" + Loaded %s...\n" % n)

    # now that feeds are loaded, check their state in the db
    feed_status = self.model.get_feed_progress([f for f in self.feeds])
    for status in feed_status:
        name = status['name']
        self.feeds[name].last_run = status['last_run']
        self.feeds[name].next_run = status['last_run'] + self.feeds[name].run_every

    globals_.update((export_names[i], c) for i, c in enumerate(export_classes))
    return export_names, export_classes
def load_yara_rules(self, path): debug_output("Compiling YARA rules from {}".format(path)) if not path.endswith('/'): path += '/' # add trailing slash if not present filepaths = {} for filename in os.listdir(path): if filename.endswith('.yar'): print filename filepaths[filename] = path + filename debug_output("Loaded {} YARA rule files in {}".format( len(filepaths), path)) return yara.compile(filepaths=filepaths)
def send_pcap(self, pcap_file, directory):
    command = 'pcap-file'
    arguments = {}
    if os.path.isfile(pcap_file) and os.path.isdir(directory):
        arguments["filename"] = pcap_file
        arguments["output-dir"] = directory
        cmdret = self.send_command(command, arguments)
        if cmdret["return"] == "NOK":
            debug_output(json.dumps(cmdret["message"], sort_keys=True,
                                    indent=4, separators=(',', ': ')), 'error')
        else:
            debug_output(json.dumps(cmdret["message"], sort_keys=True,
                                    indent=4, separators=(',', ': ')), 'info')
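# Shape of the unix-socket command sent above and of Suricata's reply, as
# implied by the "return"/"message" checks (paths and message text are
# illustrative placeholders):
request = {"command": "pcap-file",
           "arguments": {"filename": "/tmp/session.pcap",
                         "output-dir": "/tmp/suricata-out"}}
reply = {"return": "OK", "message": "pcap file added"}  # or "NOK" with an error message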
def analytics(self):
    debug_output("(ip analytics for %s)" % self['value'])
    # get geolocation info
    try:
        filepath = os.path.abspath(__file__)  # renamed from 'file' to avoid shadowing the builtin
        datatypes_directory = os.path.dirname(filepath)
        gi = pygeoip.GeoIP(datatypes_directory + '/../auxiliary/geoIP/GeoLiteCity.dat')
        geoinfo = gi.record_by_addr(self.value)
        for key in geoinfo:
            self[key] = geoinfo[key]
    except Exception, e:
        debug_output("Could not get IP info for %s: %s" % (self.value, e), 'error')
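# Standalone pygeoip usage sketch (database path is a placeholder):
import pygeoip
gi = pygeoip.GeoIP('/path/to/GeoLiteCity.dat')
record = gi.record_by_addr('8.8.8.8')  # dict with keys like 'city', 'country_code', 'latitude'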
def analytics(self):
    debug_output("(ip analytics for %s)" % self['value'])
    new = []
    # get reverse hostname
    hostname = toolbox.reverse_dns(self['value'])
    if hostname:
        new.append(('reverse', Hostname(hostname)))
    self['last_analysis'] = datetime.datetime.utcnow()
    self['next_analysis'] = self['last_analysis'] + datetime.timedelta(seconds=self['refresh_period'])
    return new
def __init__(self, name, remote_addr, filter, engine, id=None, intercept_tls=False, ws=None, filter_restore=None):
    self.id = id
    self.engine = engine
    self.model = engine.model
    self.date_created = datetime.datetime.utcnow()
    self.name = name
    self.ws = ws
    self.ifaces = self.engine.setup['IFACES']

    filter_ifaces = ""
    for i in self.ifaces:
        if self.ifaces[i] == "Not defined":
            continue
        filter_ifaces += " and not host %s " % self.ifaces[i]
    self.filter = "ip and not host 127.0.0.1 and not host %s %s" % (remote_addr, filter_ifaces)
    # self.filter = "ip and not host 127.0.0.1 and not host %s" % (remote_addr)
    if filter != "":
        self.filter += " and (%s)" % filter

    self.stopSniffing = False
    if filter_restore:
        self.filter = filter_restore

    self.thread = None
    self.thread_active = False
    self.pcap = False
    self.pcap_filename = "%s-%s.pcap" % (self.id, self.name)  # TODO CHANGE THIS AND MAKE IT SECURE
    self.pkts = []
    self.packet_count = 0

    self.nodes = {}
    self.edges = {}

    # flows
    self.flows = {}

    self.intercept_tls = intercept_tls
    if self.intercept_tls:
        debug_output("[+] Intercepting TLS")
        self.tls_proxy = self.engine.tls_proxy
        # self.tls_proxy.add_flows(self.flows)
    else:
        debug_output("[-] No TLS interception")
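# Illustrative result of the BPF assembly above, with remote_addr='192.0.2.10',
# one iface at '198.51.100.5' and a user filter of 'port 53' (note the double
# spaces left by the string building):
example_bpf = ("ip and not host 127.0.0.1 and not host 192.0.2.10 "
               " and not host 198.51.100.5  and (port 53)")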
def __init__(self, analytics, name, remote_addr, filter, intercept_tls=False, ws=None, filter_restore=None):
    self.analytics = analytics
    self.name = name
    self.ws = ws
    self.ifaces = Malcom.config['IFACES']

    filter_ifaces = ""
    for i in self.ifaces:
        filter_ifaces += " and not host %s " % self.ifaces[i]
    self.filter = "ip and not host 127.0.0.1 and not host %s %s" % (remote_addr, filter_ifaces)
    if filter != "":
        self.filter += " and (%s)" % filter

    self.stopSniffing = False
    if filter_restore:
        self.filter = filter_restore

    self.thread = None
    self.public = False
    self.pcap = False
    self.pcap_filename = self.name + '.pcap'
    self.pkts = []
    self.packet_count = 0

    # nodes, edges, their values, their IDs
    self.nodes = []
    self.edges = []
    self.nodes_ids = []
    self.nodes_values = []
    self.nodes_pk = []
    self.edges_ids = []

    # flows
    self.flows = {}

    self.intercept_tls = intercept_tls
    if self.intercept_tls:
        debug_output("[+] Intercepting TLS")
        self.tls_proxy = Malcom.tls_proxy
        self.tls_proxy.add_flows(self.flows)
    else:
        debug_output("[-] No TLS interception")
def analytics(self):
    debug_output("(ip analytics for %s)" % self['value'])
    new = []
    # get reverse hostname, keep it only if it is a valid hostname
    hostname = toolbox.reverse_dns(self['value'])
    if hostname and toolbox.is_hostname(hostname):
        new.append(('reverse', Hostname(hostname)))
    self['last_analysis'] = datetime.datetime.utcnow()
    self['next_analysis'] = self['last_analysis'] + datetime.timedelta(seconds=self['refresh_period'])
    return new
def sniffer_data_handler(self, msg):
    msg = json.loads(msg)
    queryid = msg['queryid']
    src = msg['src']
    msg_type = msg.get('type', False)

    if msg_type == "nodeupdate":
        data = json.loads(msg['msg'])  # data = {nodes, edges, session_name}
        session_name = data['session_name']
        try:
            send_msg(self.websocket_for_session[session_name], data, type=data['type'])
        except Exception, e:
            debug_output('Error sending node update: %s' % e, 'error')
def __init__(self, session):
    self.session = session
    self.display_name = "Suricata"
    self.name = "suricata"
    self.pull_content = 'suricata'
    super(Suricata, self).__init__()

    interface, mode, conf_suricata, socket_unix = self.setup()
    self.result = super(Suricata, self).load_entry()
    self.reload = False
    self.actions = Actions(interface=interface, conf_sniffer=conf_suricata,
                           mode=mode, socket_unix=socket_unix)

    if not self.result:  # or self.result['timeout'] < datetime.datetime.utcnow():
        self.reload = True

    if self.reload:
        debug_output('Suricata Start')
        self.actions.start()
        sleep(10)
def location_info(self):
    # get geolocation info (v2)
    if geoip:
        try:
            geoinfo = geoip_reader.city(self.value)
            self['city'] = geoinfo.city.name
            self['postal_code'] = geoinfo.postal.code
            self['time_zone'] = geoinfo.location.time_zone
            self['country_code'] = geoinfo.country.iso_code
            self['latitude'] = str(geoinfo.location.latitude)
            self['longitude'] = str(geoinfo.location.longitude)
        except Exception, e:
            debug_output("Could not get IP location info for %s: %s" % (self.value, e), 'error')
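# Standalone geoip2 usage sketch (mmdb path is a placeholder):
import geoip2.database
reader = geoip2.database.Reader('/path/to/GeoLite2-City.mmdb')
resp = reader.city('8.8.8.8')
print resp.city.name, resp.country.iso_code, resp.location.latitude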
def __init__(self, name, remote_addr, filter, engine, id=None, intercept_tls=False, ws=None, filter_restore=None):
    self.id = id
    self.engine = engine
    self.model = engine.model
    self.date_created = datetime.datetime.utcnow()
    self.name = name
    self.ws = ws
    self.ifaces = self.engine.setup['IFACES']

    filter_ifaces = ""
    for i in self.ifaces:
        if self.ifaces[i] == "Not defined":
            continue
        filter_ifaces += " and not host {} ".format(self.ifaces[i])
    self.filter = "ip and not host 127.0.0.1 and not host {} {}".format(remote_addr, filter_ifaces)
    # self.filter = "ip and not host 127.0.0.1 and not host %s" % (remote_addr)
    if filter != "":
        self.filter += " and ({})".format(filter)

    self.stopSniffing = False
    if filter_restore:
        self.filter = filter_restore

    self.thread = None
    self.thread_active = False
    self.pcap = False
    self.pcap_filename = "{}-{}.pcap".format(self.id, self.name)  # TODO CHANGE THIS AND MAKE IT SECURE
    self.pkts = []
    self.packet_count = 0
    self.live_analysis = {}
    self.offline_delay = 0

    self.nodes = {}
    self.edges = {}

    # flows
    self.flows = {}

    self.intercept_tls = intercept_tls
    if self.intercept_tls:
        debug_output("[+] Intercepting TLS")
        self.tls_proxy = self.engine.tls_proxy
        # self.tls_proxy.add_flows(self.flows)
    else:
        debug_output("[-] No TLS interception")

    modules = self.load_modules()
    self.modules = {m.name: m for m in modules}