import hashlib
import logging
import os
import sys
import time

# Project-local modules used below; the import paths are assumptions based on
# the names referenced in this class.
import apikey
import pickle_tool
import utils
from validator import Validator
from virustotal import VirusTotal
from snort_rule import SnortRule

logger = logging.getLogger(__name__)


class RuleEngine(object):

    def __init__(self):
        self.vt_req_counter = 0
        self.vt_req_timer = time.time()
        self.vd = Validator()
        self.vt = VirusTotal(apikey.APIKEY_1)
        self.cache = {}

    def iterpcap(self, path):
        """Yield the path of every .pcap file under `path`, recursively."""
        for dir_path, _dir_names, file_names in os.walk(path):
            for f in file_names:
                if f.endswith('.pcap'):
                    yield os.path.join(dir_path, f)

    def _iterpayload(self, path):
        """Yield (payload, 5-tuple) pairs for every TCP segment carrying data."""
        connection = utils.follow_tcp_stream(path)
        for conn, frame in connection.iteritems():
            for _seq, content in frame.iteritems():
                if content:  # skip packets without payload
                    yield content, conn

    def _check_timer_counter(self):
        """Throttle to the VirusTotal public-API limit of 4 requests/minute."""
        if self.vt_req_counter == 4:
            self.vt_req_counter = 0
            period = time.time() - self.vt_req_timer
            waiting = 60 - period + 1
            if waiting > 0:
                logger.info("Waiting %s seconds", waiting)
                time.sleep(waiting)
            self.vt_req_timer = time.time()

    def _make_rule(self, content, uricontent, dst_port, sid=0):
        """Build a Snort rule matching the Host header (content) and the
        requested path (uricontent) on the connection's destination port."""
        rule = SnortRule()
        pattern = {
            'msg': '"Trojan.Gen.uricontent"',
            'content': ['"{host}"'.format(host=content), 'nocase'],
            'uricontent': ['"{uri}"'.format(uri=uricontent), 'nocase'],
            # 'sid': sid,
            'dst_port': dst_port,
        }
        rule.set_malicious_pattern(**pattern)
        return rule

    def _get_url_positive(self, resource):
        """Return the VirusTotal positives count for `resource`, consulting
        the local cache first; return None if no report exists yet."""
        urlkey = hashlib.sha1(resource).hexdigest()
        if urlkey in self.cache:
            return self.cache[urlkey][1]

        self.vt_req_counter += 1
        logger.info("Search on VirusTotal, counter: %s", self.vt_req_counter)
        response = self.vt.get_url_report(resource)
        if response.get('error') is not None:
            logger.error("Error: {e}".format(e=response.get('error')))
            sys.exit(1)
        positives = response.get('results').get('positives')
        self._check_timer_counter()
        if positives is None:
            # No report yet: submit the URL for scanning and report nothing.
            logger.info("No report. Submit the URL to VirusTotal, counter: %s",
                        self.vt_req_counter)
            self.vt.scan_url(resource)
            self._check_timer_counter()
            return None
        elif positives >= 0:
            self.cache[urlkey] = [resource, positives]
            return positives
        else:
            logger.debug("Get reports failed.")
            return None

    def gen_rule(self, pcap_path):
        """Yield a Snort rule for every HTTP GET request in `pcap_path` whose
        URL is flagged by at least one VirusTotal engine."""
        self.cache = pickle_tool.check_json()
        for content, conn in self._iterpayload(pcap_path):
            get_method = self.vd.is_get_method(content)
            host = self.vd.is_hsot(content)  # sic: name as defined in Validator
            if not (host and get_method):
                continue
            if get_method.group(1) == '/':
                url = self.vd.is_valid_url(host.group(1).rstrip())
            else:
                url = self.vd.is_valid_url(
                    host.group(1).rstrip() + get_method.group(1))
            if url is not None:
                try:
                    url.group(0).decode('utf-8')
                except UnicodeDecodeError:
                    # Log the non-UTF-8 URL and skip it.
                    with open('invalid_utf8', 'a') as fp:
                        fp.write('{u}\n'.format(u=url.group(0)))
                    url = None
            if url is None:  # invalid URL
                continue
            host_content = host.group(0).rstrip()
            uricontent = get_method.group(1)
            pos = self._get_url_positive(url.group(0))
            if pos > 0:
                if uricontent == '/':
                    uricontent = None
                rule = self._make_rule(host_content, uricontent, conn[3], 0)
                with open('uricontent.rules', 'a') as fp:
                    fp.write('{r}\n'.format(r=str(rule)))
                yield rule
            # positives == 0 or None: nothing to generate
        pickle_tool.update_json(self.cache)
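# Example usage: a minimal sketch, assuming a `samples/` directory of capture
# files and that the caller configures logging. `gen_rule` also appends each
# matched rule to uricontent.rules as it yields it.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    engine = RuleEngine()
    for pcap in engine.iterpcap('samples'):
        for rule in engine.gen_rule(pcap):
            print rule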