def put_session(self, session):
    """Store a finished honeypot session as a Connection row.

    Persists the connection, associates samples/urls and previous
    connections, evaluates tags, assigns or creates a Network, and
    classifies malware by connhash similarity.

    Args:
        session: dict with keys "stream", "ip", "user", "pass", "date",
            "backend_username" and "samples" (list of sample dicts).
            Exact schema comes from the honeypot backend — assumed, not
            visible here; confirm against the caller.

    Returns:
        The stored connection serialized via Connection.json(depth=1).
    """
    # Hex-encoded per-session hash used later for similarity search.
    connhash = self.calc_connhash(session["stream"]).encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"], user=session["user"],
                      date=session["date"], password=session["pass"],
                      stream=json.dumps(session["stream"]),
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    # Strip non-ASCII junk so tag evaluation / display stays clean.
    conn.user = filter_ascii(conn.user)
    conn.password = filter_ascii(conn.password)

    if self.do_ip_to_asn_resolution:
        # NOTE: renamed from `range` — it shadowed the builtin.
        ip_range = self.get_ip_range(conn.ip)
        if ip_range:
            conn.country = ip_range.country
            conn.city = ip_range.city
            conn.lat = ip_range.latitude
            conn.lon = ip_range.longitude
            conn.asn = ip_range.asn

    self.session.add(conn)
    self.session.flush()  # to get id

    # network_id is inherited from the first sample/url/previous-conn
    # that already belongs to a network.
    network_id = None
    samples = []
    urls = []
    for sample_json in session["samples"]:
        # Ignore junk - may clean up the db a bit
        if sample_json["length"] < 2000:
            continue
        sample, url = self.create_url_sample(sample_json)
        if sample:
            if network_id is None and sample.network_id is not None:
                network_id = sample.network_id
            samples.append(sample)
        if url:
            if network_id is None and url.network_id is not None:
                network_id = url.network_id
            conn.urls.append(url)
            urls.append(url)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    assoc_timediff_sameip = 3600
    # `== None` is NOT used below: these are SQLAlchemy column
    # expressions, where `==`/`!=` build SQL, so they must stay.
    previous_conns = (self.session.query(Connection).filter(
        or_(
            and_(Connection.date > (conn.date - assoc_timediff),
                 Connection.user == conn.user,
                 Connection.password == conn.password),
            and_(Connection.date > (conn.date - assoc_timediff_sameip),
                 Connection.ip == conn.ip)),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())

    for prev in previous_conns:
        if network_id is None and prev.network_id is not None:
            network_id = prev.network_id
        conn.conns_before.append(prev)

    # Check connection against all tags
    tags = self.session.query(Tag).all()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    # Only create new networks for connections with urls or associated
    # conns, to prevent the creation of thousands of networks.
    # NOTE: only conns with network == NULL will get their network
    # updated later, so we should only create a network where we cannot
    # easily change it later.
    haslogin = conn.user is not None and conn.user != ""
    if ((len(conn.urls) > 0 or len(previous_conns) > 0)
            and network_id is None and haslogin):
        print(" --- create network --- ")
        network_id = self.create_network().id

    # Update network on self
    conn.network_id = network_id

    # Update network on all added Urls
    for url in urls:
        if url.network_id is None:
            url.network_id = network_id

    # Update network on all added Samples
    for sample in samples:
        if sample.network_id is None:
            sample.network_id = network_id

    # Update network on all previous connections without one
    if network_id is not None:
        for prev in previous_conns:
            if prev.network_id is None:
                prev.network_id = network_id
                # Update number of first conns on network
                # (reconstructed nesting — verify against history: the
                # counter bump is taken to apply to newly-adopted conns
                # that have no predecessors of their own)
                if len(prev.conns_before) == 0:
                    conn.network.nb_firstconns += 1
    self.session.flush()

    # Check for Malware type
    # only if our network exists AND has no malware associated
    if conn.network is not None and conn.network.malware is None:
        # Find connections with similar connhash (same hash length
        # means the same number of input lines in the session).
        similar_conns = (self.session.query(Connection).filter(
            func.length(Connection.connhash) == len(connhash)).all())
        min_sim = 2  # sentinel above any real similarity value
        min_conn = None
        for similar in similar_conns:
            if similar.network_id is not None:
                c1 = connhash.decode("hex")
                c2 = similar.connhash.decode("hex")
                sim = self.calc_connhash_similiarity(c1, c2)
                if sim < min_sim and similar.network.malware is not None:
                    min_sim = sim
                    min_conn = similar
        # 0.9: 90% or more words in session are equal
        # think this is probably the same kind of malware
        # doesn't need to be the same botnet though!
        if min_sim < 0.9:
            conn.network.malware = min_conn.network.malware
        else:
            conn.network.malware = Malware()
            conn.network.malware.name = random.choice(ANIMAL_NAMES)
            self.session.add(conn.network.malware)
        self.session.flush()

    # Update network number of first connections
    if len(previous_conns) == 0 and conn.network_id is not None:
        conn.network.nb_firstconns += 1

    return conn.json(depth=1)
def put_session(self, session):
    """Store a honeypot session and associate it with a Network.

    Resolves IP info, computes a per-session "connhash" over inbound
    stream lines, persists the Connection, links samples/urls and
    previous connections, evaluates tags, and propagates/creates a
    network id.

    Args:
        session: dict with keys "ip", "user", "pass", "date", "stream"
            (list of {"in", "data"} events), "backend_username" and
            "samples" — schema assumed from usage; confirm with caller.

    Returns:
        An empty list (kept for interface compatibility).
    """
    ipinfo = None
    asn = None
    block = None
    country = None
    network_id = None
    if self.do_ip_to_asn_resolution:
        ipinfo = get_ip_info(session["ip"])
        if ipinfo:
            # get_asn() is called for its side effect (ensures the ASN
            # row exists); the returned object is unused.
            asn_obj = self.get_asn(ipinfo["asn"])
            asn = ipinfo["asn"]
            block = ipinfo["ipblock"]
            country = ipinfo["country"]

    # Calculate "hash": one 16-bit word per non-empty inbound line,
    # after stripping non-printable/non-ASCII characters.
    connhash = ""
    for event in session["stream"]:
        if event["in"]:
            line = event["data"]
            line = ''.join(char for char in line
                           if ord(char) < 128 and ord(char) > 32)
            if line != "":
                linehash = abs(hash(line)) % 0xFFFF
                connhash += struct.pack("!H", linehash)
    connhash = connhash.encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"], user=session["user"],
                      date=session["date"], password=session["pass"],
                      stream=json.dumps(session["stream"]),
                      asn_id=asn, ipblock=block, country=country,
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    self.session.add(conn)
    self.session.flush()  # to get id

    samples = []
    urls = []
    for sample_json in session["samples"]:
        sample, url = self.create_url_sample(sample_json)
        # FIX: guard against create_url_sample() returning None for
        # either slot — the sibling version of this method checks
        # `if sample:` / `if url:` before touching .network_id;
        # without the guard this raised AttributeError on None.
        if sample:
            if network_id is None and sample.network_id is not None:
                network_id = sample.network_id
            samples.append(sample)
        if url:
            if network_id is None and url.network_id is not None:
                network_id = url.network_id
            conn.urls.append(url)
            urls.append(url)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    previous_conns = (self.session.query(Connection).filter(
        Connection.date > (conn.date - assoc_timediff),
        or_(
            and_(Connection.user == conn.user,
                 Connection.password == conn.password),
            Connection.ip == conn.ip),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())
    for prev in previous_conns:
        if network_id is None and prev.network_id is not None:
            network_id = prev.network_id
        conn.conns_before.append(prev)

    # Check connection against all tags
    tags = self.session.query(Tag).all()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    # Only create new networks for connections with urls or associated
    # conns, to prevent the creation of thousands of networks.
    # NOTE: only conns with network == NULL will get their network
    # updated later, so we should only create a network where we cannot
    # easily change it later.
    if (len(conn.urls) > 0 or len(previous_conns) > 0) and network_id is None:
        network_id = self.create_network().id

    # Update network on self
    conn.network_id = network_id

    # Update network on all added Urls
    for url in urls:
        if url.network_id is None:
            url.network_id = network_id

    # Update network on all added Samples
    for sample in samples:
        if sample.network_id is None:
            sample.network_id = network_id

    # Update network on all previous connections without one
    if network_id is not None:
        for prev in previous_conns:
            if prev.network_id is None:
                prev.network_id = network_id

    self.session.flush()
    return []
def put_session(self, session):
    """Persist a honeypot session, fetch VirusTotal reports, and queue
    unseen URLs for download.

    Optionally resolves ASN/country for the source IP, stores VT
    ip/domain reports, computes the session connhash, links known URLs
    to the connection and evaluates all tags against it.

    Returns a list of URLs whose sample still has to be fetched.
    """
    ipinfo = None
    asn = None
    block = None
    country = None
    if self.do_ip_to_asn_resolution:
        ipinfo = get_ip_info(session["ip"])
        if ipinfo:
            # side effect: makes sure the ASN row exists
            asn_obj = self.get_asn(ipinfo["asn"])
            asn = ipinfo["asn"]
            block = ipinfo["ipblock"]
            country = ipinfo["country"]

    # Fetch and store a VirusTotal IP report, unless one is cached.
    report = {}
    if self.vt != None:
        if self.web.get_ip_report(session["ip"]) == None:
            report = self.vt.query_ip_reports(session["ip"])
            ipReport = IpReport(ip=session["ip"],
                                report=json.dumps(report))
            self.session.add(ipReport)

    # Calculate "hash": a 16-bit word per printable, non-empty
    # inbound line of the session stream.
    connhash = ""
    for event in session["stream"]:
        if not event["in"]:
            continue
        cleaned = ''.join(char for char in event["data"]
                          if ord(char) < 128 and ord(char) > 32)
        line = cleaned
        if line != "":
            linehash = abs(hash(line)) % 0xFFFF
            connhash += struct.pack("!H", linehash)
    connhash = connhash.encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"], user=session["user"],
                      date=session["date"], password=session["pass"],
                      stream=json.dumps(session["stream"]),
                      asn_id=asn, ipblock=block, country=country,
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    self.session.add(conn)
    self.session.flush()  # need conn.id below

    req_urls = []
    set_urls = set(session["urls"])  # de-duplicate before lookups
    for url in set_urls:
        db_url = self.db.get_url(url).fetchone()
        url_id = 0
        report = ''
        parsed_uri = urlparse(url)
        domain = '{uri.netloc}'.format(uri=parsed_uri)
        if self.vt != None:
            # Store a VT domain report for every URL's host.
            report = self.vt.query_domain_reports(domain)
            domainReport = DomainReport(domain=domain,
                                        report=json.dumps(report))
            self.session.add(domainReport)
        if db_url == None:
            # Unknown URL: resolve its host and register it.
            url_ip = None
            url_asn = None
            url_country = None
            if self.do_ip_to_asn_resolution:
                url_ip, url_info = get_url_info(url)
                if url_info:
                    asn_obj_url = self.get_asn(url_info["asn"])
                    url_asn = url_info["asn"]
                    url_country = url_info["country"]
            url_id = self.db.put_url(url, session["date"], url_ip,
                                     url_asn, url_country)
            req_urls.append(url)
        elif db_url["sample"] == None:
            # Known URL but its sample was never fetched — retry.
            req_urls.append(url)
            url_id = db_url["id"]
        else:
            # Sample exists already
            # TODO: Check url for oldness
            url_id = db_url["id"]
        self.db.link_conn_url(conn.id, url_id)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    previous_conns = (self.session.query(Connection).filter(
        Connection.date > (conn.date - assoc_timediff),
        or_(
            and_(Connection.user == conn.user,
                 Connection.password == conn.password),
            Connection.ip == conn.ip),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())
    for prev in previous_conns:
        conn.conns_before.append(prev)

    # Check connection against all tags; re-fetch the row first so the
    # tag code sees the fully-linked state.
    tags = self.session.query(Tag).all()
    conn = self.session.query(Connection).filter(
        Connection.id == conn.id).first()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    return req_urls
def put_session(self, session):
    """Persist a honeypot session and queue unseen URLs for download.

    Resolves ASN/country for the client IP, hashes the session's input
    lines into a connhash, stores the Connection, links URLs and
    previous connections, and evaluates every tag against the row.

    Returns a list of URLs whose sample still needs fetching.
    """
    ipinfo = get_ip_info(session["ip"])
    asn = None
    block = None
    country = None
    if ipinfo:
        # side effect: makes sure the ASN row exists
        asn_obj = self.get_asn(ipinfo["asn"])
        asn = ipinfo["asn"]
        block = ipinfo["ipblock"]
        country = ipinfo["country"]

    # Calculate "hash": one 16-bit word per input line.
    connhash = ""
    for raw_line in session["text_in"].split("\n"):
        line = raw_line.strip()
        linehash = abs(hash(line)) % 0xFFFF
        connhash += struct.pack("!H", linehash)
    connhash = connhash.encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"], user=session["user"],
                      date=session["date"], password=session["pass"],
                      text_combined=session["text_combined"],
                      asn_id=asn, ipblock=block, country=country,
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    self.session.add(conn)
    self.session.flush()  # need conn.id below

    req_urls = []
    for url in session["urls"]:
        db_url = self.db.get_url(url).fetchone()
        url_id = 0
        if db_url == None:
            # Unknown URL: resolve its host and register it.
            url_ip, url_info = get_url_info(url)
            url_asn = None
            url_country = None
            if url_info:
                asn_obj_url = self.get_asn(url_info["asn"])
                url_asn = url_info["asn"]
                url_country = url_info["country"]
            url_id = self.db.put_url(url, session["date"], url_ip,
                                     url_asn, url_country)
            req_urls.append(url)
        elif db_url["sample"] == None:
            # Known URL but its sample was never fetched — retry.
            req_urls.append(url)
            url_id = db_url["id"]
        else:
            # Sample exists already
            # TODO: Check url for oldness
            url_id = db_url["id"]
        self.db.link_conn_url(conn.id, url_id)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    previous_conns = (self.session.query(Connection).filter(
        Connection.date > (conn.date - assoc_timediff),
        or_(
            and_(Connection.user == conn.user,
                 Connection.password == conn.password),
            Connection.ip == conn.ip),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())
    for prev in previous_conns:
        conn.conns_before.append(prev)

    # Check connection against all tags; re-fetch the row first so the
    # tag code sees the fully-linked state.
    tags = self.session.query(Tag).all()
    conn = self.session.query(Connection).filter(
        Connection.id == conn.id).first()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    return req_urls