def list_of_anomalies(self):
    """Build (and memoize) the list of Anomaly objects for this run.

    Pairs each anomaly signal time with its label, locates the last
    unsuccessful-skill time range starting at or before the signal, and
    attaches a Skill describing that range to the anomaly.

    Returns:
        list: one Anomaly per (label, signal) pair; cached on
        ``self._list_of_anomalies`` after the first call.

    Raises:
        ValueError: if the signal/label/range list lengths disagree, or a
            signal precedes every unsuccessful-skill range.
    """
    # Memoized result from a previous call.
    if hasattr(self, "_list_of_anomalies"):
        return self._list_of_anomalies
    signals = self.anomaly_signal_times
    # ValueError subclasses Exception, so existing broad handlers still match.
    if len(signals) != len(self.anomaly_labels):
        raise ValueError(
            "anomaly signals amount, %s, does not match anomaly labels, %s."
            % (len(signals), len(self.anomaly_labels)))
    if len(signals) != len(self.unsuccessful_tag_ranges):
        raise ValueError(
            "anomaly signals amount, %s, does not match unsuccessful skill amount, %s."
            % (len(signals), len(self.unsuccessful_tag_ranges)))
    labels = self.anomaly_labels
    # itertools.izip exists only on Python 2; fall back to builtin zip on 3.
    pairwise = getattr(itertools, "izip", zip)
    list_of_anomalies = []
    for label, signal in pairwise(labels, signals):
        anomaly_instance = Anomaly()
        anomaly_instance.label = label
        anomaly_instance.time = signal
        # Index of the last range whose start time is <= the signal time.
        unsucc_skill_idx = None
        for idx, (tag, (st, et)) in enumerate(self.unsuccessful_tag_ranges):
            if st > signal:
                break
            unsucc_skill_idx = idx
        if unsucc_skill_idx is None:
            # Was a bare ``raise Exception()``; give the caller a reason.
            raise ValueError(
                "anomaly signal %s precedes every unsuccessful skill range"
                % (signal,))
        tup = self.unsuccessful_tag_ranges[unsucc_skill_idx]
        skill = Skill()
        skill.tag = str(tup[0])
        skill.start_time = tup[1][0]
        skill.end_time = tup[1][1]
        anomaly_instance.skill_belonged_to = skill
        list_of_anomalies.append(anomaly_instance)
    self._list_of_anomalies = list_of_anomalies
    return list_of_anomalies
def object_algorithm(self, df, foi, df_number=0):
    """Scan one feature column of *df* for threshold-crossing anomalies.

    An anomaly window opens when ``abs(value) > threshold`` and is extended
    by ``patience`` rows on every new crossing; it closes once patience
    runs out.  Collected values are clamped to 0 when
    ``abs(value) >= max_filter``.  Windows holding only a single crossing
    (length <= patience + 1) are discarded.

    NOTE(review): a window still open when the end of *df* is reached is
    silently dropped — confirm that is intended.

    Args:
        df: DataFrame containing the feature column *foi* and a 'ts' column.
        foi: name of the feature column to scan.
        df_number: index of *df* within the caller's collection of frames.

    Returns:
        dict: consecutive integer keys (starting at ``len(self.objects)``)
        mapping to Anomaly instances.
    """
    settings = self.object_settings
    values = df[foi]  # hoist the column lookup out of the loop
    next_key = len(self.objects)
    end_of_patience = 0
    anomaly_mode = False
    anomaly = []
    anomalies = {}
    for i in range(len(df)):
        value = values.iloc[i]
        if abs(value) > settings.threshold:
            # Open a new window, or extend the patience of the current one.
            end_of_patience = i + settings.patience
            if not anomaly_mode:
                anomaly = []
                anomaly_mode = True
        if anomaly_mode and i <= end_of_patience:
            # Clamp extreme outliers to 0 instead of recording them.
            anomaly.append(value if abs(value) < settings.max_filter else 0)
            if i == end_of_patience:
                # Patience exhausted: keep the window only if it spans more
                # than a single anomalous data point.
                if len(anomaly) > settings.patience + 1:
                    anomalies[next_key] = Anomaly({
                        'array': anomaly,
                        'end_index': i,
                        'df_number': df_number,
                        'feature': foi,
                        'ts': df['ts'].iloc[i]
                    })
                    next_key += 1
                anomaly_mode = False
    return anomalies
def find_anomaly(self):
    """Compute per-host DNS fingerprint metrics and append them to Output\\DNS_FP.csv.

    For every host seen in ``self.hosts`` this accumulates request counts,
    query-type counts, distinct TLD/SLD/server/geo counts, TTL statistics
    and request-rate statistics, writes one CSV row per host, persists the
    running footprint counter via updateUFID(), and finally hands the CSV
    to Anomaly for parsing.

    NOTE(review): the header row (tmp_str2) is prepended on every run, so
    appending to an existing CSV duplicates the header — confirm intended.
    """
    self.getUFID()
    host_cnt = self.UFID  # running unique-footprint counter, persisted below
    tmp_str = ''
    # CSV header — column order must match the per-host row built below.
    tmp_str2 = 'S.No, Hostname, Req_cnt, Dist_Req_Cnt, high_req_cnt_single_domain, avg_req_per_min, high_req_per_min, cnt_query_a, cnt_query_mx, cnt_query_ns, cnt_query_ptr, dist_tld, dist_sld, dist_dns_server, res_cnt,dist_city_cnt, dist_subdivision_cnt, dist_country_cnt, res_rec_cnt, res_success_cnt, res_failed, avg_ttl_value, high_ttl_value, res_ip_cnt\n'
    for m_hosts in self.hosts:
        # Per-host accumulators, reset for every host.
        req_cnt = 0
        dist_req_cnt = 0
        high_req_cnt_single_domain = 0
        first_req_time = 0
        last_req_time = 0
        avg_req_per_min = 0.0
        res_cnt = 0
        min_stats = {}  # requests per minute-bucket, keyed by "HH:MM" slice
        high_req_per_min = 0
        cnt_query_a = 0
        cnt_query_mx = 0
        cnt_query_ns = 0
        cnt_query_ptr = 0
        list_tld = []
        dist_tld = 0
        list_sld = []
        dist_sld = 0
        dist_dns_server = 0
        dist_city_cnt = 0
        dist_subdivision_cnt = 0
        dist_country_cnt = 0
        res_rec_cnt = 0
        res_success_cnt = 0
        res_failed = 0
        avg_ttl_value = 0
        high_ttl_value = 0
        list_dns_server_ip = []
        list_res_ip = []
        list_country_name = []
        list_sub_name = []
        list_city_name = []
        res_ip_cnt = 0
        host_cnt += 1
        obj = self.hosts[m_hosts]
        for m_urls in obj.domain:
            # Track distinct top- and second-level domains for this host.
            tld = self.get_tld(m_urls)
            if tld not in list_tld:
                list_tld.append(tld)
            sld = self.get_sld(m_urls)
            if sld not in list_sld:
                list_sld.append(sld)
            dist_req_cnt += 1
            token_list = obj.domain[m_urls]
            if high_req_cnt_single_domain < len(token_list.list):
                high_req_cnt_single_domain = len(token_list.list)
            for token in token_list.list:
                req_cnt += 1
                request = token_list.list[token]
                if len(request.response) != 0:
                    res_cnt += 1
                # assumes req_timestamp[9:17] is a fixed-width "HH:MM:SS"
                # slice — TODO confirm the timestamp format.
                seconds = self.get_sec(request.req_timestamp[9:17])
                if first_req_time == 0:
                    first_req_time = seconds
                if seconds < first_req_time:
                    first_req_time = seconds
                elif seconds > last_req_time:
                    last_req_time = seconds
                # DNS query types: 1=A, 2=NS, 15=MX, 12=PTR.
                if int(request.req_type) == 1:
                    cnt_query_a += 1
                elif int(request.req_type) == 2:
                    cnt_query_ns += 1
                elif int(request.req_type) == 15:
                    cnt_query_mx += 1
                elif int(request.req_type) == 12:
                    cnt_query_ptr += 1
                if request.dns_server_ip not in list_dns_server_ip:
                    list_dns_server_ip.append(request.dns_server_ip)
                # Minute bucket ("HH:MM" slice) for requests-per-minute stats.
                mints = request.req_timestamp[9:14]
                if mints in min_stats:
                    min_stats[mints] += 1
                else:
                    min_stats[mints] = 1
                for res in request.response:
                    # res_code 0 = success, 3 = NXDOMAIN.
                    if int(res.res_code) == 0:
                        res_success_cnt += 1
                    if int(res.res_code) == 3:
                        res_failed += 1
                    res_rec_cnt += 1
                    if res.resolved_ip != 0:
                        if high_ttl_value < int(res.ttl):
                            high_ttl_value = int(res.ttl)
                        avg_ttl_value += int(res.ttl)
                        if res.resolved_ip not in list_res_ip:
                            list_res_ip.append(res.resolved_ip)
                            # Geolocate only first-seen IPs to avoid
                            # repeated lookups.
                            obj_ip = IPDetails(res.resolved_ip)
                            if obj_ip.country_name not in list_country_name:
                                list_country_name.append(
                                    obj_ip.country_name)
                            if obj_ip.sub_name not in list_sub_name:
                                list_sub_name.append(obj_ip.sub_name)
                            if obj_ip.city_name not in list_city_name:
                                list_city_name.append(obj_ip.city_name)
        # Busiest minute for this host.
        for items in min_stats:
            if min_stats[items] > high_req_per_min:
                high_req_per_min = min_stats[items]
        res_ip_cnt = len(list_res_ip)
        # NOTE(review): TTLs are summed over resolved records but divided by
        # the success count — confirm these denominators are meant to match.
        if res_success_cnt != 0:
            avg_ttl_value /= res_success_cnt
        # Fall back to the raw count when the observation window is <= 60s.
        if last_req_time - first_req_time > 60:
            avg_req_per_min = req_cnt / (
                (last_req_time - first_req_time) / 60)
        else:
            avg_req_per_min = req_cnt
        dist_city_cnt = len(list_city_name)
        dist_subdivision_cnt = len(list_sub_name)
        dist_country_cnt = len(list_country_name)
        dist_dns_server = len(list_dns_server_ip)
        dist_sld = len(list_sld)
        dist_tld = len(list_tld)
        # Hostname is suffixed with a capture-date slice of the file name.
        tmp_list = self.filename.split('/')
        uuid = tmp_list[(len(tmp_list) - 1)]
        tmp_str += str(
            host_cnt) + ',' + m_hosts + '_' + uuid[4:8] + uuid[9:11] + ','
        tmp_str += str(req_cnt) + ','
        tmp_str += str(dist_req_cnt) + ','
        tmp_str += str(high_req_cnt_single_domain) + ','
        tmp_str += str(avg_req_per_min) + ','
        tmp_str += str(high_req_per_min) + ','
        tmp_str += str(cnt_query_a) + ','
        tmp_str += str(cnt_query_mx) + ','
        tmp_str += str(cnt_query_ns) + ','
        tmp_str += str(cnt_query_ptr) + ','
        tmp_str += str(dist_tld) + ','
        tmp_str += str(dist_sld) + ','
        tmp_str += str(dist_dns_server) + ','
        tmp_str += str(res_cnt) + ','
        tmp_str += str(dist_city_cnt) + ','
        tmp_str += str(dist_subdivision_cnt) + ','
        tmp_str += str(dist_country_cnt) + ','
        tmp_str += str(res_rec_cnt) + ','
        tmp_str += str(res_success_cnt) + ','
        tmp_str += str(res_failed) + ','
        tmp_str += str(avg_ttl_value) + ','
        tmp_str += str(high_ttl_value) + ','
        tmp_str += str(res_ip_cnt) + '\n'
    self.updateUFID(host_cnt)
    # Windows-style relative path; created fresh or appended to.
    csv_file_name = 'Output\\DNS_FP.csv'
    if os.path.exists(csv_file_name):
        csv_outfile = open(csv_file_name, 'a')
    else:
        csv_outfile = open(csv_file_name, 'w')
    tmp_str = tmp_str2 + tmp_str
    csv_outfile.write(tmp_str)
    csv_outfile.close()
    # Hand the fingerprint CSV to the anomaly parser.
    obj_anomaly = Anomaly(csv_file_name)
    obj_anomaly.parse_file()
class Network:
    """Container for all DNS traffic seen in one capture file.

    Maps hostnames to Host objects, ingests request/response CSV rows,
    offers display/save/plot reports, computes per-host fingerprint
    metrics (find_anomaly) and drives an interactive text console.

    NOTE(review): this class uses raw_input(), which exists only on
    Python 2, while print() calls use Python-3 style — confirm the target
    interpreter.
    """

    def __init__(self, *args, **kwargs):
        self.hosts = {}           # hostname -> Host
        self.filename = args[0]   # capture file path; also used for output names
        self.h = {}               # nested Network built by rwnbiad()
        self.UFID = 1             # unique footprint id, persisted in Output\UFID.txt
        self.obj_anomaly = {}     # Anomaly built by find_anomaly()

    # Process records in CSV File
    def process_record(self, token, hostname, url, nbr_of_tokens,
                       request_type, url_len, timestamp, dns_server_ip):
        """Record one DNS request row, creating the Host on first sight."""
        if hostname in self.hosts:
            obj = self.hosts[hostname]
            obj.add_request(token, url, nbr_of_tokens, request_type, url_len,
                            timestamp, dns_server_ip)
        else:
            obj = Host(hostname)
            self.hosts[hostname] = obj
            obj.add_request(token, url, nbr_of_tokens, request_type, url_len,
                            timestamp, dns_server_ip)

    def process_response(self, token, hostname, url, res_code, ttl,
                         resolved_ip, timestamp):
        """Attach one DNS response row to its previously-recorded request."""
        if hostname in self.hosts:
            obj = self.hosts[hostname]
            obj.update_response(token, url, res_code, ttl, resolved_ip,
                                timestamp)
        # else
        # If Request doesn't exist, discard response

    # Display top n hosts
    def display_count(self, count):
        # obj = Host("127.0.0.1")
        i = 1
        for m_hosts in self.hosts:
            print(i, ". ", m_hosts)
            obj = self.hosts[m_hosts]
            obj.display()
            i += 1
            if count == i:
                break

    # Display all requests by hostname
    def display_host(self, hostname):
        if hostname in self.hosts:
            obj = self.hosts[hostname]
            obj.display()
        else:
            print("Hostname not found !!")

    # Save all requests by hostname
    def save_host(self, hostname):
        """Dump every request (and resolved responses) of one host to host_<name>.csv."""
        tmp_str = ""
        if hostname in self.hosts:
            obj = self.hosts[hostname]
            for m_urls in obj.domain:
                token_list = obj.domain[m_urls]
                for token in token_list.list:
                    request = token_list.list[token]
                    tmp_str += request.txn_id + "," + request.req_url + "," + request.req_timestamp \
                        + "," + request.req_type
                    for res in request.response:
                        if res.resolved_ip != "0":
                            tmp_str += res.res_code + "," + res.ttl + "," + res.res_timestamp + "," + res.resolved_ip
                    tmp_str += "\n"
            xml_outfile = open("host_" + hostname + ".csv", "w")
            xml_outfile.write(tmp_str)
            xml_outfile.close()
        else:
            print("Hostname not found !!")

    # Display all requests by hostname
    def save_map(self, hostname, tmp_str):
        """Write url,resolved_ip pairs: one host's map, or all hosts if unknown."""
        if hostname in self.hosts:
            obj = self.hosts[hostname]
            for m_urls in obj.domain:
                token_list = obj.domain[m_urls]
                for token in token_list.list:
                    request = token_list.list[token]
                    for res in request.response:
                        if res.resolved_ip != "0":
                            tmp_str += request.req_url + "," + res.resolved_ip + "\n"
            xml_outfile = open(hostname + "_map.csv", "w")
            xml_outfile.write(tmp_str)
            xml_outfile.close()
        else:
            # Unknown hostname: fall back to a map over every host.
            for m_hosts in self.hosts:
                obj = self.hosts[m_hosts]
                for m_urls in obj.domain:
                    token_list = obj.domain[m_urls]
                    for token in token_list.list:
                        request = token_list.list[token]
                        for res in request.response:
                            if res.resolved_ip != "0":
                                tmp_str += request.req_url + "," + res.resolved_ip + "\n"
            xml_outfile = open(self.filename + "_map.csv", "w")
            xml_outfile.write(tmp_str)
            xml_outfile.close()

    def display(self):
        """Print every host's details."""
        for hostname in self.hosts:
            obj = self.hosts[hostname]
            obj.display()

    # plot using Hostname
    def plot(self, hostname):
        if hostname in self.hosts:
            obj = self.hosts[hostname]
            obj.plot(self.filename)
        else:
            print("Hostname not found !!")

    # Returns seconds given a timestamp
    def get_sec(self, time_str):
        # time_str is expected as "HH:MM:SS".
        h, m, s = time_str.split(':')
        return int(h) * 3600 + int(m) * 60 + int(s)

    # Return Top Level Domain of a FQDN
    def get_tld(self, url):
        ptr = url.split('.')
        if len(ptr) > 0:
            return ptr[len(ptr) - 1]

    # Return Second Level Domain of a FQDN
    def get_sld(self, url):
        # Returns None implicitly for single-label names.
        ptr = url.split('.')
        if len(ptr) > 1:
            return ptr[len(ptr) - 2]

    # Get UFID Unique Footprint ID
    def getUFID(self):
        """Load the persisted footprint counter, seeding the file with "1" if absent."""
        if os.path.exists("Output\\UFID.txt"):
            f = open('Output\\UFID.txt', 'r')
            tmp_str = f.read()
            self.UFID = int(tmp_str)
            f.close()
        else:
            f = open('Output\\UFID.txt', 'w')
            f.write("1")
            f.close()

    def updateUFID(self, ufid):
        """Persist the footprint counter back to Output\\UFID.txt."""
        f = open('Output\\UFID.txt', 'w')
        f.write(str(ufid))
        f.close()

    # Find number of requests, number of responses, number of response record, Number of resolved URL
    def find_anomaly(self):
        """Compute per-host DNS fingerprint metrics, write Output\\DNS_FP.csv
        and hand the file to the Anomaly parser.

        NOTE(review): the header (tmp_str2) is prepended on every run, and
        the exists-branch below opens the file in "w" mode (the "a" line is
        commented out), so the exists check is pointless and each run
        clobbers the previous CSV — confirm whether append was intended.
        """
        self.getUFID()
        host_cnt = self.UFID
        tmp_str = ""
        # CSV header — column order must match the per-host row built below.
        tmp_str2 = 'S.No, Hostname, Req_cnt, Dist_Req_Cnt, high_req_cnt_single_domain, avg_req_per_min, ' \
                   'high_req_per_min, cnt_query_a, cnt_query_mx, cnt_query_ns, cnt_query_ptr, dist_tld, ' \
                   'dist_sld, dist_dns_server, res_cnt,dist_city_cnt, dist_subdivision_cnt, dist_country_cnt,' \
                   ' res_rec_cnt, res_success_cnt, res_failed, avg_ttl_value, high_ttl_value, res_ip_cnt\n'
        for m_hosts in self.hosts:
            # Per-host accumulators, reset for every host.
            req_cnt = 0
            dist_req_cnt = 0
            high_req_cnt_single_domain = 0
            first_req_time = 0
            last_req_time = 0
            avg_req_per_min = 0.0
            res_cnt = 0
            min_stats = {}  # requests per minute-bucket, keyed by "HH:MM" slice
            high_req_per_min = 0
            cnt_query_a = 0
            cnt_query_mx = 0
            cnt_query_ns = 0
            cnt_query_ptr = 0
            list_tld = []
            dist_tld = 0
            list_sld = []
            dist_sld = 0
            dist_dns_server = 0
            dist_city_cnt = 0
            dist_subdivision_cnt = 0
            dist_country_cnt = 0
            res_rec_cnt = 0
            res_success_cnt = 0
            res_failed = 0
            avg_ttl_value = 0
            high_ttl_value = 0
            list_dns_server_ip = []
            list_res_ip = []
            list_country_name = []
            list_sub_name = []
            list_city_name = []
            res_ip_cnt = 0
            host_cnt += 1
            obj = self.hosts[m_hosts]
            for m_urls in obj.domain:
                # Track distinct top- and second-level domains.
                tld = self.get_tld(m_urls)
                if tld not in list_tld:
                    list_tld.append(tld)
                sld = self.get_sld(m_urls)
                if sld not in list_sld:
                    list_sld.append(sld)
                dist_req_cnt += 1
                token_list = obj.domain[m_urls]
                if high_req_cnt_single_domain < len(token_list.list):
                    high_req_cnt_single_domain = len(token_list.list)
                for token in token_list.list:
                    req_cnt += 1
                    request = token_list.list[token]
                    if len(request.response) != 0:
                        res_cnt += 1
                    # assumes req_timestamp[9:17] is a fixed-width
                    # "HH:MM:SS" slice — TODO confirm the format.
                    seconds = self.get_sec(request.req_timestamp[9:17])
                    if first_req_time == 0:
                        first_req_time = seconds
                    if seconds < first_req_time:
                        first_req_time = seconds
                    elif seconds > last_req_time:
                        last_req_time = seconds
                    # DNS query types: 1=A, 2=NS, 15=MX, 12=PTR.
                    if int(request.req_type) == 1:
                        cnt_query_a += 1
                    elif int(request.req_type) == 2:
                        cnt_query_ns += 1
                    elif int(request.req_type) == 15:
                        cnt_query_mx += 1
                    elif int(request.req_type) == 12:
                        cnt_query_ptr += 1
                    if request.dns_server_ip not in list_dns_server_ip:
                        list_dns_server_ip.append(request.dns_server_ip)
                    # Minute bucket ("HH:MM" slice) for per-minute stats.
                    mints = request.req_timestamp[9:14]
                    if mints in min_stats:
                        min_stats[mints] += 1
                    else:
                        min_stats[mints] = 1
                    # DNS_RCODE_NXDOMAIN = 3
                    for res in request.response:
                        if int(res.res_code) == 0:
                            res_success_cnt += 1
                        if int(res.res_code) == 3:
                            res_failed += 1
                        res_rec_cnt += 1
                        if res.resolved_ip != 0:
                            if high_ttl_value < int(res.ttl):
                                high_ttl_value = int(res.ttl)
                            avg_ttl_value += int(res.ttl)
                            if res.resolved_ip not in list_res_ip:
                                list_res_ip.append(res.resolved_ip)
                                # Geolocate only first-seen IPs.
                                obj_ip = IPDetails(res.resolved_ip)
                                if obj_ip.country_name not in list_country_name:
                                    list_country_name.append(
                                        obj_ip.country_name)
                                if obj_ip.sub_name not in list_sub_name:
                                    list_sub_name.append(obj_ip.sub_name)
                                if obj_ip.city_name not in list_city_name:
                                    list_city_name.append(obj_ip.city_name)
            # Busiest minute for this host.
            for items in min_stats:
                if min_stats[items] > high_req_per_min:
                    high_req_per_min = min_stats[items]
            res_ip_cnt = len(list_res_ip)
            # NOTE(review): TTLs summed over resolved records but divided by
            # the success count — confirm the denominators should match.
            if res_success_cnt != 0:
                avg_ttl_value /= res_success_cnt
            # Fall back to the raw count for windows of <= 60 seconds.
            if (last_req_time - first_req_time) > 60:
                avg_req_per_min = req_cnt / (
                    (last_req_time - first_req_time) / 60)
            else:
                avg_req_per_min = req_cnt
            dist_city_cnt = len(list_city_name)
            dist_subdivision_cnt = len(list_sub_name)
            dist_country_cnt = len(list_country_name)
            dist_dns_server = len(list_dns_server_ip)
            dist_sld = len(list_sld)
            dist_tld = len(list_tld)
            # Hostname is suffixed with a capture-date slice of the file name.
            tmp_list = self.filename.split('/')
            uuid = tmp_list[len(tmp_list) - 1]
            tmp_str += str(
                host_cnt) + "," + m_hosts + "_" + uuid[4:8] + uuid[9:11] + ","
            tmp_str += str(req_cnt) + ","
            tmp_str += str(dist_req_cnt) + ","
            tmp_str += str(high_req_cnt_single_domain) + ","
            tmp_str += str(avg_req_per_min) + ","
            tmp_str += str(high_req_per_min) + ","
            tmp_str += str(cnt_query_a) + ","
            tmp_str += str(cnt_query_mx) + ","
            tmp_str += str(cnt_query_ns) + ","
            tmp_str += str(cnt_query_ptr) + ","
            tmp_str += str(dist_tld) + ","
            tmp_str += str(dist_sld) + ","
            tmp_str += str(dist_dns_server) + ","
            tmp_str += str(res_cnt) + ","
            tmp_str += str(dist_city_cnt) + ","
            tmp_str += str(dist_subdivision_cnt) + ","
            tmp_str += str(dist_country_cnt) + ","
            tmp_str += str(res_rec_cnt) + ","
            tmp_str += str(res_success_cnt) + ","
            tmp_str += str(res_failed) + ","
            tmp_str += str(avg_ttl_value) + ","
            tmp_str += str(high_ttl_value) + ","
            tmp_str += str(res_ip_cnt) + "\n"
            # print tmp_str
        # xml_outfile = open(self.filename + "_DNS_FP.csv", "w+")
        self.updateUFID(host_cnt)
        #out_file_name = self.filename.split(".")
        #csv_file_name = out_file_name[0] + "_DNS_fp.csv"
        csv_file_name = "Output\DNS_FP.csv"
        if os.path.exists(csv_file_name):
            #csv_outfile = open(csv_file_name, "a")
            csv_outfile = open(csv_file_name, "w")
        else:
            csv_outfile = open(csv_file_name, "w")
        tmp_str = tmp_str2 + tmp_str
        csv_outfile.write(tmp_str)
        csv_outfile.close()
        self.obj_anomaly = Anomaly(csv_file_name)
        self.obj_anomaly.parse_file()

    # Find hosts that requested for URl with the Resolved Ip as received in resolved_Ip
    def find_resolved_ip(self, resolved_ip):
        """Print every hostname that received the given resolved IP."""
        for m_hosts in self.hosts:
            obj = self.hosts[m_hosts]
            next_host = False
            for m_urls in obj.domain:
                token_list = obj.domain[m_urls]
                for token in token_list.list:
                    request = token_list.list[token]
                    for res in request.response:
                        if res.resolved_ip == resolved_ip:
                            print(m_hosts)
                            next_host = True
                            break
                    if next_host:
                        break
                if next_host:
                    break
            if next_host:
                continue

    # Find hosts that requested the same URL
    def find_req_url(self, req_url):
        """Print every hostname that requested the given URL."""
        for m_hosts in self.hosts:
            obj = self.hosts[m_hosts]
            next_host = False
            for m_urls in obj.domain:
                if m_urls == req_url:
                    print(m_hosts)
                    next_host = True
                    break
            if next_host:
                continue

    # Print summary of all hosts with greater than 300 requests
    def summary(self):
        print("Total Number of Hosts :" + str(len(self.hosts)))
        print("Number of Hosts with over 300 Different Requests:")
        i = 1
        for m_hosts in self.hosts:
            obj = self.hosts[m_hosts]
            #for m_urls in obj.domain:
            #token_list = obj.domain[m_urls]
            if len(obj.domain) > 300:
                print(m_hosts + "\t" + str(len(obj.domain)))

    def save_html(self):
        """Render every host/request/response as a nested HTML table next to the capture file."""
        tmp_str = "<html><body><table border='1px'><thead><tr><th colspan='5'>DNS Summary</th></tr></thead>"
        print("Saving Requests:" + str(len(self.hosts)))
        host_cnt = 0
        for m_hosts in self.hosts:
            host_cnt += 1
            tmp_str += "<tr><td>" + str(
                host_cnt) + "</td><td colspan='4'>" + m_hosts + "</td></tr>"
            obj = self.hosts[m_hosts]
            url_cnt = 0
            for m_urls in obj.domain:
                url_cnt += 1
                tmp_str += "<tr><td></td><td>" + str(
                    url_cnt) + "</td><td colspan='4'>" + m_urls + "</td></tr>"
                token_list = obj.domain[m_urls]
                token_cnt = 0
                for token in token_list.list:
                    token_cnt += 1
                    request = token_list.list[token]
                    tmp_str += "<tr><td></td><td></td><td>" + str(token_cnt) + "</td><td >" + request.txn_id + "</td><td >" + request.req_type + "</td><td >"\
                        + request.req_timestamp + "</td></tr>"
                    res_cnt = 0
                    for res in request.response:
                        res_cnt += 1
                        tmp_str += "<tr><td></td><td></td><td></td><td>" + str(res_cnt) + "</td><td >" + res.res_code + "</td><td >" + res.ttl + "</td><td >" \
                            + res.resolved_ip + "</td><td >" + res.res_timestamp + "</td></tr>"
        tmp_str += "</table></body></html>"
        xml_outfile = open(self.filename + ".html", "w")
        xml_outfile.write(tmp_str)
        xml_outfile.close()

    # Save all requests in csv
    def save_csv(self):
        """Write one summary CSV row per host next to the capture file."""
        i = 1
        tmp_str = "Hostname,count,nbr_requests,nbr_unique_req,avg_req_min,max_req_min,failed_cnt,ratio," \
                  "nbr_countries,req_type,sum_url,sum_token\n"
        for m_hosts in self.hosts:
            tmp_str += m_hosts + ","
            obj = self.hosts[m_hosts]
            tmp_str += str(obj.req_count) + ","
            tmp_str += str(obj.nbr_of_requests) + ","
            tmp_str += str(obj.nbr_of_distinct_requests) + ","
            tmp_str += str(obj.avg_req_per_min) + ","
            tmp_str += str(obj.max_req_per_min) + ","
            tmp_str += str(obj.failed_req_count) + ","
            tmp_str += str(obj.ratio_req_response) + ","
            tmp_str += str(obj.nbr_of_countries) + ","
            tmp_str += str(obj.req_type) + ","
            tmp_str += str(obj.sum_url_len) + ","
            tmp_str += str(obj.sum_nbr_domain_token) + "\n"
        xml_outfile = open(self.filename + ".csv", "w")
        xml_outfile.write(tmp_str)
        xml_outfile.close()

    # Get all hosts with requests greater than threshold
    def get_top_hosts(self, thresh_hold, result):
        # Appends matching hostnames into the caller-supplied list.
        for m_hosts in self.hosts:
            obj = self.hosts[m_hosts]
            if len(obj.domain) > thresh_hold:
                result.append(m_hosts)

    def start_console(self):
        """Interactive single-letter command loop; 'q' exits.

        NOTE(review): raw_input() is Python-2 only.
        """
        syntax = "l - list \t m - Save Map \t p - plot \td/D - Display/Save " \
                 "\t h - saveHtml \t x - saveCSV \t F - Find Req URl\t " \
                 "f - Find Resolved IP\t q - quit"
        print(syntax)
        print("console>")
        choice = "l"
        while choice != "q":
            choice = raw_input()
            if choice == "l":
                result = []
                self.get_top_hosts(100, result)
                i = 1
                print("Hosts with over 100 distinct requests ")
                for m_host in result:
                    print(
                        str(i) + ".\t" + m_host + "\t" +
                        str(self.hosts[m_host].req_count))
                    i += 1
            elif choice == "p":
                print("Enter Hostname :")
                hostname = raw_input()
                self.plot(hostname)
            elif choice == "f":
                print("Enter Resolved IP :")
                resolved_ip = raw_input()
                self.find_resolved_ip(resolved_ip)
            elif choice == "F":
                print("Enter Request URL :")
                req_url = raw_input()
                self.find_req_url(req_url)
            elif choice == "d":
                print("Enter Hostname :")
                hostname = raw_input()
                self.display_host(hostname)
            elif choice == "D":
                print("Enter Hostname :")
                hostname = raw_input()
                self.save_host(hostname)
            elif choice == "m":
                print("Enter Hostname :")
                hostname = raw_input()
                tmp_str = ""
                self.save_map(hostname, tmp_str)
                print(tmp_str)
            elif choice == "q":
                continue
            elif choice == "x":
                self.save_csv()
            elif choice == "h":
                self.save_html()
            else:
                print("Invalid Choice !!")
                print(syntax)
                print("console>")
        print("Console Terminated.")

    # Dead code kept verbatim below: two older module-level console()
    # drafts, superseded by start_console() above.
    '''
    def console(h):
        syntax = "a - Anomaly \tl - list \t m - Save Map \t " \
                 "p - plot \td/D - Display/Save \t \
                 "\t h - saveHtml \t x - saveCSV \t F - Find Req URl\t " \
                 "f - Find Resolved IP\t q - quit"
        print(syntax)
        print("console>")
        choice = "l"
        while choice != "q":
            choice = raw_input()
            if choice == "l":
                result = []
                h.get_top_hosts(100, result)
                i = 1
                print("Hosts with over 100 distinct requests ")
                for m_host in result:
                    print(str(i) + ".\t" + m_host + "\t" + str(h.hosts[m_host].req_count))
                    i += 1
            elif choice == "s":
                h.find_anomaly()
            elif choice == "a":
                h.find_anomaly()
            elif choice == "p":
                print("Enter Hostname :")
                hostname = raw_input()
                h.plot(hostname)
            elif choice == "f":
                print("Enter Resolved IP :")
                resolved_ip = raw_input()
                h.find_resolved_ip(resolved_ip)
            elif choice == "F":
                print("Enter Request URL :")
                req_url = raw_input()
                h.find_req_url(req_url)
            elif choice == "d":
                print("Enter Hostname :")
                hostname = raw_input()
                h.display_host(hostname)
            elif choice == "D":
                print("Enter Hostname :")
                hostname = raw_input()
                h.save_host(hostname)
            elif choice == "m":
                print("Enter Hostname :")
                hostname = raw_input()
                tmp_str = ""
                h.save_map(hostname, tmp_str)
                print(tmp_str)
            elif choice == "q":
                continue
            elif choice == "x":
                h.save_csv()
            elif choice == "h":
                h.save_html()
            else:
                print("Invalid Choice !!")
                print(syntax)
                print("console>")
        print ("Console Terminated.")

    def console(h):
        syntax = "a - Anomaly \tl - list \t m - Save Map \t " \
                 "p - plot \td/D - Display/Save \t s - summary " \
                 "\t h - saveHtml \t x - saveCSV \t F - Find Req URl\t " \
                 "f - Find Resolved IP\t q - quit"
        print(syntax)
        print("console>")
        choice = "l"
        while choice != "q":
            choice = raw_input()
            if choice == "l":
                result = []
                h.get_top_hosts(100, result)
                i = 1
                print("Hosts with over 100 distinct requests ")
                for m_host in result:
                    print(str(i) + ".\t" + m_host + "\t" + str(h.hosts[m_host].req_count))
                    i += 1
            elif choice == "s":
                h.find_anomaly()
            elif choice == "a":
                h.find_anomaly()
            elif choice == "p":
                print("Enter Hostname :")
                hostname = raw_input()
                h.plot(hostname)
            elif choice == "f":
                print("Enter Resolved IP :")
                resolved_ip = raw_input()
                h.find_resolved_ip(resolved_ip)
            elif choice == "F":
                print("Enter Request URL :")
                req_url = raw_input()
                h.find_req_url(req_url)
            elif choice == "d":
                print("Enter Hostname :")
                hostname = raw_input()
                h.display_host(hostname)
            elif choice == "D":
                print("Enter Hostname :")
                hostname = raw_input()
                h.save_host(hostname)
            elif choice == "m":
                print("Enter Hostname :")
                hostname = raw_input()
                tmp_str = ""
                h.save_map(hostname, tmp_str)
                print(tmp_str)
            elif choice == "q":
                continue
            elif choice == "x":
                h.save_csv()
            elif choice == "h":
                h.save_html()
            else:
                print("Invalid Choice !!")
                print(syntax)
                print("console>")
        print ("Console Terminated.")
    '''

    # option 1 : console , 2 : Find Anomaly and start console
    def map_analyse_data(self, filename, option):
        """Entry point: dispatch to back_track (option 3) or rwnbiad."""
        dns_analyser_start = datetime.datetime.now()
        # print("===============DNS Analyzer Started at " + str(dns_analyser_start) + ":" + filename + "===============")
        if option == 3:
            self.back_track()
        else:
            self.rwnbiad(filename, option)
        # get_stats(filename)
        # print("===============DNS Analyzer Completed at " + str(datetime.datetime.now()) + "==============")

    def back_track(self):
        """Map a device fingerprint id back to its source pcap and re-parse it.

        NOTE(review): shells out to Windows ``dir`` with a hard-coded local
        research path — not portable, and the fingerprint text is passed to
        the shell unescaped.
        """
        # 1 Read DNS_FP_ID
        print("Enter Device Fingerprint: ")
        fp_id = raw_input()
        # 2 Extract Hostname and timestamp
        token = fp_id.split("_")
        print(token[0], token[1])
        # 3 Map pcap.csv filename
        tmpfilename = "F:\\Research\\PhD\\Traffic\\2016" + token[1][
            0:4] + "_" + token[1][4:6] + "*.pcap"
        newname = subprocess.check_output("dir " + tmpfilename + " /B ",
                                          shell=True)
        token = newname.splitlines()
        print(token[0].strip())
        # 4 parse csv
        self.rwnbiad("F:/Research/PhD/Traffic/" + token[0].strip(), 2)
        # 5 Show Host Details

    # Rome was not build in a day
    def rwnbiad(self, filename, option):
        """Load <filename>_req.csv / _res.csv into a fresh Network and run the chosen mode.

        NOTE(review): both input files are never closed, and the bare
        ``except: continue`` silently drops malformed rows.
        """
        self.h = Network(filename)
        req_infile = open(filename + "_req.csv", "r")
        # req_infile = open("sample\\request.csv", "r")
        req_reader = csv.reader(req_infile, delimiter=',')
        for res in req_reader:
            try:
                self.h.process_record(str(res[0]), str(res[1]), str(res[2]),
                                      str(res[3]), str(res[4]), str(res[5]),
                                      str(res[6]), str(res[7]))
            except:
                continue
        res_infile = open(filename + "_res.csv", "r")
        # res_infile = open("sample\\response.csv", "r")
        res_reader = csv.reader(res_infile, delimiter=',', quotechar='"',
                                quoting=csv.QUOTE_ALL)
        for res in res_reader:
            try:
                # Note: column index 3 is deliberately skipped here.
                self.h.process_response(str(res[0]), str(res[1]), str(res[2]),
                                        str(res[4]), str(res[5]), str(res[6]),
                                        str(res[7]))
            except:
                continue
        # TODO: Automate the process after feedback from MSingh
        if option == 1:
            self.h.start_console()
        elif option == 2:
            self.h.find_anomaly()
            self.h.start_console()
        elif option == 4:
            self.h.find_anomaly()