def IP_details(self, IP):
    """Populate the info labels with graph metrics and traffic stats for *IP*.

    Looks the IP up in the (ip, value) tuple lists produced by Draw_Graph
    (degree, betweenness, degree centrality), then queries Analyser for the
    link count and small-packet ratio.
    """
    #print(IP+"*")
    mode = 0
    # Section 1 analyses with mode 0; any other section uses mode 1.
    if self.Section_for_Analyse == 1:
        mode = 0
    else:
        mode = 1
    IPs = Draw_Graph.degree(mode)
    for IP_degree in IPs:  # IP_degree is an (ip, degree) tuple
        if operator.eq(IP, IP_degree[0]) == True:
            self.inform_Text0.config(text="度")
            self.inform_Text1.config(text=IP_degree[1])
            break
    IPs = Draw_Graph.betweeness(mode)
    for IP_betweeness in IPs:
        if operator.eq(IP, IP_betweeness[0]) == True:
            self.inform_Text2.config(text="介数中心性")
            self.inform_Text3.config(text=round(IP_betweeness[1], 5))
            break
    IPs = Draw_Graph.degree_centrality(mode)
    for IP_centrality in IPs:
        if operator.eq(IP, IP_centrality[0]) == True:
            self.inform_Text4.config(text="点度中心性")
            self.inform_Text5.config(text=round(IP_centrality[1], 5))
            break
    # The Analyser helpers take a list of IPs; wrap the single IP.
    IP_list = []
    IP_list.append(IP)
    self.inform_Text6.config(text="连接数")
    self.inform_Text8.config(text="小包占比")
    self.inform_Text7.config(
        text=Analyser.Node_Links_Analyse(IP_list)[0][1])
    self.inform_Text9.config(
        text=round(Analyser.Node_Length_Analyse(IP_list)[0][1], 5))
def radCall(self):
    """Handle a radio-button change: rank IPs by the selected metric.

    For normal sections (flag_for_section != 2) the metric is recomputed
    from the graph; for the abnormal-IP section (flag_for_section == 2)
    the values come from the precomputed abnormal_IPs_detail_dict.
    """
    radSelect = self.radVar.get()
    if flag_for_section != 2:
        if radSelect == 0:
            self.control_tip.config(
                text="IP节点 点的度")
            IPs = list(Draw_Graph.degree(flag_for_section))
            print(IPs)
            self.Rank(IPs)
        elif radSelect == 1:
            self.control_tip.config(
                text="IP节点 介数中心性")
            IPs = list(Draw_Graph.betweeness(flag_for_section))
            self.Rank(IPs)
        elif radSelect == 2:
            self.control_tip.config(
                text="IP节点 点度中心性")
            IPs = list(Draw_Graph.degree_centrality(flag_for_section))
            self.Rank(IPs)
        elif radSelect == 3:
            self.control_tip.config(
                text="IP节点 连接数")
            IPs = Draw_Graph.transfer_nodes()
            self.Rank(Analyser.Node_Links_Analyse(IPs))
        elif radSelect == 4:
            self.control_tip.config(
                text="IP节点 小包占比")
            IPs = Draw_Graph.transfer_nodes()
            self.Rank(Analyser.Node_Length_Analyse(IPs))
    elif flag_for_section == 2:
        # Abnormal-IP section: read precomputed metrics from the detail dict.
        if radSelect == 0:
            self.control_tip.config(
                text="IP节点 点的度")
            self.Rank(
                instrument.change_abnormal_dict_to_tuplelist(
                    abnormal_IPs_detail_dict, "degree"))
        elif radSelect == 1:
            self.control_tip.config(
                text="IP节点 介数中心性")
            self.Rank(
                instrument.change_abnormal_dict_to_tuplelist(
                    abnormal_IPs_detail_dict, "betweeness"))
        elif radSelect == 2:
            self.control_tip.config(
                text="IP节点 点度中心性")
            self.Rank(
                instrument.change_abnormal_dict_to_tuplelist(
                    abnormal_IPs_detail_dict, "centrality"))
        elif radSelect == 3:
            self.control_tip.config(
                text="IP节点 连接数")
            self.Rank(
                instrument.change_abnormal_dict_to_tuplelist(
                    abnormal_IPs_detail_dict, "link"))
        elif radSelect == 4:
            self.control_tip.config(
                text="IP节点 小包占比")
            self.Rank(
                instrument.change_abnormal_dict_to_tuplelist(
                    abnormal_IPs_detail_dict, "length_prop"))
def IP_details(self, IP):
    """Fill the detail labels for *IP*: graph metrics, traffic stats and
    abnormal-behaviour flags (dga / flawedAmy) when the IP is abnormal."""
    # mode 0: metrics are always taken from the whole graph.
    mode = 0
    IPs = self.graphs.degree(mode)
    for IP_degree in IPs:  # IP_degree is an (ip, degree) tuple
        if operator.eq(IP, IP_degree[0]) == True:
            self.inform_Text0.config(text="度")
            self.inform_Text1.config(text=IP_degree[1])
            break
    IPs = self.graphs.betweeness(mode)
    for IP_betweeness in IPs:
        if operator.eq(IP, IP_betweeness[0]) == True:
            self.inform_Text2.config(text="介数中心性")
            self.inform_Text3.config(text=round(IP_betweeness[1], 5))
            break
    IPs = self.graphs.degree_centrality(mode)
    for IP_centrality in IPs:
        if operator.eq(IP, IP_centrality[0]) == True:
            self.inform_Text4.config(text="点度中心性")
            self.inform_Text5.config(text=round(IP_centrality[1], 5))
            break
    # Analyser helpers take a list of IPs; wrap the single IP.
    IP_list = []
    IP_list.append(IP)
    self.inform_Text6.config(text="连接数")
    self.inform_Text8.config(text="小包占比")
    self.inform_Text7.config(
        text=Analyser.Node_Links_Analyse(IP_list)[0][1])
    self.inform_Text9.config(
        text=round(Analyser.Node_Length_Analyse(IP_list)[0][1], 5))
    # Abnormal-attribute lookup; normal IPs get fixed placeholders.
    if IP in abnormal_IPs_detail_dict.keys():
        self.inform_Text11.config(text=abnormal_IPs_detail_dict[IP]['dga'])
        self.inform_Text13.config(
            text=abnormal_IPs_detail_dict[IP]['flawedAmy'])
    else:
        self.inform_Text11.config(text="No")
        self.inform_Text13.config(text="Null")
def Load_In():
    """Build the whole graph, run the abnormal-mode analysis, and show
    the subgraph listing; resets the menu drill-down depth.

    `self` is captured from the enclosing scope (this is a closure).
    """
    Draw_Graph.create_wholeGraph()
    IPs = Draw_Graph.list_subGraph()
    Analyser.abnormal_modes_analyse()
    self.Rank(IPs)
    self.load_in_flag = 1   # mark the import as completed
    self.menu_deepth = 0    # back to the top menu level
def radCall():
    """Rank the list box by the metric chosen via the radio buttons.

    Only active in the normal menu (menu_special == 0) after at least one
    drill-down (menu_deepth != 0). `self` is captured from the enclosing
    scope (closure).
    """
    radSelect = self.radVar.get()
    if self.menu_special == 0 and self.menu_deepth != 0:
        if radSelect == 0:
            #self.control_title.config(text="度排序")
            self.control_tip.config(
                text="IP节点 点的度")
            IPs = list(Draw_Graph.degree(0))
            self.Rank(IPs)
        elif radSelect == 1:
            self.control_tip.config(
                text="IP节点 介数中心性")
            # Compute once and reuse: the original recomputed betweeness
            # for the Rank() call, doubling an expensive graph analysis.
            IPs = list(Draw_Graph.betweeness(0))
            self.Rank(IPs)
        elif radSelect == 2:
            self.control_tip.config(
                text="IP节点 点度中心性")
            # Same fix as above: reuse the already-computed list.
            IPs = list(Draw_Graph.degree_centrality(0))
            self.Rank(IPs)
        elif radSelect == 3:
            self.control_tip.config(
                text="IP节点 连接数")
            IPs = Draw_Graph.transfer_nodes()
            self.Rank(Analyser.Node_Links_Analyse(IPs))
        elif radSelect == 4:
            self.control_tip.config(
                text="IP节点 小包占比")
            IPs = Draw_Graph.transfer_nodes()
            self.Rank(Analyser.Node_Length_Analyse(IPs))
def Load_In(self):  # the import must populate the three abnormal-IP data structures
    """Import the capture: build the graph, run the abnormal-IP analysis,
    and populate both ranking list boxes plus the abnormal-IP detail dict."""
    global abnormal_Links_list
    global abnormal_IPs_list
    global abnormal_IPs_detail_dict
    self.graphs.create_wholeGraph()  # build graph G (no png rendered here)
    abnormal_IPs_Rank = Analyser.abnormal_modes_analyse(
    )  # run the abnormal-IP detection; returns sorted [(IP, anomaly_count), ...]
    self.Rank(abnormal_IPs_Rank, 0)  # show in the upper list box
    abnormal_IPs_list = Analyser.read_abnormal_IP_list()  # read the abnormal-IP list
    abnormal_Links_list = Analyser.abnormal_links(
        abnormal_IPs_list)  # read the abnormal-IP link list
    Links_list = instrument.change_IPs_to_Links_str(
        abnormal_Links_list)  # join each link's two IPs into one string; returns a list of strings
    data_list = []  # link counts; read in the same order as Links_list, so index-aligned
    for abnormal_Link in abnormal_Links_list:  # extract the counts
        data_list.append(abnormal_Link[2])
    Links_dic = instrument.change_list_to_dict(
        Links_list, data_list)  # zip the two lists into {link_string: count, ...}
    order_Links_Ranks = sorted(
        Links_dic.items(), key=lambda x: x[1],
        reverse=True)  # re-sort by count as [(link_string, count), ...]
    self.Rank(order_Links_Ranks, 1)  # show in the lower list box
    self.load_in_flag = 1  # import finished
    # Build the abnormal-IP detail dict; it needs graph metrics, so it takes
    # the graphs object and the abnormal-IP list (see that function).
    abnormal_IPs_detail_dict = instrument.create_abnormal_IPs_dict(
        self.graphs, abnormal_IPs_list)
    self.flag_for_section = 4  # default to the abnormal-IP analysis module after import
def create_abnormal_IPs_dict(graphs, IPs_list):
    """Build {IP: detail_dict} for every abnormal IP.

    Each detail dict holds the graph metrics (degree, betweeness,
    centrality), the link count and small-packet ratio from Analyser,
    the DB-sourced flags (dga, flawedAmy) and the suspicion score.

    :param graphs: object exposing degree/betweeness/degree_centrality(mode)
    :param IPs_list: iterable of abnormal IP strings
    :return: dict mapping IP -> detail dict
    """
    abnormal_IPs_detail_dict = {}
    mode = 0  # metrics are always taken from the whole graph
    for IP in IPs_list:
        # Fresh dict per IP: the original reused one dict across iterations,
        # so an IP missing from a metric list inherited the previous IP's value.
        abnormal_IPs_detail = {}
        IPs = graphs.degree(mode)
        for IP_degree in IPs:
            if operator.eq(IP, IP_degree[0]) == True:
                abnormal_IPs_detail["degree"] = IP_degree[1]
                break
        IPs = graphs.betweeness(mode)
        for IP_betweeness in IPs:
            if operator.eq(IP, IP_betweeness[0]) == True:
                abnormal_IPs_detail["betweeness"] = round(IP_betweeness[1], 5)
                break
        IPs = graphs.degree_centrality(mode)
        for IP_centrality in IPs:
            if operator.eq(IP, IP_centrality[0]) == True:
                abnormal_IPs_detail["centrality"] = round(IP_centrality[1], 5)
                break
        IP_list = [IP]  # Analyser helpers expect a list of IPs
        abnormal_IPs_detail["link"] = Analyser.Node_Links_Analyse(IP_list)[0][1]
        abnormal_IPs_detail["length_prop"] = round(
            Analyser.Node_Length_Analyse(IP_list)[0][1], 5)
        # NOTE(security): SQL built by string concatenation; IP comes from an
        # internal list, but a parameterized query would still be safer.
        sql = "select has_dga,Necurs_FlawedAmy from abnormal_list where IP='" + IP + "'"
        results = Analyser.get_data(sql)
        if results[0][0] == 'YES':
            abnormal_IPs_detail["dga"] = 'Yes'
        else:
            abnormal_IPs_detail["dga"] = 'No'
        abnormal_IPs_detail["flawedAmy"] = results[0][1]
        abnormal_IPs_detail["suspicious_degree"] = Analyser.read_abnormal_IP_dict()[IP]
        abnormal_IPs_detail_dict[IP] = abnormal_IPs_detail
    return abnormal_IPs_detail_dict
def diverter(data, choice, filereports):
    """Dispatch analysis: 'all' runs the aggregate analyser (aan),
    anything else runs the single-choice analyser (an); the process exits
    afterwards in either case.

    :param data: payload handed to the analyser
    :param choice: analysis selector; "all" or a specific analysis name
    :param filereports: report destination forwarded to the analyser
    """
    if choice == "all":
        aan.analyse(data, filereports)
    else:
        an.analyse(data, choice, filereports)
    # The original called exit() at the end of both branches; hoisted here.
    # (Unused local `q1 = data` removed.)
    exit()
def Link_details(self, IP1, IP2):
    """Fill the link-detail labels for the IP1<->IP2 connection."""
    connection_count = Analyser.Link_Connection(IP1, IP2)
    self.inform_Text1.config(text=connection_count)
    length_stats = Analyser.Link_Length_Analyse(IP1, IP2)
    self.inform_Text3.config(text=length_stats[0])
    self.inform_Text5.config(text=length_stats[1])
    proto_stats = Analyser.Link_Protocol_Analyse(IP1, IP2)
    self.inform_Text7.config(text=proto_stats[0])
    self.inform_Text9.config(text=proto_stats[1])
def draw_abnormal_SingleIP(IP):  #to-do
    """Draw the neighbourhood graph of an abnormal IP into test.png.

    Edges between two abnormal IPs are styled per FlawedAmmyy download
    phase and directed according to the C&C-Server/Infected role; all
    other edges get the default style (sty=5). Returns the node list of
    the drawn graph L.
    """
    global sql, L, J
    L.clear()
    J.clear()
    # NOTE(review): create_abnormal_IPs_dict is defined elsewhere as
    # (graphs, IPs_list); only one argument is passed here — TODO confirm.
    abnormal_IPs_detail_dict = instrument.create_abnormal_IPs_dict(
        abnormal_IP_list)
    sql = "select * from Link where IP1='" + IP + "'or IP2='" + IP + "'"
    results = Analyser.get_data(sql)
    i = 0
    for r in results:
        # J always gets the plain edge; L gets the styled/directed edges.
        J.add_edge(r[1], r[2], sty=5, width=1)
        if r[1] in abnormal_IP_list and r[2] in abnormal_IP_list:
            role = abnormal_IPs_detail_dict[r[1]]['flawedAmy']
            Amy_Attribution_dict = Analyser.Necurs_flaw_Amy_Attribution(
                r[1], role)
            Amy_Attribution_list = [
                'download_batch_1', 'download_batch_2', 'download_prefile',
                'download_file'
            ]
            label = 1  # style index: one per download phase
            for Amy_Attribution in Amy_Attribution_list:
                int_packet_length = int(Amy_Attribution_dict[Amy_Attribution])
                if int_packet_length != 0:
                    # Edge direction encodes the role: server -> infected.
                    if role == 'C&C Server':
                        L.add_edge(r[1], r[2], sty=label, width=1)
                    elif role == 'Infected':
                        L.add_edge(r[2], r[1], sty=label, width=1)
                label += 1
        elif not (r[1] in abnormal_IP_list and r[2] in abnormal_IP_list):
            L.add_edge(r[1], r[2], sty=5, width=1)
        i = i + 1
    pos = nx.spring_layout(L, threshold=0.2)
    ax = plt.gca()
    draw_MultipleLine(L, pos, ax)
    ax.autoscale()
    # Build the protocol colour legend.
    color_example = []
    red_patch = mpatches.Patch(color='red', label='DNS')
    color_example.append(red_patch)
    blue_patch = mpatches.Patch(color='blue', label='HTTP')
    color_example.append(blue_patch)
    black_patch = mpatches.Patch(color='black', label='HTTPS')
    color_example.append(black_patch)
    yellow_patch = mpatches.Patch(color='yellow', label='TCP')
    color_example.append(yellow_patch)
    green_patch = mpatches.Patch(color='green', label='UDP')
    color_example.append(green_patch)
    plt.legend(handles=color_example)
    nx.draw_networkx_labels(L, pos, font_size=5, font_family='sans-serif')
    nodes = list(L.nodes)
    plt.axis('equal')
    plt.axis('off')
    plt.savefig("test.png")
    plt.clf()
    plt.close('all')
    return nodes
def diverter(data, choice):
    """Dispatch analysis to the aggregate analyser ('all') or the
    single-choice analyser, then print a completion message.

    :param data: payload handed to the analyser
    :param choice: analysis selector; "all" or a specific analysis name
    """
    # Unused local `q1 = data` removed.
    if choice == "all":
        aan.analyse(data)
    else:
        an.analyse(data, choice)
    print(" Analyser ki vachindi")
def Link_details(self, IP1, IP2):
    """Fill the special-mode link-detail labels for the IP1<->IP2 link."""
    self.spe_inform_Text1.config(text=Analyser.Link_Connection(IP1, IP2))
    String = Analyser.Link_Length_Analyse(
        IP1, IP2)  # see that function: returns a list of small/large-packet ratios
    self.spe_inform_Text3.config(text=String[0])
    self.spe_inform_Text5.config(text=String[1])
    String = Analyser.Link_Protocol_Analyse(
        IP1, IP2)  # likewise: per-protocol ratios; the list may be longer than 2
    self.spe_inform_Text7.config(text=String[0])
    self.spe_inform_Text9.config(text=String[1])
def allPack(script_folder, static, dynamic, srcBasePath, crxBasePath): """TODO: Docstring for allPack. :db_path: TODO :script_folder: TODO :returns: TODO """ # __import__('pdb').set_trace() # XXX BREAKPOINT # eList = db.select("select * from extensionTable where downloadStatus = 1") eList = select(e for e in Extension if e.downloadStatus == 1) for e in eList: crxPath = os.path.join(crxBasePath, "{0}.crx".format(e.extensionId)) if not os.path.exists(e.srcPath): logger.info("{0} extension src code not exists".format(e.srcPath)) continue # ds = ["downloadStatus", "userNum"] # for d in ds: # # if e.has_key(d): # if d in e: # e.pop(d) logger.info("Start to analyse extension{0}, dbId:{1}".format( e.extensionId, e.id)) Analyser.detect_background_scripts(e) Analyser.detect_content_scripts(e) if static: Analyser.static_detect_javascript_in_html(e, script_folder) if dynamic: Analyser.dynamic_detect_javascript_in_html(e, script_folder) logger.info("Extension{0} analysed".format(e.extensionId))
def draw_normal_SingleIP(self, IP):
    """Draw the one-hop neighbourhood of a normal IP into test.png.

    Edge widths scale as sqrt(link_weight / 10) + 1; node colours come
    from self.decide_node_color. Returns the list of drawn nodes.
    """
    self.J.clear()
    # NOTE(security): SQL built by concatenation; IP originates from the UI.
    sql = "select * from Link where IP1='" + IP + "'or IP2='" + IP + "'"
    results = Analyser.get_data(sql)
    # (Unused row counter `i` removed.)
    for r in results:
        self.J.add_edge(r[1], r[2], weight=r[3])
    edgelists = [(u, v, d['weight'])
                 for (u, v, d) in self.J.edges(data=True)]
    # Width grows sub-linearly with the link weight.
    weight = [math.sqrt(w / 10) + 1 for (_, _, w) in edgelists]
    color = self.decide_node_color(self.J)
    pos = nx.spring_layout(self.J, k=1)
    nx.draw_networkx_nodes(self.J, pos, node_size=100, node_color=color)
    nx.draw_networkx_edges(self.J, pos, edgelists, width=weight)
    nx.draw_networkx_labels(self.J, pos, font_size=5,
                            font_family='sans-serif')
    nodes = list(self.J.nodes)
    plt.axis('off')
    # plt.show()
    plt.savefig("test.png")
    plt.clf()
    return nodes
def Func_One_Select():
    """Update the label and ranking according to the selected radio value.

    Reads the tkinter variable once instead of once per branch (the
    original re-queried var.get() in every elif).
    """
    choice = var.get()
    if choice == 1:
        label_textbox.config(text="IP节点 点的度")
        Rank(Draw_Graph.degree())
    elif choice == 2:
        label_textbox.config(text="IP节点 介数中心性")
        Rank(Draw_Graph.betweeness())
    elif choice == 3:
        label_textbox.config(text="IP节点 点度中心性")
        Rank(Draw_Graph.degree_centrality())
    elif choice == 4:
        label_textbox.config(text="IP节点 连接数")
        Rank(Analyser.Node_Links_Analyse(IPs))
    elif choice == 5:
        label_textbox.config(text="IP节点 小包占比")
        Rank(Analyser.Node_Length_Analyse(IPs))
def create_wholeGraph(self):
    """Populate self.G with one weighted edge per row of the Link table.

    Each row contributes an edge (r[1], r[2]) with weight r[3].
    """
    sql = "select * from Link"
    results = Analyser.get_data(sql)
    # (Unused row counter `i` removed.)
    for r in results:
        self.G.add_edge(r[1], r[2], weight=r[3])
def draw_all():
    """Draw the full Link graph into test.png and return its node list.

    Also copies the drawn graph into the module-level J for later reuse.
    """
    global G, sql, J
    sql = "select * from link"
    results = Analyser.get_data(sql)
    # (Unused row counter `i` removed.)
    for r in results:
        G.add_edge(r[1], r[2], weight=r[3])
    edgelists = [(u, v, d['weight']) for (u, v, d) in G.edges(data=True)]
    # Width grows sub-linearly with the link weight.
    weight = [math.sqrt(w / 10) + 1 for (_, _, w) in edgelists]
    color = decide_node_color(G)
    pos = nx.spring_layout(G, k=1)  # positions for all nodes
    nx.draw_networkx_nodes(G, pos, node_size=200, node_color=color)
    nx.draw_networkx_edges(G, pos, edgelists, width=weight)
    nx.draw_networkx_labels(G, pos, font_size=5, font_family='sans-serif')
    nodes = list(G.nodes)
    J = G.copy()
    #visualize(G)
    plt.axis('off')
    plt.savefig("test.png")
    plt.clf()
    return nodes
def draw():  # build from an empty undirected graph G
    """Draw the first connected component of the Link graph into test.png
    and return one representative IP per connected subgraph."""
    global G, sql, J
    sql = "select * from Link"
    results = Analyser.get_data(sql)
    i = 0
    for r in results:
        # NOTE(review): `weight` is not defined locally, so it appears to be
        # a module-level list; widths would then accumulate across repeated
        # calls — confirm.
        weight.append(math.sqrt(r[3] / 10) + 1)
        G.add_edge(r[1], r[2], weight=r[3])
        i = i + 1
    sublist = list(nx.connected_components(G))
    H = G.subgraph(list(sublist[0]))
    pos = nx.spring_layout(H, k=1)  # positions for all nodes
    nx.draw_networkx_nodes(H, pos, node_size=200)
    nx.draw_networkx_edges(H, pos, width=weight)
    nx.draw_networkx_labels(H, pos, font_size=5, font_family='sans-serif')
    plt.axis('off')
    plt.savefig("test.png")
    plt.clf()
    nodes = []
    sub_number = 0
    # One representative entry per connected component.
    while sub_number < sublist.__len__():
        H = G.subgraph(list(sublist[sub_number]))
        J = H.copy()
        # degree(0) is a module-level helper; presumably it ranks the nodes
        # of the just-copied J — confirm against its definition.
        IPs = list(degree(0))
        sub_number += 1
        nodes.append(IPs[0])
    return nodes
def infobox(se, query):
    """
    infobox(search_engine, query)
    The function to generate infobox for the query.

    Searches for the query, walks the result topics until one yields a
    non-empty infobox via Analyser.build_infobox, then renders it with
    Display.draw_infobox. (Python 2 print statements.)
    """
    # NOTE(review): local name `json` shadows the stdlib module name; it is
    # the search-result object here, not the json module.
    json = se.get_search_result(query)
    if json is None:
        print 'Error in search'
        return
    info_list = None
    for element in json:
        topic_id = element['mid']
        topic = se.get_topic_result(topic_id)
        if topic is None:
            continue
        # Parse and analyze the topic
        # Get out of the loop if the topic is valid
        # Otherwise, continue to check the next topic
        title = query.title()
        info_list = Analyser.build_infobox(topic, title)
        if len(info_list) > 0:  # Nonempty result
            # print info_list
            break
    if info_list is not None and len(info_list) > 0:
        Display.draw_infobox(info_list)
    else:
        print 'There is no result for \'' + query + '\'!'
def Special_mode_DH():
    """Switch the UI into the DNS-HTTP special mode and show its ranking.

    No-op until an import has completed. `self` comes from the enclosing
    scope (closure).
    """
    if self.load_in_flag != 1:
        return
    self.Rank(Analyser.DNS_HTTP_Mode())
    self.menu_special = 2
    self.control_title.config(text="DNS-HTTP模式")
    self.control_tip.config(
        text='询问方IP DNS服务器 答复IP 询问与答复IP通讯数量')
def Wrong_Matched():
    """Switch the UI into the port/protocol-mismatch special mode.

    No-op until an import has completed. `self` comes from the enclosing
    scope (closure).
    """
    if self.load_in_flag != 1:
        return
    self.Rank(Analyser.Wrong_Matched_Port())
    self.menu_special = 1
    self.control_title.config(text="端口协议不匹配")
    self.control_tip.config(
        text='IP节点对 协议-端口 数量')
def draw_SingleIP(self, IP):
    """Dispatch drawing to the normal or abnormal single-IP renderer and
    return the resulting node list."""
    global abnormal_IP_list
    abnormal_IP_list = Analyser.read_abnormal_IP_list()
    if IP in abnormal_IP_list:
        return self.draw_abnormal_SingleIP(IP)
    return self.draw_normal_SingleIP(IP)
def draw_SingleIP(self, IP): global abnormal_IP_list #需要异常IP列表来判断使用不同的图J或L abnormal_IP_list = Analyser.read_abnormal_IP_list() #从Analyser读异常IP列表 if not IP in abnormal_IP_list: nodes = self.draw_normal_SingleIP(IP) #画正常IP elif IP in abnormal_IP_list: nodes = self.draw_abnormal_SingleIP(IP) #画异常IP return nodes #返回节点列表
def draw_initial_graph():
    """Render the initial graph image into the photo label and rank all
    IPs by link count."""
    global IPs
    IPs = list(Draw_Graph.draw())
    img = Image.open('test.png')  # open the rendered image
    photo = ImageTk.PhotoImage(img)  # wrap it for tkinter via PIL's PhotoImage
    label_photo.config(image=photo)
    label_photo.image = photo  # keep a reference so tkinter doesn't drop the image
    Rank(Analyser.Node_Links_Analyse(IPs))
def Choose_Ip():
    """Draw the graph for the IP selected in the main list box and rank
    its neighbours by link count."""
    string = str(listbox_main.get(listbox_main.curselection()))
    IP = str.split(' ', 1) if False else string.split(' ', 1)  # list entry starts with the IP address
    IPs = Draw_Graph.draw_SingleIP(IP[0])
    img = Image.open('test.png')  # open the rendered image
    photo = ImageTk.PhotoImage(img)  # wrap it for tkinter via PIL's PhotoImage
    label_photo.config(image=photo)
    label_photo.image = photo  # keep a reference so tkinter doesn't drop the image
    label_textbox.config(text="IP节点 连接数")
    Rank(Analyser.Node_Links_Analyse(IPs))
def IP_details(self, IP): mode = 0 #nx的相关指标在全图或连通图内才有意义,暂设为全图下的统计.该参数使IP对指标的分析不受当前显示的图的影响.总基于全图. IPs = self.graphs.degree( mode) #由于nx无法分析单个指定IP,因此先拉出所有点的相关信息,再从相应的元组列表中找到该IP及对应数据 for IP_degree in IPs: if operator.eq( IP, IP_degree[0] ) == True: #字符串相等,似乎可以直接用'==',IP_degree是元组(IP,data),下同 self.inform_Text0.config(text="度") self.inform_Text1.config(text=IP_degree[1]) break IPs = self.graphs.betweeness(mode) for IP_betweeness in IPs: if operator.eq(IP, IP_betweeness[0]) == True: self.inform_Text2.config(text="介数中心性") self.inform_Text3.config(text=round(IP_betweeness[1], 5)) break IPs = self.graphs.degree_centrality(mode) for IP_centrality in IPs: if operator.eq(IP, IP_centrality[0]) == True: self.inform_Text4.config(text="点度中心性") self.inform_Text5.config(text=round(IP_centrality[1], 5)) break IP_list = [] #由于Analyser的两个函数为了支持批量处理,输入IP都是以列表的方式,因此构造一个仅一项的列表,便于调用函数 IP_list.append(IP) self.inform_Text6.config(text="连接数") self.inform_Text8.config(text="小包占比") self.inform_Text7.config(text=Analyser.Node_Links_Analyse(IP_list)[0] [1]) #具体详见Analyser中的函数注释 self.inform_Text9.config(text=round( Analyser.Node_Length_Analyse(IP_list)[0][1], 5)) #round函数控制小数点位数 #异常属性判断 if IP in abnormal_IPs_detail_dict.keys( ): #其实这里用abnormal_list来判断更简洁,因为dict的keys就是前者,懒得改了 #分别取IP的各类异常状态 self.inform_Text11.config(text=abnormal_IPs_detail_dict[IP]['dga']) self.inform_Text13.config( text=abnormal_IPs_detail_dict[IP]['flawedAmy']) else: #正常的IP一定没有异常属性 self.inform_Text11.config(text="No") self.inform_Text13.config(text="Null")
def do_predict(): try: jd = request.form["jd"] jd = urllib.unquote(jd) X = A.get_single_X(jd) if len(X.nonzero()[0]) < 6: return "信息量太少,无法预测——0" salary = clf.predict(X) print(salary[0]) except Exception, e: print(e)
def Load_In(self):
    """Import the capture: build the graph, run the abnormal-IP analysis,
    and populate both ranking list boxes plus the abnormal-IP detail dict."""
    global abnormal_Links_list
    global abnormal_IPs_list
    global abnormal_IPs_detail_dict
    global flag_for_section
    Draw_Graph.create_wholeGraph()
    abnormal_IPs_Rank = Analyser.abnormal_modes_analyse()  # sorted [(IP, anomaly_count), ...]
    self.Rank(abnormal_IPs_Rank, 0)  # upper list box
    # NOTE(review): other variants of this loader call
    # Analyser.read_abnormal_IP_list(); confirm read_abnormal_IPs exists.
    abnormal_IPs_list = Analyser.read_abnormal_IPs()
    abnormal_Links_list = Analyser.abnormal_links(abnormal_IPs_list)
    Links_list = instrument.change_IPs_to_Links_str(abnormal_Links_list)
    data_list = []  # link counts, index-aligned with Links_list
    for abnormal_Link in abnormal_Links_list:
        data_list.append(abnormal_Link[2])
    Links_dic = instrument.change_list_to_dict(Links_list, data_list)
    order_Links_Ranks = sorted(Links_dic.items(), key=lambda x: x[1], reverse=True)
    self.Rank(order_Links_Ranks, 1)  # lower list box
    self.load_in_flag = 1
    # NOTE(review): create_abnormal_IPs_dict is defined elsewhere as
    # (graphs, IPs_list); only the IP list is passed here — confirm.
    abnormal_IPs_detail_dict = instrument.create_abnormal_IPs_dict(abnormal_IPs_list)
    flag_for_section = 2
def do_predict():
    """Predict a salary for the posted job description (duplicate variant).

    Python 2 syntax (`except Exception, e`). Returns an explanatory string
    when the description carries too little information (< 6 non-zero
    features).
    """
    try:
        jd = request.form['jd']
        jd = urllib.unquote(jd)  # form value arrives URL-encoded
        X = A.get_single_X(jd)
        if len(X.nonzero()[0]) < 6:
            return "信息量太少,无法预测——0"
        salary = clf.predict(X)
        print(salary[0])
        # NOTE(review): the prediction is only printed, never returned to
        # the client — the view returns None on success; confirm intent.
    except Exception, e:
        print(e)
def create_abnormal_IPs_dict(graphs, IPs_list):
    """Build the dict-of-dicts {IP: detail_dict} for every abnormal IP.

    Each detail dict holds the three nx metrics (taken from the whole
    graph G), the Analyser link count and small-packet ratio, the
    DB-sourced flags (dga, flawedAmy) and the suspicion score.
    """
    abnormal_IPs_detail_dict = {}  # dict of dicts, keyed by IP
    mode = 0  # the three nx metrics are based on G
    for IP in IPs_list:
        # Fresh per-IP dict: the original reused one dict across iterations,
        # so an IP missing from a metric list inherited the previous IP's
        # value via the final .copy().
        abnormal_IPs_detail = {}
        IPs = graphs.degree(mode)
        for IP_degree in IPs:
            if operator.eq(IP, IP_degree[0]) == True:
                abnormal_IPs_detail["degree"] = IP_degree[1]
                break
        IPs = graphs.betweeness(mode)
        for IP_betweeness in IPs:
            if operator.eq(IP, IP_betweeness[0]) == True:
                abnormal_IPs_detail["betweeness"] = round(IP_betweeness[1], 5)
                break
        IPs = graphs.degree_centrality(mode)
        for IP_centrality in IPs:
            if operator.eq(IP, IP_centrality[0]) == True:
                abnormal_IPs_detail["centrality"] = round(IP_centrality[1], 5)
                break
        # The Analyser helpers take a list of IPs, hence the one-element list.
        IP_list = [IP]
        abnormal_IPs_detail["link"] = Analyser.Node_Links_Analyse(
            IP_list)[0][1]
        abnormal_IPs_detail["length_prop"] = round(
            Analyser.Node_Length_Analyse(IP_list)[0][1], 5)
        # Query the DB for the IP's recorded features and fill the dict.
        # NOTE(security): SQL built by string concatenation; IP comes from an
        # internal list, but a parameterized query would still be safer.
        sql = "select has_dga,Necurs_FlawedAmy from abnormal_list where IP='" + IP + "'"
        results = Analyser.get_data(sql)
        if results[0][0] == 'YES':
            abnormal_IPs_detail["dga"] = 'Yes'
        else:
            abnormal_IPs_detail["dga"] = 'No'
        abnormal_IPs_detail["flawedAmy"] = results[0][1]
        # Read the IP's suspicion score.
        abnormal_IPs_detail[
            "suspicious_degree"] = Analyser.read_abnormal_IP_dict()[IP]
        abnormal_IPs_detail_dict[IP] = abnormal_IPs_detail
    return abnormal_IPs_detail_dict
def solve(self, resultsSearches, resultsCost):
    """Search the grid for the target using Bayesian belief updating.

    Starts at a random cell and loops: attempt detection at the current
    cell (a hit at the target cell still fails with the landscape's
    false-negative probability), update the belief distribution on
    failure, then move according to the configured rule. On success,
    records the step count into resultsSearches, resets the distribution,
    and returns an Analyser describing this run.
    """
    location_x = np.random.randint(self.rowSize)
    location_y = np.random.randint(self.columnSize)
    while not self.targetFound:
        self.numberOfSteps += 1
        landscapeProbab = self.probOfLandScapes[
            self.matrix[location_x][location_y] - 1]  #negative probab
        if location_x == self.target_x and location_y == self.target_y and np.random.uniform(
        ) > landscapeProbab:
            print("Target found at ", location_x, location_y)
            print("Number of Steps: ", self.numberOfSteps)
            print("landscape : ", self.landScapeType)
            self.targetFound = True
            resultsSearches[self.landScapeType] += self.numberOfSteps
            Utility.resetProbabilityDistribution(
                self.probabilityDistribution, self.rowSize, self.columnSize)
            return Analyser(self.matrix, self.rowSize, self.columnSize,
                            self.ruleType, self.isStationary,
                            self.numberOfSteps, self.distanceTravelled,
                            self.trials, self.landScapeType)
        else:
            #Bayessian updating after the target is not found
            Utility.updateProbabilities(
                self.probabilityDistribution, landscapeProbab, self.rowSize,
                self.columnSize, location_x, location_y, self.grid,
                self.rowWindowSize, self.colWindowSize, self.textMatrix)
            self.transitions = []
            Utility.makeTransitions(self.rowSize, self.columnSize, location_x,
                                    location_y, self.matrix, self.transitions)
            Utility.updateMovementProbability(self.rowSize, self.columnSize,
                                              self.transitions,
                                              self.probabilityDistribution,
                                              self.matrix,
                                              self.temporaryProbability)
            print("*&&&&&&&&&&&&&&&&&& transition print begins")
            print(self.probabilityDistribution)
            print(self.transitions)
            print("*&&&&&&&&&&&&&&&&&& transition print ends")
            # Rule 1 moves toward the cell most likely to CONTAIN the target;
            # Rule 2 toward the cell most likely to let us FIND it. Both
            # return the new location and a travel cost d.
            if self.ruleType == 'Rule 1':
                location_x, location_y, d = Utility.getProbabContainTarget(
                    self.probabilityDistribution, self.rowSize,
                    self.columnSize, location_x, location_y)
                resultsCost[self.landScapeType] += d
            elif self.ruleType == 'Rule 2':
                location_x, location_y, d = Utility.getProbabFindingTarget(
                    self.probabilityDistribution, self.probOfLandScapes,
                    self.matrix, self.rowSize, self.columnSize,
                    location_x, location_y)
                resultsCost[self.landScapeType] += d
def menu_select():
    """Handle a selection in the IP list box, dispatching on the special
    mode, the analysis section and the current drill-down depth.

    `self` is captured from the enclosing scope (closure).
    """
    if self.menu_special == 0:
        if self.Section_for_Analyse == 1 or self.Section_for_Analyse == 0:
            if self.menu_deepth == 0:
                # Top level: the selected row denotes a connected subgraph.
                self.subgraph_num = self.ip_listbox.curselection()[0]
                self.control_title.config(
                    text="第" + str(self.subgraph_num + 1) + "连通子图")
                Draw_Graph.change_subgraph(self.subgraph_num)
                self.Load_img('test.png')
                IPs = list(Draw_Graph.degree(0))
                self.Rank(IPs)
                self.menu_deepth = 1
            elif self.menu_deepth == 1 or self.menu_deepth == 2:
                # Deeper levels: the selected row starts with an IP address.
                string = str(
                    self.ip_listbox.get(
                        self.ip_listbox.curselection()[0]))
                string_part = string.split()
                nodes = Draw_Graph.draw_SingleIP(string_part[0])
                self.Load_img('test.png')
                IPs = Analyser.Node_Links_Analyse(nodes)
                self.Rank(IPs)
                self.control_title.config(text=string_part[0])
                self.control_tip.config(
                    text="IP节点 连接数")
                self.IP_details(string_part[0])
                self.menu_deepth = 2
        elif self.Section_for_Analyse == 2:
            # Link section: rows look like "IP1->IP2 ..."; draw that link.
            string = str(
                self.ip_listbox.get(self.ip_listbox.curselection()[0]))
            IP = re.split(r"->| ", string)
            Draw_Graph.draw_Line(IP[0], IP[1])
            self.Load_img('Line.png')
            '''
            protocols=Analyser.Link_Protocol_Analyse(IP[0], IP[1],0)
            protocol_proper=[]
            for protocol in protocols:
                string = str(protocol)
                IP = re.split(r"/|", string)
                protocol_proper.append(IP[1])
            Draw_Graph_0_0_1.draw_pie(protocol_proper)
            self.img = Image.open('pie.png')  # 打开图片
            self.photo = ImageTk.PhotoImage(self.img)  # 用PIL模块的PhotoImage打开
            self.Label_Image.config(image=self.photo)
            self.Label_Image.image = self.photo
            '''
    elif self.menu_special == 1:
        self.Choose_Ip_wrong_matched()
        self.menu_special = 0
        self.menu_deepth = 1
    elif self.menu_special == 2:
        self.DH_Mode()
        self.menu_special = 0
        self.menu_deepth = 1
#!/usr/bin/python # -*- coding:utf-8 -*- import os import sqlite3 from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash import requests import urllib import Analyser as A clf = A.get_clf() app = Flask(__name__) @app.route("/") def index(): return render_template("predict.html") @app.route("/predict/", methods=["GET", "POST"]) def predict(): if request.method == "POST": return render_template("predict.html", jd=request.form["jd"]) if request.method == "GET": return render_template("predict.html") @app.route("/do_predict/", methods=["POST"]) def do_predict():