def main(tday, index, gte, lte, aggs_name, timestamp, serverNum, dport, time_zone):
    """Check the ES IPs of one time window against the downloaded blacklist files.

    Looks for the blacklist directory of ``tday``; if it does not exist, walks
    back day by day for up to 7 previous days, finally falling back to the
    configured default file list.  Matches are written back to ES by
    checkAndInsert().

    Args:
        tday: date whose blacklist directory is tried first.
        index, gte, lte, aggs_name, time_zone: ES query parameters.
        timestamp: timestamp attached to inserted results.
        serverNum, dport: ES server address and port.
    """
    mylog = blacklist_tools.getlog()
    path = parser_config.get_store_path()[1] + str(tday) + os.path.sep
    cnt = 0
    while cnt < 8:
        if os.path.exists(path):
            filelist = get_all_file(path)
            break
        elif cnt == 7:
            # Default file path: no directory found within the last 7 days.
            dflg, defaultpath = parser_config.get_self_filelist('defaultlist')
            if dflg == 1:
                filelist = get_all_file(defaultpath)
                # BUG FIX: keep `path` in sync with the file list, otherwise
                # checkAndInsert() below reads from a non-existent directory
                # (the newer version of this function already does this).
                path = defaultpath
            else:
                filelist = []
            break
        else:
            # Check the last 7 days' files.
            # BUG FIX: was `tday + timedelta(-1)` on every pass, which
            # re-checked the same day 7 times; step one extra day back
            # per iteration instead.
            lday = tday + datetime.timedelta(-(cnt + 1))
            path = parser_config.get_store_path()[1] + str(lday) + os.path.sep
            cnt = cnt + 1
    # get es list
    es = ESclient(server=serverNum, port=dport)
    ip_es_list = es.get_es_ip(index, gte, lte, aggs_name, time_zone)
    if filelist:
        try:
            # check each file and insert match results
            checkAndInsert(path, filelist, ip_es_list, index, aggs_name,
                           timestamp, serverNum, dport)
        except Exception as e:  # `as` form: valid in Py2.6+ and Py3
            mylog.error(e)
def main(tday, index, gte, lte, aggs_name, timestamp, serverNum, dport,
         time_zone, querys_str):
    """Check the ES IPs of one time window against downloaded blacklist files.

    Uses the blacklist directory of ``tday``; if it is missing or empty, walks
    back day by day (up to 7 days — the offline case), finally falling back to
    the configured default file list.  Matched IPs are inserted into ES by
    checkAndInsert() and accumulated in ``allThreatIP``.

    Args:
        tday: date whose blacklist directory is tried first.
        index, gte, lte, aggs_name, time_zone, querys_str: ES query parameters.
        timestamp: timestamp attached to inserted results.
        serverNum, dport: ES server address and port.
    """
    mylog = set_logger()
    path = parser_config.get_store_path() + str(tday) + os.path.sep
    cnt = 0
    allThreatIP = {}  # suspicious dips found so far
    # Offline case: try the data of the past 7 days.
    while cnt < 8:
        if cnt < 7 and os.path.exists(path):
            filelist = get_all_file(path)
            if not filelist:
                # Directory exists but is empty: also check the previous day.
                # BUG FIX: was `tday + timedelta(-1)` on every pass, which
                # re-checked the same day; step one extra day back each time.
                lday = tday + datetime.timedelta(-(cnt + 1))
                path = parser_config.get_store_path() + str(lday) + os.path.sep
                cnt = cnt + 1
                continue
            else:
                break
        elif cnt == 7:
            # Default file path: no usable data within the last 7 days.
            # BUG FIX: the original condition was
            # `cnt == 7 or not os.path.exists(path)`, which jumped straight to
            # the default list whenever today's directory was missing and made
            # the walk-back branch below unreachable.
            dflg, defaultpath = parser_config.get_self_filelist('defaultlist')
            if dflg == 1:
                filelist = get_all_file(defaultpath)
                path = defaultpath
            else:
                filelist = []
            break
        else:
            # Current day's directory is missing: check the last 7 days' files.
            lday = tday + datetime.timedelta(-(cnt + 1))
            path = parser_config.get_store_path() + str(lday) + os.path.sep
            cnt = cnt + 1
    # get es list
    es = ESclient(server=serverNum, port=dport)
    ip_es_list = es.get_es_ip(index, gte, lte, aggs_name, time_zone, querys_str)
    mylog.debug('[mal_ip] ES data size:%d ' % len(ip_es_list))
    # Check against the downloaded threat intelligence.
    if filelist:
        try:
            # check each file and insert match results
            tmpThreatIP = checkAndInsert(path, filelist, ip_es_list, index,
                                         aggs_name, timestamp, serverNum, dport)
            if tmpThreatIP:
                allThreatIP = dict(allThreatIP, **tmpThreatIP)
        except Exception as e:  # `as` form: valid in Py2.6+ and Py3
            mylog.error('[mal_ip] Check blacklist error:{}'.format(e))
def new_run(entertime, delta, serverNum, dport, indx='tcp-*', aggs_name='dip'):
    """Main scheduling loop: periodically update blacklists and check ES.

    Runs forever.  Each cycle waits until ``startTime``, refreshes the source
    blacklists when ``updatetime`` has passed, runs checkES(), then advances
    both times by ``delta``.  ``flgnum`` counts the runs per calendar day and
    resets at midnight.

    Args:
        entertime: datetime of the first cycle.
        delta: timedelta between cycles (and between blacklist updates).
        serverNum, dport: ES server address and port.
        indx: ES index pattern to query.
        aggs_name: ES aggregation field.
    """
    updatetime = datetime.datetime.now()
    startTime = entertime
    # flgnum is the running times per day
    flgnum = 0
    # get format: "yy-mm-dd"
    tday = datetime.datetime.now().date()
    mylog = blacklist_tools.getlog()
    while True:
        if tday != datetime.datetime.now().date():
            flgnum = 0  # reset flgnum per day
            tday = datetime.datetime.now().date()
            dirpath = parser_config.get_store_path()[1] + str(tday) + os.path.sep
            # Guard the mkdir: os.mkdir raises OSError if the directory
            # already exists (e.g. created by the updater), which would
            # kill the loop outside the try block below.
            if not os.path.exists(dirpath):
                os.mkdir(dirpath)
        while datetime.datetime.now() < startTime:
            mylog.info("time sleep...")
            time.sleep((startTime - datetime.datetime.now()).total_seconds())
        try:
            # update source dataset
            if datetime.datetime.now() > updatetime:
                update_blacklist.main(tday, flgnum)
                updatetime = updatetime + delta
            # check interval time is 5 mins
            checkES(startTime, indx, aggs_name, serverNum, dport, tday)
            startTime = startTime + delta
            flgnum += 1
        except Exception as e:  # `as` form: valid in Py2.6+ and Py3
            mylog.error(e)
def main(tday, flgnum):
    """Ensure today's data directory exists, then run the blacklist update.

    Args:
        tday: date used to name the per-day storage directory.
        flgnum: run counter for the day, forwarded to the update module.
    """
    log = set_logger()
    log.info("[mal_ip] Starting update command.")
    target_dir = parser_config.get_store_path() + str(tday) + os.path.sep
    # Create the per-day directory only when it is not already there.
    if not os.path.exists(target_dir):
        os.mkdir(target_dir)
    update_blacklist_module(flgnum)
    log.info("[mal_ip] Update finish.")
def new_run(entertime, delta, serverNum, dport, offset, querys,
            indx='tcp-*', aggs_name='dip'):
    """One pass of the scheduling procedure: update blacklists, check ES,
    and optionally run the second-stage C&C IP check.

    NOTE(review): the original `while True:` is commented out, so this
    executes a single cycle per call — presumably the caller loops; confirm
    before re-enabling the loop here.

    Args:
        entertime: datetime before which this pass sleeps.
        delta: timedelta by which start/update times advance.
        serverNum, dport: ES server address and port.
        offset, querys: extra parameters forwarded to checkES().
        indx: ES index pattern to query.
        aggs_name: ES aggregation field.
    """
    updatetime = datetime.datetime.now()
    startTime = entertime
    # flgnum is the running times per day
    flgnum = 0
    # get format: "yy-mm-dd"
    tday = datetime.datetime.now().date()
    mylog = blacklist_tools.getlog()
    updateFlg = parser_config.update_flg()
    # while True:
    if tday != datetime.datetime.now().date():
        flgnum = 0  # reset flgnum per day
        tday = datetime.datetime.now().date()
        dirpath = parser_config.get_store_path()[1] + str(tday) + os.path.sep
        # Guard the mkdir: os.mkdir raises OSError if the directory already
        # exists, which would abort the pass before any checking is done.
        if not os.path.exists(dirpath):
            os.mkdir(dirpath)
    while datetime.datetime.now() < startTime:
        mylog.info("Time sleeping ...")
        time.sleep((startTime - datetime.datetime.now()).total_seconds())
    try:
        # update source dataset (only when updating is enabled in config)
        if updateFlg == 1:
            if datetime.datetime.now() > updatetime:
                update_blacklist.main(tday, flgnum)
                updatetime = updatetime + delta
        # check interval time is 5 mins
        all_IP = checkES(startTime, indx, aggs_name, serverNum, dport,
                         tday, offset, querys)
        # Second-stage IP check for C&C: promotes IPs from info to warn level.
        flg_C2 = parser_config.get_ip_secondcheck()
        if flg_C2 == 1:
            mylog.info('all_IP size:{}'.format(len(all_IP)))
            ip_check_C2.main(startTime, all_IP, serverNum, dport, indx)
        startTime = startTime + delta
        flgnum += 1
    except Exception as e:  # `as` form: valid in Py2.6+ and Py3
        mylog.error(e)
def main(tday, index, gte, lte, aggs_name, timestamp, serverNum, dport):
    """Match ES IPs of one time window against every blacklist file of ``tday``.

    Loads each JSON blacklist file in the day's storage directory, computes
    full and segment matches via treatip(), and writes them back to ES with
    insert_result().

    Args:
        tday: date whose blacklist directory is used.
        index, gte, lte, aggs_name: ES query parameters.
        timestamp: timestamp attached to inserted results.
        serverNum, dport: ES server address and port.
    """
    # BUG FIX: was the Windows-only separator '\\'; use os.path.sep so the
    # path also works on POSIX systems (consistent with the sibling versions).
    path = parser_config.get_store_path()[1] + str(tday) + os.path.sep
    filelist = get_all_file(path)
    # get es list
    es = ESclient(server=serverNum, port=dport)
    ip_es_list = es.get_es_ip(index, gte, lte, aggs_name)
    # check each file
    for fname in filelist:
        fpath = path + fname
        dataset = load_dict(fpath)
        # get match result
        fullmatch, segmentmatch = treatip(dataset, ip_es_list)
        insert_result(index, aggs_name, timestamp, serverNum, dport,
                      fullmatch, segmentmatch, dataset)
def store_json(dict, name):
    """Save *dict* as JSON into today's storage directory under <name>.json.

    Failures to write (e.g. the day's directory no longer exists after a
    date rollover) are logged as a warning, not raised.
    """
    log = blacklist_tools.getlog()
    today = datetime.datetime.now().date()
    savepath = (parser_config.get_store_path()[1] + str(today)
                + os.path.sep + name + '.json')
    try:
        with open(savepath, 'w') as fp:
            fp.write(json.dumps(dict))
    except IOError:
        log.warning('change date time!download again!')
def subnet_lpm(subnet, es_ip):
    """Insert blacklist subnets (/16, /23, /24, /25-as-/24) into the LPM
    matcher and match the ES IPs against them.

    Args:
        subnet: dict mapping 'a.b.c.d/mask' strings to their source info.
        es_ip: iterable of IP address strings pulled from ES.

    Returns:
        (subnet_result, sndict, sn_lte16):
        subnet_result -- list of {ip: 'subnet_lpm'} for each matched IP.
        sndict        -- subnets with mask >16 not in {16,23,24,25}, saved to JSON.
        sn_lte16      -- subnets with mask <16 (not handled), saved to JSON.
    """
    lpm.init()
    sndict = {}
    fpath = parser_config.get_store_path()[1]
    sn_lte16 = {}
    ip_subnet = subnet.keys()
    for sn in ip_subnet:
        subnet_split = sn.split('/')
        ip_num = ip_split_num(subnet_split[0])
        netMask = int(subnet_split[1])
        if (sn == '192.168.0.0/16' or sn == '172.16.0.0/12'
                or sn == '10.0.0.0/8'):
            # Skip private networks.
            continue
        elif netMask < 16:
            # Not handled for now.
            sn_lte16[sn] = subnet[sn]
        elif netMask == 16:
            newip1 = []
            ip_num[2] = ip_num[2] | 1
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append('*')
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
        elif netMask == 23:
            # A /23 spans two /24s: third octet with last bit set and cleared.
            newip1 = []
            ip_num[2] = ip_num[2] | 1
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append(str(ip_num[2]))
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
            newip2 = []
            # BUG FIX: was `ip_num[2] & 0`, which zeroed the whole third octet
            # and inserted a rule for the wrong /24; `& 254` clears only the
            # last bit and yields the second /24 of the pair.
            ip_num[2] = ip_num[2] & 254
            newip2.append(str(ip_num[0]))
            newip2.append(str(ip_num[1]))
            newip2.append(str(ip_num[2]))
            newip2.append('*')
            ipstr2 = '.'.join(newip2)
            lpm.insert_rule(ipstr2)
        elif netMask == 25 or netMask == 24:
            # Treat /25 as /24.
            newip1 = []
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append(str(ip_num[2]))
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
        else:
            # netMask > 16 and not in [16, 23, 24, 25]: save for later handling.
            sndict[sn] = subnet[sn]
    saveToJSON(sndict, fpath, "remain_subnet")
    saveToJSON(sn_lte16, fpath, 'lte16_subnet')
    # match
    subnet_result = []
    for ips in es_ip:
        ip_es_num = socket.ntohl(
            struct.unpack("I", socket.inet_aton(str(ips)))[0])
        if lpm.search_ip(ip_es_num):
            subnet_result.append({ips: 'subnet_lpm'})
    return subnet_result, sndict, sn_lte16
def subnet_lpm(subnet, es_ip):
    """Feed blacklist subnets into the LPM matcher and match ES IPs.

    Subnets are bucketed by mask length: /16 and /21../24 become LPM rules;
    masks shorter than /16, longer than /24, and the /17../20 leftovers are
    persisted to JSON and returned for separate handling.  Private networks
    are skipped entirely.

    Returns (subnet_result, sndict, sn_lte16, sn_gte24):
        subnet_result -- [{ip: 'subnet_lpm_match'}, ...] for matched ES IPs.
        sndict        -- /17../20 subnets.
        sn_lte16      -- subnets with mask < 16.
        sn_gte24      -- subnets with mask > 24, merged with sndict.
    """
    mylog = blacklist_tools.getlog()
    lpm.init()
    fpath = parser_config.get_store_path()[1]
    sndict = {}
    sn_lte16 = {}
    lpmdict = {}
    sn_gte24 = {}
    private_nets = ('192.168.0.0/16', '172.16.0.0/12', '10.0.0.0/8')
    for sn in subnet.keys():
        parts = sn.split('/')
        octets = ip_split_num(parts[0])
        mask = int(parts[1])
        if sn in private_nets:
            continue  # skip private networks
        if mask < 16:
            sn_lte16[sn] = subnet[sn]  # not handled for now
        elif mask == 16:
            lpmdict[sn] = subnet[sn]
            octets[2] = octets[2] | 1
            lpm.insert_rule('.'.join([str(octets[0]), str(octets[1]), '*', '*']))
        elif 21 <= mask <= 24:
            lpmdict[sn] = subnet[sn]
            # Expand into the 2^(24-mask) covered /24s.
            span = pow(2, 24 - mask) - 1
            base = octets[2] & (255 - span)
            for off in range(span + 1):
                octets[2] = base + off
                lpm.insert_rule('.'.join(
                    [str(octets[0]), str(octets[1]), str(octets[2]), '*']))
        elif mask > 24:
            sn_gte24[sn] = subnet[sn]  # handled by range match elsewhere
        else:
            sndict[sn] = subnet[sn]  # /17../20, kept for later handling
    mylog.info('lpm data size: %d' % len(lpmdict))
    mylog.info('remaining subnet size:%d' % len(sndict))
    mylog.info('lte16 size:%d' % len(sn_lte16))
    mylog.info('gte24 size:%d' % len(sn_gte24))
    # save (merge with any previously saved file of the same name)
    snpath = getsavepath(fpath, 'remaining_subnet')
    ltepath = getsavepath(fpath, 'lte16_subnet')
    lpmpath = getsavepath(fpath, 'lpm_subnet_data')
    gtepath = getsavepath(fpath, 'gte24_subnet')

    def _persist(data, path, tag):
        # Merge into an existing JSON file if present, otherwise create it.
        if not data:
            return
        if os.path.exists(path):
            merged = dict(blacklist_tools.load_dict(path), **data)
            saveToJSON(merged, fpath, tag)
        else:
            saveToJSON(data, fpath, tag)

    _persist(sndict, snpath, 'remaining_subnet')
    _persist(sn_lte16, ltepath, 'lte16_subnet')
    _persist(lpmdict, lpmpath, 'lpm_subnet_data')
    _persist(sn_gte24, gtepath, 'gte24_subnet')
    sn_gte24 = dict(sn_gte24, **sndict)  # merge
    # match
    subnet_result = []
    for ips in es_ip:
        packed = struct.unpack("I", socket.inet_aton(str(ips)))[0]
        if lpm.search_ip(socket.ntohl(packed)):
            subnet_result.append({ips: 'subnet_lpm_match'})
    return subnet_result, sndict, sn_lte16, sn_gte24
def subnet_lpm(subnet, es_ip):
    """Insert blacklist subnets (/16, /23, /24, /25-as-/24) into the LPM
    matcher, persist the bucketed subnets, and match ES IPs.

    Args:
        subnet: dict mapping 'a.b.c.d/mask' strings to their source info.
        es_ip: iterable of IP address strings pulled from ES.

    Returns:
        (subnet_result, sndict, sn_lte16):
        subnet_result -- list of {ip: 'subnet_lpm_match'} for matched IPs.
        sndict        -- subnets with mask >16 not in {16,23,24,25}.
        sn_lte16      -- subnets with mask <16 (not handled).
    """
    mylog = blacklist_tools.getlog()
    lpm.init()
    sndict = {}
    fpath = parser_config.get_store_path()[1]
    sn_lte16 = {}
    lpmdict = {}
    ip_subnet = subnet.keys()
    for sn in ip_subnet:
        subnet_split = sn.split('/')
        ip_num = ip_split_num(subnet_split[0])
        netMask = int(subnet_split[1])
        if (sn == '192.168.0.0/16' or sn == '172.16.0.0/12'
                or sn == '10.0.0.0/8'):
            # Skip private networks.
            continue
        elif netMask < 16:
            # Not handled for now.
            sn_lte16[sn] = subnet[sn]
        elif netMask == 16:
            lpmdict[sn] = subnet[sn]
            newip1 = []
            ip_num[2] = ip_num[2] | 1
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append('*')
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
        elif netMask == 23:
            # A /23 spans two /24s: third octet with last bit set and cleared.
            lpmdict[sn] = subnet[sn]
            newip1 = []
            ip_num[2] = ip_num[2] | 1
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append(str(ip_num[2]))
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
            newip2 = []
            ip_num[2] = ip_num[2] & 254
            newip2.append(str(ip_num[0]))
            newip2.append(str(ip_num[1]))
            newip2.append(str(ip_num[2]))
            newip2.append('*')
            ipstr2 = '.'.join(newip2)
            lpm.insert_rule(ipstr2)
        elif netMask == 25 or netMask == 24:
            # Treat /25 as /24.
            lpmdict[sn] = subnet[sn]
            newip1 = []
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append(str(ip_num[2]))
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
        else:
            # netMask > 16 and not in [16, 23, 24, 25]: save for later handling.
            sndict[sn] = subnet[sn]
    mylog.info('lpm data size: %d' % len(lpmdict))
    # Save.  BUG FIX: the original guarded each save with `if (not X):`,
    # which persisted only when the dicts were EMPTY and silently dropped
    # all real data; the guards are inverted to `if X:`.
    snpath = getsavepath(fpath, 'remain_subnet')
    ltepath = getsavepath(fpath, 'lte16_subnet')
    lpmpath = getsavepath(fpath, 'lpm_subnet_data')
    if sndict:
        if os.path.exists(snpath):
            newsndict = blacklist_tools.load_dict(snpath)
            newsndict1 = dict(newsndict, **sndict)  # merge
            saveToJSON(newsndict1, fpath, "remain_subnet")
        else:
            saveToJSON(sndict, fpath, "remain_subnet")
    if sn_lte16:
        if os.path.exists(ltepath):
            newlte = blacklist_tools.load_dict(ltepath)
            newlte16 = dict(newlte, **sn_lte16)  # merge
            saveToJSON(newlte16, fpath, 'lte16_subnet')
        else:
            saveToJSON(sn_lte16, fpath, 'lte16_subnet')
    if lpmdict:
        if os.path.exists(lpmpath):
            newlpmdict = blacklist_tools.load_dict(lpmpath)
            newlpmdict1 = dict(newlpmdict, **lpmdict)  # merge
            saveToJSON(newlpmdict1, fpath, 'lpm_subnet_data')
        else:
            saveToJSON(lpmdict, fpath, 'lpm_subnet_data')
    # match
    subnet_result = []
    for ips in es_ip:
        ip_es_num = socket.ntohl(
            struct.unpack("I", socket.inet_aton(str(ips)))[0])
        if lpm.search_ip(ip_es_num):
            subnet_result.append({ips: 'subnet_lpm_match'})
    return subnet_result, sndict, sn_lte16