def main():
    """Download the dropList feed, store it as JSON and log each step."""
    mylog = blacklist_tools.getlog()
    mylog.info('download dropList...')
    data = dropList(mylog)
    mylog.info('dropList size:{}'.format(len(data.keys())))
    store_json(data, 'dropList')
    mylog.info("update dropList!")
def main(tday, index, gte, lte, aggs_name, timestamp, serverNum, dport, time_zone):
    """Locate the freshest blacklist directory (today, falling back day by day
    over the previous week, then to the configured default list), pull the
    aggregated IP list from ES and run the match/insert pass over every file.

    Fix: the fallback used to recompute ``tday - 1`` on every iteration, so it
    only ever looked at yesterday; it now steps back one extra day per retry
    so the whole previous week is actually scanned.
    """
    mylog = blacklist_tools.getlog()
    path = parser_config.get_store_path()[1] + str(tday) + os.path.sep
    cnt = 0
    while cnt < 8:
        if os.path.exists(path):
            filelist = get_all_file(path)
            break
        elif cnt == 7:
            # nothing found in the last week: fall back to the default file path
            dflg, defaultpath = parser_config.get_self_filelist('defaultlist')
            if dflg == 1:
                filelist = get_all_file(defaultpath)
            else:
                filelist = []
            break
        else:
            # step back one more day each retry (cnt=0 -> yesterday, ...)
            lday = tday + datetime.timedelta(-(cnt + 1))
            path = parser_config.get_store_path()[1] + str(lday) + os.path.sep
            cnt = cnt + 1
    # get the candidate IPs aggregated from elasticsearch
    es = ESclient(server=serverNum, port=dport)
    ip_es_list = es.get_es_ip(index, gte, lte, aggs_name, time_zone)
    if filelist:
        try:
            # check each file and insert match results
            checkAndInsert(path, filelist, ip_es_list, index, aggs_name,
                           timestamp, serverNum, dport)
        except Exception as e:
            mylog.error(e)
def checkES(startTime, indx, aggs_name, serverNum, dport, tday, offset, query_strs):
    """Run one check cycle: build the window ending at ``startTime - offset``,
    derive the local timezone suffix and delegate matching/inserting to
    ``match_insert.main``.  Returns the dict of flagged IPs, {} on error."""
    mylog = blacklist_tools.getlog()
    try:
        mylog.info("{0}[Starting check command.Time is:{1}]{2}".format(
            "=" * 15, (startTime).strftime('%Y-%m-%d %H:%M:%S'), "=" * 15))
        # query window: [startTime - delta - offset, startTime - offset]
        gte = (startTime - delta - offset).strftime('%Y-%m-%d %H:%M:%S')
        lte = (startTime - offset).strftime('%Y-%m-%d %H:%M:%S')
        # "+HH:MM" offset; time.daylight == 0 means no DST rule is defined
        if time.daylight == 0:
            zone_secs = time.timezone
        else:
            zone_secs = time.altzone
        time_zone = "%+03d:%02d" % (-(zone_secs / 3600),
                                    zone_secs % 3600 / 3600.0 * 60)
        timestamp = (startTime).strftime('%Y-%m-%dT%H:%M:%S.%f') + time_zone
        # run the match + insert pass
        all_ip = match_insert.main(tday, indx, gte, lte, aggs_name, timestamp,
                                   serverNum, dport, time_zone, query_strs)
        mylog.info("{0}check finish.{1}".format("=" * 30, "=" * 30))
        return all_ip
    except Exception as e:
        mylog.error(e)
        return {}
def insert_result(index, aggs_name, timestamp, serverNum, dport, fullmatch, segmentmatch, subnetlpm, subnetfull, msg):
    """Insert every kind of match result into ES and return the merged dict of
    threat IPs (ip -> inserted document)."""
    es_insert = ESclient(server=serverNum, port=dport)
    mylog = blacklist_tools.getlog()
    threat_ip = {}  # final dict of successfully matched IPs
    # full matches arrive as a plain list: [ip, ip, ...]
    if fullmatch:
        partial = full_match_type(es_insert, fullmatch, msg, index, timestamp,
                                  aggs_name)
        threat_ip = dict(threat_ip, **partial)
    # the remaining result sets share one shape: [{ip: match_info}, ...]
    for results, match_type in ((segmentmatch, "segment_match"),
                                (subnetlpm, 'subnet_lpm_match'),
                                (subnetfull, 'subnet_fullmatch')):
        if results:
            partial = other_match_type(es_insert, results, match_type, msg,
                                       index, timestamp, aggs_name)
            threat_ip = dict(threat_ip, **partial)  # merge
    return threat_ip
def main(): mylog = blacklist_tools.getlog() # mylog=[] dict = MiningServerIPList(mylog) print len(dict.keys()) store_json(dict, 'MiningServerIPList') mylog.info("update mining pool!")
def new_run(entertime,delta,serverNum,dport,indx='tcp-*',aggs_name='dip',):
    """Legacy main loop: forever, sleep until the next scheduled tick, refresh
    the blacklist sources once their update time has passed, then run one
    checkES pass over the last interval.

    entertime -- first scheduled check time (datetime)
    delta     -- interval between checks and between source updates (timedelta)
    serverNum, dport -- elasticsearch host and port
    indx, aggs_name  -- ES index pattern and aggregation field
    Never returns; per-iteration errors are logged and swallowed.
    """
    # new running procedure
    updatetime=datetime.datetime.now()
    startTime = entertime
    # flgnum is the running times per day
    flgnum=0
    # get format: "yy-mm-dd"
    tday=datetime.datetime.now().date()
    mylog=blacklist_tools.getlog()
    while True:
        # on date rollover, reset the per-day run counter and create the
        # new day's data directory (assumes it does not exist yet)
        if(tday!=datetime.datetime.now().date()):
            flgnum=0 # reset flgnum per day
            tday=datetime.datetime.now().date()
            dirpath = parser_config.get_store_path()[1] + str(tday) + os.path.sep
            os.mkdir(dirpath)
        # sleep until the scheduled tick
        while datetime.datetime.now() < startTime:
            mylog.info("time sleep...")
            time.sleep((startTime-datetime.datetime.now()).total_seconds())
        try:
            # update source dataset when its refresh time has passed
            if(datetime.datetime.now()>updatetime):
                update_blacklist.main(tday,flgnum)
                updatetime=updatetime+delta
            # check interval time is 5mins
            checkES(startTime,indx,aggs_name,serverNum,dport,tday)
            startTime = startTime + delta
            flgnum+=1
        except Exception, e:
            mylog.error(e)
def main():
    """Download the bitnodes feed, store it as JSON and log progress."""
    mylog = blacklist_tools.getlog()
    mylog.info('download bitnodes...')
    data = bitnodes(mylog)
    mylog.info('bitnodes size:{}'.format(len(data.keys())))
    store_json(data, 'bitnodes')
    mylog.info("update bitnodes!")
def main(startTime, all_IP, serverNum, dport, index="tcp-*"):
    """Second-stage C&C check: for every suspect dip in ``all_IP`` (dips that
    were inserted as information alerts), query ES for the source IPs that
    talked to it within the last 5 minutes / last day."""
    mylog = blacklist_tools.getlog()
    # 5-minute window start
    gte1 = (startTime - datetime.timedelta(minutes=5)).strftime('%Y-%m-%d %H:%M:%S')
    lte = (startTime).strftime('%Y-%m-%d %H:%M:%S')
    # "+HH:MM" timezone suffix (altzone when a DST rule exists)
    if time.daylight == 0:
        zone_secs = time.timezone
    else:
        zone_secs = time.altzone
    time_zone = "%+03d:%02d" % (-(zone_secs / 3600),
                                zone_secs % 3600 / 3600.0 * 60)
    timestamp = (startTime).strftime('%Y-%m-%dT%H:%M:%S.%f') + time_zone
    es = ESclient(server=serverNum, port=dport)
    # one-day window start for the second check
    gte2 = (startTime - datetime.timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S')
    lte = (startTime).strftime('%Y-%m-%d %H:%M:%S')
    allwarn = {}  # {dip: [sip, sip, ...], ...}
    try:
        for dip in all_IP.keys():
            allwarn[dip] = es.secondcheck(gte1, gte2, lte, time_zone, dip,
                                          index, mylog)
    except Exception as e:
        mylog.error('second_check:{}'.format(e))
def full_match_type(es_insert, data, msg, index, timestamp, aggs_name):
    """Insert one ES alert document per fully-matched IP, enriched with
    X-Force reputation data when the lookup succeeds.

    data: list of matched IPs; msg: blacklist metadata keyed by IP.
    Returns {ip: inserted_doc}.

    Fixes: an empty X-Force message used to raise IndexError on
    ``msg_info[-1]``, needlessly forcing the exception fallback — now tested
    with ``endswith``; the duplicated document construction is shared via a
    helper; the collected dict is returned explicitly (the caller merges it
    with ``dict(threat_ip, **tmp)``).
    """
    mylog = blacklist_tools.getlog()
    tmpThreat = {}
    # enrich with x-force reputation info
    new_fullmatch = get_xforce(data, 1)
    for ip in data:
        try:
            doc = _base_full_match_doc(ip, msg, index, timestamp, aggs_name)
            info = new_fullmatch[ip]
            # score may be missing or empty -> 0
            if info.get('score'):
                doc['xforce_marks'] = float(info['score'])
            else:
                doc['xforce_marks'] = 0
            # human-readable message: categories, geo, company
            msg_info = ''
            if 'cats' in info:
                for ky, vals in info['cats'].items():
                    msg_info = msg_info + str(ky) + ':' + str(vals) + '%;'
            if info.get('geo') and info['geo'].strip():
                msg_info = msg_info + 'geo:' + info['geo'] + ';'
            if info.get('company') and info['company'].strip():
                msg_info = msg_info + 'company:' + info['company']
            # strip a trailing ';' (an empty msg_info used to crash here)
            if msg_info.endswith(';'):
                doc['xforce_msg'] = msg_info[:-1]
            else:
                doc['xforce_msg'] = msg_info
            es_insert.es_index(doc)
            tmpThreat[ip] = doc
            mylog.info('insert fullmatch with xforce')
        except Exception as e:
            # x-force enrichment failed: insert the bare blacklist document
            mylog.error(e)
            doc = _base_full_match_doc(ip, msg, index, timestamp, aggs_name)
            es_insert.es_index(doc)
            tmpThreat[ip] = doc
            mylog.info('insert fullmatch by defaut')
    return tmpThreat


def _base_full_match_doc(ip, msg, index, timestamp, aggs_name):
    # Build the alert document fields shared by the x-force and fallback paths.
    doc = {}
    doc['level'] = msg[ip]['level']
    doc['type'] = 'mal_ip'
    doc['desc_type'] = '[mal_ip] Request of suspect IP detection.'
    doc['desc_subtype'] = msg[ip]['desc_subtype']
    doc['subtype'] = msg[ip]['subtype']
    doc['match_type'] = "full_match"
    doc[aggs_name] = ip
    doc['@timestamp'] = timestamp
    doc['index'] = index
    return doc
def saveToJSON(dict1, path, name): "add the subnet to file" mylog = blacklist_tools.getlog() file_name = getsavepath(path, name) try: with open(file_name, 'w') as f: f.write(json.dumps(dict1)) except IOError: print 'save Error' mylog.error('saveToJSON Error!')
def main(tday, flgnum):
    """Ensure today's data directory exists, then run the configured
    blacklist updaters for run counter ``flgnum``."""
    mylog = blacklist_tools.getlog()
    print("Starting update command."), time.ctime()
    mylog.info("Starting update command.")
    dirpath = parser_config.get_store_path()[1] + str(tday) + os.path.sep
    if not os.path.exists(dirpath):
        os.mkdir(dirpath)
    update_blacklist_module(flgnum)
    mylog.info("update finish.")
def new_run(entertime, delta, serverNum, dport, offset, querys, indx='tcp-*', aggs_name='dip'):
    """Main loop: every ``delta``, optionally refresh the blacklist sources,
    run a checkES pass over the window ending at ``startTime - offset`` and,
    when enabled in the config, escalate suspicious dips via the C&C second
    check.

    entertime -- first scheduled check time (datetime)
    delta     -- interval between checks / source updates (timedelta)
    offset    -- lag subtracted from the query window (timedelta)
    querys    -- extra ES query strings forwarded to checkES
    Never returns; per-iteration errors are logged and swallowed.
    """
    # new running procedure
    updatetime = datetime.datetime.now()
    startTime = entertime
    # flgnum is the running times per day
    flgnum = 0
    # get format: "yy-mm-dd"
    tday = datetime.datetime.now().date()
    mylog = blacklist_tools.getlog()
    # whether the source blacklists should be re-downloaded periodically
    updateFlg = parser_config.update_flg()
    while True:
        # on date rollover, reset the per-day run counter and create the
        # new day's data directory (assumes it does not exist yet)
        if (tday != datetime.datetime.now().date()):
            flgnum = 0 # reset flgnum per day
            tday = datetime.datetime.now().date()
            dirpath = parser_config.get_store_path()[1] + str(
                tday) + os.path.sep
            os.mkdir(dirpath)
        # sleep until the scheduled tick
        while datetime.datetime.now() < startTime:
            mylog.info("Time sleeping ...")
            time.sleep((startTime - datetime.datetime.now()).total_seconds())
        try:
            # update source dataset
            if (updateFlg == 1):
                if (datetime.datetime.now() > updatetime):
                    update_blacklist.main(tday, flgnum)
                    updatetime = updatetime + delta
            # check interval time is 5mins
            all_IP = checkES(startTime, indx, aggs_name, serverNum, dport,
                             tday, offset, querys)
            # IP second check for C&C
            flg_C2 = parser_config.get_ip_secondcheck()
            if (flg_C2 == 1):
                # re-check dips reported at info level; escalate them to warn
                mylog.info('all_IP size:{}'.format(len(all_IP)))
                ip_check_C2.main(startTime, all_IP, serverNum, dport, indx)
            startTime = startTime + delta
            flgnum += 1
        except Exception, e:
            mylog.error(e)
def treatip(dataset, es_ip):
    """Match every ES IP against one blacklist dataset (full / segment /
    subnet matching), then strip whitelist hits.

    dataset: blacklist dict (ip-or-subnet -> metadata); es_ip: IPs from ES.
    Returns (fullmatchlist, segmentlist, subnet_lpm, subnet_full).

    Fix: ``sn_gte24`` is now pre-initialized — it used to be created only
    inside the LPM branch, so enabling range matching (flg_full) without LPM
    raised NameError.
    """
    mylog = blacklist_tools.getlog()
    full, segment, subnet = treat_ip.separate_ip(dataset)  # dataset is dict
    # full match: plain list of IPs
    full_list = full.keys()
    fullmatchlist = treat_ip.ip_full_match(full_list, es_ip)
    # segment match: [{ip: ipsegment}, ...]
    segmentlist = treat_ip.int_ip_range(segment, es_ip)
    subnet_lpm = {}
    subnet_full = {}
    sndict = {}
    sn_lte16 = {}
    sn_gte24 = {}  # fix: defined even when the LPM branch is skipped
    # read conf file to choose the methods
    flg_lpm, flg_full = parser_config.get_method()
    if 1 == flg_lpm:
        # subnet match by LPM; sndict / sn_lte16 hold the leftover subnets
        mylog.info('start lpm match')
        atime = time.time()
        subnet_lpm, sndict, sn_lte16, sn_gte24 = subnet_range.subnet_lpm(
            subnet, es_ip)
        ftime = time.time() - atime
        mylog.info('times:{}'.format(ftime))
        mylog.info('finish lpm match')
    if 1 == flg_full:
        # subnet match over explicit numeric ranges (masks > 24 bits)
        mylog.info('start range subnet match')
        subnet_full = subnet_range.subnet_range_match(sn_gte24, es_ip)
        mylog.info('finish range subnet match')
    # whitelist filtering
    wlflg, whitepath = parser_config.get_self_filelist('whitelist')
    if wlflg == 1:
        if os.path.exists(whitepath):
            filelist = get_all_file(whitepath)
            for fname in filelist:
                fpath = whitepath + fname
                # whitelist files have their own loader; whitedata is a dict
                whitedata = blacklist_tools.load_whitelist(fpath)
                fullmatchlist, segmentlist, subnet_lpm, subnet_full = \
                    treat_ip.whitelist_filter(fullmatchlist, segmentlist,
                                              subnet_lpm, subnet_full,
                                              whitedata)
        else:
            mylog.info('no self_whitelist_path')
    # return match results
    return fullmatchlist, segmentlist, subnet_lpm, subnet_full
def get_self_filelist(keywords):
    """Read the 'self_<keywords>_path' config section (keywords must be
    whitelist, blacklist or defaultlist) and return (enabled_flag, path).
    Returns (0, '') on any configuration error."""
    mylog = blacklist_tools.getlog()
    optionname = 'self_' + keywords + '_path'
    try:
        option_keys = cp.options(optionname)
        flg = cp.getint(optionname, option_keys[0])
        bpath = cp.get(optionname, option_keys[1])
        return flg, get_store_path()[1] + bpath + os.path.sep
    except Exception:
        mylog.error('config file error!')
        return 0, ''
def store_json(dict, name):
    """Save ``dict`` as <name>.json under today's data directory.

    (The parameter name shadows the builtin but is kept for interface
    compatibility with existing callers.)"""
    mylog = blacklist_tools.getlog()
    tday = datetime.datetime.now().date()
    savepath = (parser_config.get_store_path()[1] + str(tday) + os.path.sep
                + name + '.json')
    try:
        f = open(savepath, 'w')
        try:
            f.write(json.dumps(dict))
        finally:
            f.close()
    except IOError:
        # the date can roll over between path construction and the write
        mylog.warning('change date time!download again!')
def get_xforce(odata, datatype=0):
    """Fetch X-Force reputation info for a batch of IPs.

    datatype=1: odata is full-match data, a plain list of IPs.
    otherwise:  odata is [{ip: matchtype}, ...] and the IPs are extracted.
    Returns the dict of X-Force info keyed by IP."""
    mylog = blacklist_tools.getlog()
    mylog.info('start get_xforce!')
    if datatype == 1:
        retdata = xf.start(1, odata)
    else:
        ipset = [entry.keys()[0] for entry in odata]
        retdata = xf.start(1, ipset)
    mylog.info('finish get_xforce')
    return retdata
def checkAndInsert(path, filelist, ip_es_list, index, aggs_name, timestamp, serverNum, dport):
    """Run the match/insert pass over every blacklist file in ``filelist``
    and return the merged dict of threat IPs (ip -> inserted doc)."""
    mylog = blacklist_tools.getlog()
    all_threatIP = {}
    for fname in filelist:
        mylog.info('-*-*-*-*-file:{}-*-*-*-*-'.format(fname))
        dataset = load_dict(path + fname)
        if not dataset:
            continue
        msg = dataset[dataset.keys()[0]]
        # get match result for this file's entries
        fullmatch, segmentmatch, subnetlpm, subnetfull = treatip(dataset,
                                                                 ip_es_list)
        threatIP = insert_result(index, aggs_name, timestamp, serverNum,
                                 dport, fullmatch, segmentmatch, subnetlpm,
                                 subnetfull, dataset)
        # merge into the running total
        if threatIP:
            all_threatIP = dict(all_threatIP, **threatIP)
    return all_threatIP
def update_blacklist_module(flgnum):
    """Run every configured blacklist downloader whose update frequency
    divides the current run counter ``flgnum``."""
    mylog = blacklist_tools.getlog()
    parser_blacklist = parser_config.get_func()
    for filename in parser_blacklist.keys():
        times = int(parser_blacklist[filename])
        # check the update frequency
        mylog.info("check frequency.")
        if flgnum % times != 0:
            continue
        try:
            # import the downloader module dynamically and run it
            df = __import__('get_blacklist.{}'.format(filename), fromlist=True)
            mylog.info("start update {} ".format(filename))
            df.main()
        except Exception as e:
            mylog.error(e)
def subnet_range_match(sn_gte24, es_ip):
    """Match ES IPs against subnets with masks > 24 bits by binary search over
    their numeric [start, end] ranges.

    sn_gte24: {subnet: metadata}; es_ip: iterable of dotted-quad IP strings.
    Returns [{ip: subnet}, ...] for every ES IP inside some range.

    Improvements: explicit floor division (``//``) keeps the midpoint an int
    under any division semantics; dead commented-out code removed.
    """
    mylog = blacklist_tools.getlog()
    sn_gte24_list = []
    # first, convert each subnet to its numeric [start, end] range
    allrange = subnetTOrange(sn_gte24)
    mylog.info('gte24 size:{}'.format(len(sn_gte24)))
    # sort ranges by start so they can be binary-searched
    newAllRange = sorted(allrange.items(), key=lambda x: x[1][0])
    rangeLen = len(newAllRange)
    mylog.info('start Binary Search!')
    for ips in es_ip:
        # dotted quad -> host-order integer
        ip_es_num = socket.ntohl(
            struct.unpack("I", socket.inet_aton(str(ips)))[0])
        nlow = 0
        nhigh = rangeLen - 1
        while nlow <= nhigh:
            nmid = (nlow + nhigh) // 2
            subnet_num = newAllRange[nmid][1]  # [start, end]
            if subnet_num[0] <= ip_es_num <= subnet_num[1]:
                sn_gte24_list.append({ips: newAllRange[nmid][0]})
                break
            elif subnet_num[0] > ip_es_num:
                nhigh = nmid - 1
            else:
                nlow = nmid + 1
    return sn_gte24_list
def get_self_filelist(keywords):
    """Look up the self_<keywords>_path config option (keywords: whitelist /
    blacklist / defaultlist); return (flag, absolute_path), or (0, '') when
    the config section is missing or malformed."""
    mylog = blacklist_tools.getlog()
    section = 'self_%s_path' % keywords
    try:
        keys = cp.options(section)
        flg = cp.getint(section, keys[0])
        bpath = cp.get(section, keys[1])
        path = get_store_path()[1] + bpath + os.path.sep
        return flg, path
    except Exception:
        mylog.error('config file error!')
        return 0, ''
def main(tday, index, gte, lte, aggs_name, timestamp, serverNum, dport, time_zone, querys_str):
    """Find the freshest blacklist directory (today, else one of the previous
    7 days, else the configured default list), fetch the aggregated ES IPs and
    match/insert every file.  Returns the merged dict of flagged dips.

    Fixes: the offline weekly fallback used to recompute ``tday - 1`` on every
    retry so only yesterday was ever checked — it now walks back one extra day
    per iteration; ``allThreatIP`` is returned explicitly since checkES
    assigns this function's result.
    """
    mylog = blacklist_tools.getlog()
    path = parser_config.get_store_path()[1] + str(tday) + os.path.sep
    cnt = 0
    allThreatIP = {}  # dips flagged as threats
    # offline fallback: try data saved during the last 7 days
    while cnt < 8:
        if os.path.exists(path):
            filelist = get_all_file(path)
            break
        elif cnt == 7:
            # nothing in the last week: use the default file path
            dflg, defaultpath = parser_config.get_self_filelist('defaultlist')
            if dflg == 1:
                filelist = get_all_file(defaultpath)
            else:
                filelist = []
            break
        else:
            # step back one more day each retry (cnt=0 -> yesterday, ...)
            lday = tday + datetime.timedelta(-(cnt + 1))
            path = parser_config.get_store_path()[1] + str(lday) + os.path.sep
            cnt = cnt + 1
    # aggregated candidate IPs from elasticsearch
    es = ESclient(server=serverNum, port=dport)
    ip_es_list = es.get_es_ip(index, gte, lte, aggs_name, time_zone,
                              querys_str)
    mylog.info('ES data size:%d ' % len(ip_es_list))
    # check the downloaded threat intelligence
    if filelist:
        try:
            # check each file and insert match results
            tmpThreatIP = checkAndInsert(path, filelist, ip_es_list, index,
                                         aggs_name, timestamp, serverNum,
                                         dport)
            if tmpThreatIP:
                allThreatIP = dict(allThreatIP, **tmpThreatIP)
        except Exception as e:
            mylog.error('check blacklist:{}'.format(e))
    return allThreatIP
def checkES(startTime,indx,aggs_name,serverNum,dport,tday): # new check function mylog=blacklist_tools.getlog() try: print("Starting check command."), time.ctime() mylog.info("Starting check command.Time is:{}".format((startTime).strftime('%Y-%m-%d %H:%M:%S'))) # execute the command gte = (startTime - delta).strftime('%Y-%m-%d %H:%M:%S') lte = (startTime).strftime('%Y-%m-%d %H:%M:%S') time_zone='' if(time.daylight==0):# 1:dst; time_zone="%+03d:%02d"%(-(time.timezone/3600),time.timezone%3600/3600.0*60) else: time_zone = "%+03d:%02d" % (-(time.altzone / 3600), time.altzone % 3600 / 3600.0 * 60) timestamp = (startTime).strftime('%Y-%m-%dT%H:%M:%S.%f') + time_zone match_insert.main(tday,indx,gte,lte,aggs_name,timestamp,serverNum,dport,time_zone) # print("check finish."), time.ctime() mylog.info("check finish.") print"="*40 except Exception, e: # print e mylog.error(e)
def main(): mylog = blacklist_tools.getlog() dict = stopforumspam_toxic_ip_range(mylog) print len(dict) store_json(dict, 'stopforumspam_toxic_ip_range') mylog.info("update spam!")
def main(): mylog = blacklist_tools.getlog() dict = ssl_abuse(mylog) print len(dict) store_json(dict, 'ssl_abuse') mylog.info("update ssl_abuse!")
def main(): mylog = blacklist_tools.getlog() dict = bogons_ip(mylog) print len(dict) store_json(dict, 'bogons') mylog.info("update bogons_ip!")
def other_match_type(es_insert, data, match_types, msg, index, timestamp, aggs_name):
    """Insert one alert document per matched IP for segment / subnet matches,
    enriched with X-Force data when the batch lookup succeeds.

    data: [{es_ip: matched_entry}, ...]; msg: blacklist metadata;
    match_types: 'segment_match' | 'subnet_lpm_match' | 'subnet_fullmatch'.
    Returns {es_ip: inserted_doc}.

    Fixes: an empty X-Force message no longer raises IndexError on
    ``msg_info[-1]``; the exception fallback now actually indexes its
    documents (it previously only logged 'insert ... by default' without
    calling es_index, unlike the full-match sibling); the dict of inserted
    docs is returned explicitly.
    """
    mylog = blacklist_tools.getlog()
    tmpThreat = {}
    try:
        # enrich the whole batch with x-force reputation info
        new_subnetlpm = get_xforce(data, 0)
        for entry in data:
            ip_es = entry.keys()[0]   # the alerted ES ip
            ipseg = entry[ip_es]      # the blacklist entry it matched
            doc = _base_other_match_doc(ip_es, ipseg, msg, match_types,
                                        index, timestamp, aggs_name)
            info = new_subnetlpm[ip_es]
            # a high x-force score escalates the alert level to warn
            if info.get('score') and float(info['score']) >= 4.3:
                doc['level'] = 'warn'
                doc['xforce_marks'] = float(info['score'])
            elif not info.get('score'):
                doc['xforce_marks'] = 0
            else:
                doc['xforce_marks'] = float(info['score'])
            # human-readable message: categories, geo, company
            msg_info = ''
            if 'cats' in info:
                for ky, vals in info['cats'].items():
                    msg_info = msg_info + str(ky) + ':' + str(vals) + '%;'
            if info.get('geo') and info['geo'].strip():
                msg_info = msg_info + 'geo:' + info['geo'] + ';'
            if info.get('company') and info['company'].strip():
                msg_info = msg_info + 'company:' + info['company']
            # drop a trailing ';' (an empty msg_info used to raise IndexError)
            if msg_info.endswith(';'):
                doc['xforce_msg'] = msg_info[:-1]
            else:
                doc['xforce_msg'] = msg_info
            es_insert.es_index(doc)
            tmpThreat[ip_es] = doc
            mylog.info('insert {0} with xforce'.format(match_types))
    except Exception as e:
        # x-force enrichment failed: insert bare documents for every match
        mylog.error(e)
        for entry in data:
            ip_es = entry.keys()[0]
            ipseg = entry[ip_es]
            doc = _base_other_match_doc(ip_es, ipseg, msg, match_types,
                                        index, timestamp, aggs_name)
            es_insert.es_index(doc)  # fix: fallback previously never indexed
            tmpThreat[ip_es] = doc
            mylog.info('insert {0} by default'.format(match_types))
    return tmpThreat


def _base_other_match_doc(ip_es, ipseg, msg, match_types, index, timestamp, aggs_name):
    # Build the alert document fields shared by the x-force and fallback paths.
    doc = {}
    if match_types == "subnet_lpm_match":
        # LPM cannot recover the exact blacklist subnet: borrow the metadata
        # of an arbitrary current blacklist entry
        key1 = msg.keys()[0]
        ipseg = key1
        tmptype = msg[key1]['desc_subtype'].split(';')
        doc['desc_subtype'] = tmptype[0].split(':')[0] + ';' + tmptype[1]
    else:
        doc['desc_subtype'] = msg[ipseg]['desc_subtype']
    doc['level'] = msg[ipseg]['level']
    doc['type'] = 'mal_ip'
    doc['desc_type'] = '[mal_ip] Request of suspect IP detection.'
    doc['subtype'] = msg[ipseg]['subtype']
    doc['match_type'] = match_types
    doc[aggs_name] = ip_es
    doc['@timestamp'] = timestamp
    doc['index'] = index
    return doc
def main(): mylog = blacklist_tools.getlog() dict = bambenek_ip(mylog) print len(dict) store_json(dict, 'bambenek_ip') mylog.info("update bambenek_ip!")
def subnet_lpm(subnet, es_ip):
    """Partition blacklist subnets by mask width, compile the LPM-friendly
    ones (/16 and /21../24) into wildcard rules, persist every bucket to disk
    (merging with earlier saves) and match all ES IPs against the rule table.

    subnet: {subnet_cidr: metadata}; es_ip: iterable of dotted-quad strings.
    Returns (subnet_result, sndict, sn_lte16, sn_gte24):
      subnet_result -- [{ip: 'subnet_lpm_match'}, ...] LPM hits
      sndict        -- masks in (16, 21), left for other matchers
      sn_lte16      -- masks below /16, currently not handled
      sn_gte24      -- masks above /24 merged with sndict, for range matching
    """
    mylog = blacklist_tools.getlog()
    lpm.init()
    sndict = {}
    fpath = parser_config.get_store_path()[1]
    sn_lte16 = {}
    lpmdict = {}
    sn_gte24 = {}
    ip_subnet = subnet.keys()
    for sn in ip_subnet:
        subnet_split = sn.split('/')
        ip_num = ip_split_num(subnet_split[0])
        netMask = int(subnet_split[1])
        if (sn == '192.168.0.0/16' or sn == '172.16.0.0/12'
                or sn == '10.0.0.0/8'):
            # skip private networks
            continue
        elif (netMask < 16):
            # too broad; not handled for now
            sn_lte16[sn] = subnet[sn]
        elif (netMask == 16):
            # compile an a.b.*.* wildcard rule
            lpmdict[sn] = subnet[sn]
            newip1 = []
            ip_num[2] = ip_num[2] | 1
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append('*')
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
        elif (netMask >= 21 and netMask <= 24):
            # expand into 2^(24-mask) a.b.c.* rules covering the block
            lpmdict[sn] = subnet[sn]
            idx = pow(2, 24 - netMask) - 1
            ip_base = ip_num[2] & (255 - idx)
            i = 0
            while (i <= idx):
                newip1 = []
                ipstr1 = ''
                ip_num[2] = ip_base + i
                newip1.append(str(ip_num[0]))
                newip1.append(str(ip_num[1]))
                newip1.append(str(ip_num[2]))
                newip1.append('*')
                ipstr1 = '.'.join(newip1)
                lpm.insert_rule(ipstr1)
                i = i + 1
        elif (netMask > 24):
            # handled later via explicit range match
            sn_gte24[sn] = subnet[sn]
        else:
            # netMask>16 and netMask<21, save them
            sndict[sn] = subnet[sn]
    mylog.info('lpm data size: %d' % len(lpmdict))
    mylog.info('remaining subnet size:%d' % len(sndict))
    mylog.info('lte16 size:%d' % len(sn_lte16))
    mylog.info('gte24 size:%d' % len(sn_gte24))
    # save each non-empty bucket, merging with any file already on disk
    snpath = getsavepath(fpath, 'remaining_subnet')
    ltepath = getsavepath(fpath, 'lte16_subnet')
    lpmpath = getsavepath(fpath, 'lpm_subnet_data')
    gtepath = getsavepath(fpath, 'gte24_subnet')
    if (sndict):
        if (os.path.exists(snpath)):
            newsndict = blacklist_tools.load_dict(snpath)
            newsndict1 = dict(newsndict, **sndict)  # merge
            saveToJSON(newsndict1, fpath, "remaining_subnet")
        else:
            saveToJSON(sndict, fpath, "remaining_subnet")
    if (sn_lte16):
        if (os.path.exists(ltepath)):
            newlte = blacklist_tools.load_dict(ltepath)
            newlte16 = dict(newlte, **sn_lte16)  # merge
            saveToJSON(newlte16, fpath, 'lte16_subnet')
        else:
            saveToJSON(sn_lte16, fpath, 'lte16_subnet')
    if (lpmdict):
        if (os.path.exists(lpmpath)):
            newlpmdict = blacklist_tools.load_dict(lpmpath)
            newlpmdict1 = dict(newlpmdict, **lpmdict)  # merge
            saveToJSON(newlpmdict1, fpath, 'lpm_subnet_data')
        else:
            saveToJSON(lpmdict, fpath, 'lpm_subnet_data')
    if (sn_gte24):
        if (os.path.exists(gtepath)):
            newlpmdict = blacklist_tools.load_dict(gtepath)
            newlpmdict1 = dict(newlpmdict, **sn_gte24)  # merge
            saveToJSON(newlpmdict1, fpath, 'gte24_subnet')
        else:
            saveToJSON(sn_gte24, fpath, 'gte24_subnet')
    sn_gte24 = dict(sn_gte24, **sndict)  # merge
    # match every ES ip against the compiled rule table
    subnet_result = []
    for ips in es_ip:
        ip_es_num = socket.ntohl(
            struct.unpack("I", socket.inet_aton(str(ips)))[0])
        if (lpm.search_ip(ip_es_num)):
            subnet_result.append({ips: 'subnet_lpm_match'})
    return subnet_result, sndict, sn_lte16, sn_gte24
def main(): mylog = blacklist_tools.getlog() dict = firehol_level1(mylog) print len(dict.keys()) store_json(dict, 'firehol_level1') mylog.info("update firehol!")
def subnet_lpm(subnet, es_ip):
    """Build an LPM rule table from blacklist subnets (/16, /23, /24, /25 are
    expanded into wildcard rules), persist the leftover buckets, and match
    every ES IP against the table.

    subnet: {subnet_cidr: metadata}; es_ip: iterable of dotted-quad strings.
    Returns (subnet_result, sndict, sn_lte16):
      subnet_result -- [{ip: 'subnet_lpm_match'}, ...] LPM hits
      sndict        -- masks not handled here, saved for other matchers
      sn_lte16      -- masks below /16, currently not handled

    Fix: the three persistence guards were inverted (``if (not sndict)`` etc.)
    so buckets were only "saved" when empty; they now persist non-empty dicts,
    matching the newer variant of this function.
    """
    mylog = blacklist_tools.getlog()
    lpm.init()
    sndict = {}
    fpath = parser_config.get_store_path()[1]
    sn_lte16 = {}
    lpmdict = {}
    ip_subnet = subnet.keys()
    for sn in ip_subnet:
        subnet_split = sn.split('/')
        ip_num = ip_split_num(subnet_split[0])
        netMask = int(subnet_split[1])
        if sn in ('192.168.0.0/16', '172.16.0.0/12', '10.0.0.0/8'):
            # skip private networks
            continue
        elif netMask < 16:
            # too broad; not handled for now
            sn_lte16[sn] = subnet[sn]
        elif netMask == 16:
            # one a.b.*.* wildcard rule
            lpmdict[sn] = subnet[sn]
            ip_num[2] = ip_num[2] | 1
            lpm.insert_rule('.'.join([str(ip_num[0]), str(ip_num[1]),
                                      '*', '*']))
        elif netMask == 23:
            # two a.b.c.* rules covering both /24 halves
            lpmdict[sn] = subnet[sn]
            ip_num[2] = ip_num[2] | 1
            lpm.insert_rule('.'.join([str(ip_num[0]), str(ip_num[1]),
                                      str(ip_num[2]), '*']))
            ip_num[2] = ip_num[2] & 254
            lpm.insert_rule('.'.join([str(ip_num[0]), str(ip_num[1]),
                                      str(ip_num[2]), '*']))
        elif netMask == 25 or netMask == 24:
            # treat /25 like /24: one a.b.c.* rule
            lpmdict[sn] = subnet[sn]
            lpm.insert_rule('.'.join([str(ip_num[0]), str(ip_num[1]),
                                      str(ip_num[2]), '*']))
        else:
            # netMask > 16 and not in (16, 23, 24, 25): save them
            sndict[sn] = subnet[sn]
    mylog.info('lpm data size: %d' % len(lpmdict))
    # persist each non-empty bucket, merging with any file already on disk
    snpath = getsavepath(fpath, 'remain_subnet')
    ltepath = getsavepath(fpath, 'lte16_subnet')
    lpmpath = getsavepath(fpath, 'lpm_subnet_data')
    if sndict:  # fix: was "if (not sndict)"
        if os.path.exists(snpath):
            merged = dict(blacklist_tools.load_dict(snpath), **sndict)
            saveToJSON(merged, fpath, "remain_subnet")
        else:
            saveToJSON(sndict, fpath, "remain_subnet")
    if sn_lte16:  # fix: was "if (not sn_lte16)"
        if os.path.exists(ltepath):
            merged = dict(blacklist_tools.load_dict(ltepath), **sn_lte16)
            saveToJSON(merged, fpath, 'lte16_subnet')
        else:
            saveToJSON(sn_lte16, fpath, 'lte16_subnet')
    if lpmdict:  # fix: was "if (not lpmdict)"
        if os.path.exists(lpmpath):
            merged = dict(blacklist_tools.load_dict(lpmpath), **lpmdict)
            saveToJSON(merged, fpath, 'lpm_subnet_data')
        else:
            saveToJSON(lpmdict, fpath, 'lpm_subnet_data')
    # match every ES ip against the compiled rule table
    subnet_result = []
    for ips in es_ip:
        ip_es_num = socket.ntohl(
            struct.unpack("I", socket.inet_aton(str(ips)))[0])
        if lpm.search_ip(ip_es_num):
            subnet_result.append({ips: 'subnet_lpm_match'})
    return subnet_result, sndict, sn_lte16