def subnet_range_match(sn_gte24, es_ip):
    """Match each ES IP against the /24-and-longer subnet ranges by binary search.

    sn_gte24: dict of CIDR string -> blacklist info; converted to numeric
    [start, end] ranges by subnetTOrange().
    es_ip: iterable of dotted-quad IP strings pulled from ES.
    Returns a list of {ip: matched_subnet} dicts, one entry per matched IP.
    """
    mylog = set_logger()
    sn_gte24_list = []
    # Convert every subnet to its numeric [start, end] range, then sort the
    # (subnet, range) pairs by range start so binary search is valid.
    # NOTE: .items() replaces the Python-2-only .iteritems().
    allrange = subnetTOrange(sn_gte24)
    newAllRange = sorted(allrange.items(), key=lambda kv: kv[1][0])
    rangeLen = len(newAllRange)
    for ips in es_ip:
        # Host-byte-order integer value of the dotted-quad address.
        ip_es_num = socket.ntohl(struct.unpack("I", socket.inet_aton(str(ips)))[0])
        nlow = 0
        nhigh = rangeLen - 1
        while nlow <= nhigh:
            nmid = (nlow + nhigh) // 2  # '//' keeps the index an int on Python 3
            subnet_num = newAllRange[nmid][1]  # [start, end]
            if subnet_num[0] <= ip_es_num <= subnet_num[1]:
                sn_gte24_list.append({ips: newAllRange[nmid][0]})
                break
            elif subnet_num[0] > ip_es_num:
                nhigh = nmid - 1
            else:  # subnet_num[1] < ip_es_num (only remaining case)
                nlow = nmid + 1
    return sn_gte24_list
def checkES(startTime, indx, aggs_name, serverNum, dport, tday, offset, query_strs):
    """Run one blacklist check pass against ES for the window ending at startTime.

    Builds the [gte, lte] query window (shifted back by `offset`), computes the
    local UTC-offset string for the alert timestamp, then delegates matching
    and insertion to match_insert.main().
    Returns the dict of flagged IPs, or {} on any error.
    """
    mylog = set_logger()
    try:
        mylog.info("[mal_ip] Starting check command.Time is:{0}".format(
            (startTime).strftime('%Y-%m-%d %H:%M:%S')))
        # Query window: [startTime - delta - offset, startTime - offset].
        gte = (startTime - delta - offset).strftime('%Y-%m-%d %H:%M:%S')
        lte = (startTime - offset).strftime('%Y-%m-%d %H:%M:%S')
        # Local UTC offset formatted "+HH:MM"; use the DST offset when the
        # zone defines one. '//' keeps the hour part integral on Python 3.
        if time.daylight == 0:
            time_zone = "%+03d:%02d" % (-(time.timezone // 3600),
                                        time.timezone % 3600 / 3600.0 * 60)
        else:
            time_zone = "%+03d:%02d" % (-(time.altzone // 3600),
                                        time.altzone % 3600 / 3600.0 * 60)
        timestamp = (startTime).strftime('%Y-%m-%dT%H:%M:%S.%f') + time_zone
        all_ip = match_insert.main(tday, indx, gte, lte, aggs_name, timestamp,
                                   serverNum, dport, time_zone, query_strs)
        mylog.info("[mal_ip] Check finish.")
        return all_ip
    except Exception as e:  # 'except Exception, e' is Python-2-only syntax
        mylog.error("[mal_ip] CheckES error: {0}".format(e))
        return {}
def update_blacklist_module(flgnum):
    """Run each configured blacklist downloader whose schedule is due.

    flgnum: number of completed runs today; a downloader with frequency N
    executes only when flgnum % N == 0.
    """
    mylog = set_logger()
    parser_blacklist = parser_config.get_func()
    # Path components of the downloader package directory.
    bl_file = parser_config.get_bl_path().split(os.path.sep)
    for filename in parser_blacklist.keys():
        times = int(parser_blacklist[filename])  # update frequency for this feed
        if flgnum % times == 0:  # this module is due on the current run
            try:
                # Import '<package>.<module>' dynamically and run its main().
                df = __import__('{0}.{1}'.format(bl_file[2].strip(' '), filename),
                                fromlist=True)
                df.main()
            except Exception as e:  # fixed Python-2-only 'except Exception, e'
                mylog.error("[mal_ip] Update blacklist error:{0}".format(e))
def subnetTOrange(sn24):
    """Map every subnet in *sn24* to its numeric [start, end] IP range.

    Returns {subnet: [start, end], ...} via subnet_range().
    """
    mylog = set_logger()
    return {cidr: subnet_range(cidr) for cidr in sn24}
def main(tday, flgnum):
    """Ensure today's data directory exists, then run the due blacklist updates."""
    mylog = set_logger()
    mylog.info("[mal_ip] Starting update command.")
    day_dir = parser_config.get_store_path() + str(tday) + os.path.sep
    if not os.path.exists(day_dir):
        os.mkdir(day_dir)
    update_blacklist_module(flgnum)
    mylog.info("[mal_ip] Update finish.")
def treatip(dataset, es_ip):
    """Match ES IPs against one blacklist dataset, then filter by whitelist.

    dataset: dict of blacklist entries (exact IPs, ranges and subnets mixed).
    es_ip: iterable of IP strings observed in ES.
    Returns (fullmatchlist, segmentlist, subnet_lpm, subnet_full).
    """
    mylog = set_logger()
    # Split the blacklist into exact IPs, IP ranges and CIDR subnets.
    full, segment, subnet = blacklist_tools.separate_ip(dataset)
    # Exact matches (list of IPs).
    full_list = full.keys()
    fullmatchlist = blacklist_tools.ip_full_match(full_list, es_ip)
    # Range matches: [{ip: ipsegment}, ...]
    segmentlist = blacklist_tools.int_ip_range(segment, es_ip)
    subnet_lpm = {}
    subnet_full = {}
    sndict = {}
    sn_lte16 = {}
    # BUGFIX: sn_gte24 was only bound inside the flg_lpm branch, so running
    # with flg_full==1 and flg_lpm!=1 raised NameError. Default to empty.
    sn_gte24 = {}
    # Config chooses which subnet-match strategies run.
    flg_lpm, flg_full = parser_config.get_method()
    if 1 == flg_lpm:
        # LPM subnet match; also returns the leftover subnet buckets.
        subnet_lpm, sndict, sn_lte16, sn_gte24 = blacklist_tools.subnet_lpm(
            subnet, es_ip)
    if 1 == flg_full:
        # Range-based match for the /24-and-longer subnets.
        subnet_full = blacklist_tools.subnet_range_match(sn_gte24, es_ip)
    # Optional whitelist filtering of all four result sets.
    wlflg, whitepath = parser_config.get_self_filelist('whitelist')
    if wlflg == 1:
        if os.path.exists(whitepath):
            filelist = get_all_file(whitepath)
            for fname in filelist:
                fpath = whitepath + fname
                # Whitelist files use their own loader (comma-separated IPs).
                whitedata = blacklist_tools.load_whitelist(fpath)
                fullmatchlist, segmentlist, subnet_lpm, subnet_full = \
                    blacklist_tools.whitelist_filter(
                        fullmatchlist, segmentlist, subnet_lpm, subnet_full,
                        whitedata)
        else:
            mylog.warn('[mal_ip] Match_insert warn: no self_whitelist_path')
    return fullmatchlist, segmentlist, subnet_lpm, subnet_full
def main(tday, index, gte, lte, aggs_name, timestamp, serverNum, dport, time_zone, querys_str):
    """Locate a usable blacklist directory, pull ES IPs, and run the match.

    Walks back up to 7 previous days looking for downloaded blacklist files
    (for hosts that were offline) and falls back to the configured default
    list when nothing is found. Returns the dict of flagged threat IPs
    (empty when nothing matched or the check failed).
    """
    mylog = set_logger()
    path = parser_config.get_store_path() + str(tday) + os.path.sep
    cnt = 0
    allThreatIP = {}  # aggregated suspicious destination IPs
    while cnt < 8:
        if cnt < 7 and os.path.exists(path):
            filelist = get_all_file(path)
            if not filelist:
                # Directory exists but is empty: step one day back.
                lday = tday + datetime.timedelta(-1)
                path = parser_config.get_store_path() + str(lday) + os.path.sep
                cnt = cnt + 1
                continue
            else:
                break
        elif cnt == 7 or not os.path.exists(path):
            # 7 days exhausted or directory missing: use the default list.
            dflg, defaultpath = parser_config.get_self_filelist('defaultlist')
            if dflg == 1:
                filelist = get_all_file(defaultpath)
                path = defaultpath
            else:
                filelist = []
            break
        else:
            # Check the previous day's download directory.
            lday = tday + datetime.timedelta(-1)
            path = parser_config.get_store_path() + str(lday) + os.path.sep
            cnt = cnt + 1
    # Pull the candidate IP list from ES.
    es = ESclient(server=serverNum, port=dport)
    ip_es_list = es.get_es_ip(index, gte, lte, aggs_name, time_zone, querys_str)
    mylog.debug('[mal_ip] ES data size:%d ' % len(ip_es_list))
    if filelist:
        try:
            # Check every blacklist file and insert the match results.
            tmpThreatIP = checkAndInsert(path, filelist, ip_es_list, index,
                                         aggs_name, timestamp, serverNum, dport)
            if tmpThreatIP:
                allThreatIP = dict(allThreatIP, **tmpThreatIP)
        except Exception as e:  # fixed Python-2-only 'except Exception, e'
            mylog.error('[mal_ip] Check blacklist error:{}'.format(e))
    # BUGFIX: the result was accumulated but never returned; checkES()
    # assigns this call's value and expects a dict back.
    return allThreatIP
def load_whitelist(whitepath):
    """Read a comma-separated whitelist file into {ip: info} form.

    Returns an empty dict (and logs a warning) when the file is absent.
    """
    mylog = set_logger()
    datadic = {}
    # Guard clause: nothing to load when the path does not exist.
    if not os.path.exists(whitepath):
        mylog.warn('[mal_ip] NO whitelist path!')
        return datadic
    with open(whitepath, 'r') as handle:
        for entry in handle.read().split(','):
            datadic[entry] = {
                'subtype': 'whitelist',
                'desc_subtype': 'local whitelist ip'
            }
    return datadic
def main():
    """Scheduler loop for the surbl check: sleep until due, run, advance.

    Loops forever; a failed run is logged and the schedule still advances by
    `detaltime` so one bad pass cannot stall the loop.
    """
    mylog = set_logger()
    stime, detaltime = my_tools.get_starttime()
    while True:
        if stime > datetime.datetime.now():
            mylog.info("[surbl] time sleep ...")
            time.sleep((stime - datetime.datetime.now()).total_seconds())
        else:
            try:
                mylog.info("[surbl] start check . Time is {0}".format(stime))
                surbl_run(stime, detaltime)
            except Exception as e:  # fixed Python-2-only 'except Exception, e'
                mylog.error("[surbl] error:{0}".format(e))
            stime = stime + detaltime
def store_json(dict, name):
    '''
    Serialize *dict* as JSON into today's store directory as <name>.json.

    NOTE(review): the first parameter shadows the builtin ``dict``; the name
    is kept unchanged for compatibility with existing callers.
    '''
    mylog = set_logger()
    today = datetime.datetime.now().date()
    savepath = (parser_config.get_store_path() + str(today) + os.path.sep
                + name + '.json')
    try:
        with open(savepath, 'w') as out:
            out.write(json.dumps(dict))
    except IOError:
        # Typically means the dated directory rolled over mid-run.
        mylog.warning('[mal_ip] Change date time! download again!')
def get_self_filelist(keywords):
    """Return (flag, path) for a self-managed list directory.

    keywords: 'whitelist', 'blacklist' or 'defaultlist'. Reads the
    '<keywords>_flg' and 'self_<keywords>_path' entries from the file_path
    config section. Returns (0, '') when the config is missing or broken.
    """
    mylog = set_logger()
    optionname = 'self_' + keywords + '_path'
    optionflg = keywords + '_flg'
    try:
        paths = __conf_ip["file_path"]
        flg = paths[optionflg]
        bpath = paths[optionname]
        path = get_store_path() + bpath + os.path.sep
        return flg, path
    except Exception:  # fixed Python-2-only 'except Exception, e' (e unused)
        mylog.error('[mal_ip] Parser_config error: config file error!')
        return 0, ''
def first_check(starttime, delta, timezone):
    """Run one surbl pass: fetch DNS data, clean, analyse, insert alerts."""
    mylog = set_logger()
    server, port, alert_idx, data_idx = my_tools.get_es_server()
    window_gte = (starttime - delta).strftime('%Y-%m-%d %H:%M:%S')
    window_lte = starttime.strftime('%Y-%m-%d %H:%M:%S')
    es = ES_class.ESClient(iserver=server, iport=port)
    # Raw DNS records for the window, then cleaned for analysis.
    raw_records = es.get_dns_data(data_idx, window_gte, window_lte, timezone)
    cleaned = my_tools.clean_dns_data(raw_records)
    mylog.debug("[surbl] dns data size:{0}".format(len(cleaned)))
    mylog.debug("[surbl] analyse data ...")
    docs = my_tools.analyse_info(cleaned)
    mylog.debug("[surbl] insert ES ...")
    my_tools.insert_alert(es, docs, alert_idx)
def new_run(entertime, delta, serverNum, dport, offset, querys, indx='tcp-*', aggs_name='dip'):
    """One scheduling step of the mal_ip pipeline.

    Waits until startTime, refreshes the blacklist feeds when online and due,
    runs the ES check plus the optional C&C second check, then advances the
    schedule by `delta` (also on failure, so an error cannot stall the run).
    """
    updatetime = datetime.datetime.now()
    startTime = entertime
    flgnum = 0  # number of runs so far today (drives update frequency)
    tday = datetime.datetime.now().date()
    mylog = set_logger()
    offlineFlg = isOffline()
    if tday != datetime.datetime.now().date():
        # Day rollover: reset the per-day counter and create today's dir.
        flgnum = 0
        tday = datetime.datetime.now().date()
        dirpath = parser_config.get_store_path() + str(tday) + os.path.sep
        os.mkdir(dirpath)
    while datetime.datetime.now() < startTime:
        mylog.info("[mal_ip] Time sleeping ...")
        time.sleep((startTime - datetime.datetime.now()).total_seconds())
    try:
        # Refresh the blacklist source data at most once per update window,
        # and only when the host has network access.
        if offlineFlg is False:
            if datetime.datetime.now() >= updatetime:
                update_blacklist.main(tday, flgnum)
                updatetime = updatetime + delta
        all_IP = checkES(startTime, indx, aggs_name, serverNum, dport, tday,
                         offset, querys)
        # Optional second-stage C&C check that can escalate info IPs to warn.
        flg_C2 = parser_config.get_ip_secondcheck()
        if flg_C2 == 1:
            mylog.debug('[mal_ip] Info_IP size:{}'.format(len(all_IP)))
            ip_check_C2.main(startTime, all_IP, serverNum, dport, indx)
        startTime = startTime + delta
        flgnum += 1
    except Exception as e:  # fixed Python-2-only 'except Exception, e'
        mylog.error("[mal_ip] Ontime_run error:{0}".format(e))
        startTime = startTime + delta
def checkAndInsert(path, filelist, ip_es_list, index, aggs_name, timestamp, serverNum, dport):
    """Match ES IPs against every blacklist file and insert the alerts.

    Returns the merged {ip: doc} dict of all threat IPs across the files.
    """
    mylog = set_logger()
    all_threatIP = {}
    for fname in filelist:
        fpath = path + fname
        dataset = load_dict(fpath)
        if dataset:
            # NOTE: removed the unused local 'msg' (it also relied on the
            # Python-2-only dataset.keys()[0] indexing).
            # Run the four match strategies against this file's entries.
            fullmatch, segmentmatch, subnetlpm, subnetfull = treatip(
                dataset, ip_es_list)
            threatIP = insert_result(index, aggs_name, timestamp, serverNum,
                                     dport, fullmatch, segmentmatch,
                                     subnetlpm, subnetfull, dataset)
            if threatIP:
                # Merge this file's hits into the running result.
                all_threatIP = dict(all_threatIP, **threatIP)
    return all_threatIP
def main():
    """Download the SSL abuse feed and persist it as JSON."""
    mylog = set_logger()
    feed = ssl_abuse(mylog)
    store_json(feed, 'ssl_abuse')
def main():
    """Download the stopforumspam toxic IP-range feed and persist it as JSON."""
    mylog = set_logger()
    feed = stopforumspam_toxic_ip_range(mylog)
    store_json(feed, 'stopforumspam_toxic_ip_range')
def other_match_type(es_insert, data, match_types, msg, index, timestamp, aggs_name):
    """Insert alerts for segment/subnet matches, enriched with X-Force data.

    data: [{es_ip: matched_entry}, ...]; msg: the blacklist dataset used to
    look up subtype/level; match_types: e.g. 'subnet_lpm_match'.
    Falls back to a plain (un-enriched) alert when X-Force data is missing
    or malformed for an IP. Returns {es_ip: doc} for every inserted alert.
    """
    mylog = set_logger()
    tmpThreat = {}
    # X-Force reputation lookup for all candidate IPs (mode 0 = subnet data).
    new_subnetlpm = get_xforce(data, 0)
    for i in range(len(data)):
        ip_es = list(data[i].keys())[0]  # the ES IP that triggered the alert
        ipseg = data[i][ip_es]           # the blacklist entry it matched
        try:
            doc = _other_base_doc(match_types, msg, ipseg, ip_es, index,
                                  timestamp, aggs_name)
            # ---- X-Force enrichment: numeric score + descriptive message ----
            info = new_subnetlpm[ip_es]
            if "score" in info and info["score"]:
                doc['xforce_marks'] = float(info["score"])
            else:
                doc['xforce_marks'] = 0
            msg_info = ''
            if "cats" in info:
                for ky, vals in info["cats"].items():
                    msg_info = msg_info + str(ky) + ':' + str(vals) + '%;'
            if "geo" in info and (info["geo"]).strip():
                msg_info = msg_info + 'geo:' + info["geo"] + ';'
            if "company" in info and (info["company"]).strip():
                msg_info = msg_info + 'company:' + info["company"]
            # Drop a trailing ';' (raises IndexError on empty msg_info, which
            # the except below turns into the plain-alert fallback, matching
            # the original control flow).
            if msg_info[-1] == ';':
                doc['xforce_msg'] = msg_info[:-1]
            else:
                doc['xforce_msg'] = msg_info
        except Exception as e:  # fixed Python-2-only 'except Exception, e'
            # Enrichment failed: rebuild the plain alert without X-Force data.
            doc = _other_base_doc(match_types, msg, ipseg, ip_es, index,
                                  timestamp, aggs_name)
        # Geo-locate the destination IP and insert the alert document.
        dd = ipipCheckGeo(ip_es)
        doc['dst_country'] = dd[ip_es][0]
        doc['dst_province'] = dd[ip_es][1]
        doc['dst_city'] = dd[ip_es][2]
        doc['eventid'] = 102001
        es_insert.es_index(doc)
        tmpThreat[ip_es] = doc
    # BUGFIX(review): the accumulated result dict was never returned.
    return tmpThreat


def _other_base_doc(match_types, msg, ipseg, ip_es, index, timestamp, aggs_name):
    """Build the base alert document shared by the enriched and plain paths."""
    doc = {}
    if match_types == "subnet_lpm_match":
        # LPM cannot recover the exact matched subnet; borrow the attribute
        # fields from an arbitrary entry of the current blacklist.
        key1 = list(msg.keys())[0]
        ipseg = key1
        tmptype = msg[key1]['desc_subtype'].split(';')
        doc['desc_subtype'] = tmptype[0].split(':')[0] + ';' + tmptype[1]
    else:
        doc['desc_subtype'] = msg[ipseg]['desc_subtype']
    doc['level'] = msg[ipseg]['level']
    doc['type'] = 'mal_ip'
    doc['desc_type'] = '[mal_ip] Request of suspect IP detection.'
    doc['subtype'] = msg[ipseg]['subtype']
    doc['match_type'] = match_types
    doc[aggs_name] = ip_es
    doc['@timestamp'] = timestamp
    doc['index'] = index
    return doc
def full_match_type(es_insert, data, msg, index, timestamp, aggs_name):
    """Insert alerts for exact-IP blacklist matches, enriched with X-Force.

    data: list of matched IP strings; msg: blacklist dataset keyed by IP.
    Falls back to a plain (un-enriched) alert when X-Force data is missing
    or malformed for an IP. Returns {ip: doc} for every inserted alert.
    """
    mylog = set_logger()
    tmpThreat = {}
    new_fullmatch = get_xforce(data, 1)  # X-Force lookup, mode 1 = full IPs
    for i in range(len(data)):
        tmpip = data[i]
        try:
            doc = _full_base_doc(msg, tmpip, index, timestamp, aggs_name)
            # ---- X-Force enrichment: numeric score + descriptive message ----
            info = new_fullmatch[tmpip]
            if 'score' in info and info["score"]:
                doc['xforce_marks'] = float(info["score"])
            else:
                doc['xforce_marks'] = 0
            msg_info = ''
            if "cats" in info:
                for ky, vals in info["cats"].items():
                    msg_info = msg_info + str(ky) + ':' + str(vals) + '%;'
            if "geo" in info and (info["geo"]).strip():
                msg_info = msg_info + 'geo:' + info["geo"] + ';'
            if "company" in info and (info["company"]).strip():
                msg_info = msg_info + 'company:' + info["company"]
            # Drop a trailing ';' (raises IndexError on empty msg_info, which
            # the except below turns into the plain-alert fallback, matching
            # the original control flow).
            if msg_info[-1] == ';':
                doc['xforce_msg'] = msg_info[:-1]
            else:
                doc['xforce_msg'] = msg_info
        except Exception as e:  # fixed Python-2-only 'except Exception, e'
            # Enrichment failed: rebuild the plain alert without X-Force data.
            doc = _full_base_doc(msg, tmpip, index, timestamp, aggs_name)
        # Geo-locate the destination IP and insert the alert document.
        dd = ipipCheckGeo(tmpip)
        doc['dst_country'] = dd[tmpip][0]
        doc['dst_province'] = dd[tmpip][1]
        doc['dst_city'] = dd[tmpip][2]
        doc['eventid'] = 102001
        es_insert.es_index(doc)
        tmpThreat[tmpip] = doc
    # BUGFIX(review): the accumulated result dict was never returned.
    return tmpThreat


def _full_base_doc(msg, tmpip, index, timestamp, aggs_name):
    """Base alert document for a full-match IP, with defaults for missing fields."""
    doc = {}
    if 'level' in msg[tmpip]:
        doc['level'] = msg[tmpip]['level']
    else:
        doc['level'] = 'info'
    doc['type'] = 'mal_ip'
    doc['desc_type'] = '[mal_ip] Request of suspect IP detection.'
    if 'desc_subtype' in msg[tmpip]:
        doc['desc_subtype'] = msg[tmpip]['desc_subtype']
    else:
        doc['desc_subtype'] = 'alerts from local blacklist'
    if 'type' in msg[tmpip]:
        doc['subtype'] = msg[tmpip]['type']
    else:
        doc['subtype'] = 'suspect'
    doc['match_type'] = "full_match"
    doc[aggs_name] = tmpip
    doc['@timestamp'] = timestamp
    doc['index'] = index
    return doc
def main():
    """Download the firehol level-1 feed and persist it as JSON."""
    mylog = set_logger()
    feed = firehol_level1(mylog)
    store_json(feed, 'firehol_level1')
def main():
    """Download the dshield subnet feed and persist it as JSON."""
    mylog = set_logger()
    feed = dshield_subnet(mylog)
    store_json(feed, 'dshield_subnet')
def subnet_lpm(subnet, es_ip):
    """Bucket blacklist subnets by mask length, feed the LPM engine, and match.

    subnet: dict of CIDR string -> blacklist info.
    es_ip: iterable of dotted-quad IP strings from ES.
    Returns (subnet_result, sndict, sn_lte16, sn_gte24):
      - subnet_result: [{ip: 'subnet_lpm_match'}, ...] LPM hits;
      - sndict: subnets with 16 < mask < 21 (not handled by LPM here);
      - sn_lte16: subnets with mask < 16 (currently not processed);
      - sn_gte24: subnets with mask > 24, merged with sndict, for range match.
    Each bucket is also persisted (merged with any existing file) as JSON.
    """
    mylog = set_logger()
    lpm.init()
    sndict = {}
    fpath = parser_config.get_store_path()
    sn_lte16 = {}
    lpmdict = {}
    sn_gte24 = {}
    ip_subnet = subnet.keys()
    for sn in ip_subnet:
        subnet_split = sn.split('/')
        # ip_num: the four octets of the network address as a list of ints.
        ip_num = ip_split_num(subnet_split[0])
        netMask = int(subnet_split[1])
        if (sn == '192.168.0.0/16' or sn == '172.16.0.0/12'
                or sn == '10.0.0.0/8'):
            # Skip RFC1918 private networks entirely.
            continue
        elif (netMask < 16):
            # Masks shorter than /16 are set aside, not matched for now.
            sn_lte16[sn] = subnet[sn]
        elif (netMask == 16):
            # /16: insert one 'a.b.*.*' wildcard rule into the LPM engine.
            lpmdict[sn] = subnet[sn]
            newip1 = []
            ip_num[2] = ip_num[2] | 1
            newip1.append(str(ip_num[0]))
            newip1.append(str(ip_num[1]))
            newip1.append('*')
            newip1.append('*')
            ipstr1 = '.'.join(newip1)
            lpm.insert_rule(ipstr1)
        elif (netMask >= 21 and netMask <= 24):
            # /21../24: expand into 2^(24-mask) 'a.b.c.*' rules, one per
            # covered /24 (idx is the number of extra third-octet values).
            lpmdict[sn] = subnet[sn]
            idx = pow(2, 24 - netMask) - 1
            ip_base = ip_num[2] & (255 - idx)  # align third octet to the mask
            i = 0
            while (i <= idx):
                newip1 = []
                ipstr1 = ''
                ip_num[2] = ip_base + i
                newip1.append(str(ip_num[0]))
                newip1.append(str(ip_num[1]))
                newip1.append(str(ip_num[2]))
                newip1.append('*')
                ipstr1 = '.'.join(newip1)
                lpm.insert_rule(ipstr1)
                i = i + 1
        elif (netMask > 24):
            # Longer than /24: handled later by numeric range matching.
            sn_gte24[sn] = subnet[sn]
        else:
            # 16 < mask < 21: kept aside for the range matcher as well.
            sndict[sn] = subnet[sn]
    # Persist every bucket, merging with any file saved by a previous run.
    snpath = getsavepath(fpath, 'remaining_subnet')
    ltepath = getsavepath(fpath, 'lte16_subnet')
    lpmpath = getsavepath(fpath, 'lpm_subnet_data')
    gtepath = getsavepath(fpath, 'gte24_subnet')
    if (sndict):
        if (os.path.exists(snpath)):
            newsndict = load_dict(snpath)
            newsndict1 = dict(newsndict, **sndict)  # merge
            saveToJSON(newsndict1, fpath, "remaining_subnet")
        else:
            saveToJSON(sndict, fpath, "remaining_subnet")
    if (sn_lte16):
        if (os.path.exists(ltepath)):
            newlte = load_dict(ltepath)
            newlte16 = dict(newlte, **sn_lte16)  # merge
            saveToJSON(newlte16, fpath, 'lte16_subnet')
        else:
            saveToJSON(sn_lte16, fpath, 'lte16_subnet')
    if (lpmdict):
        if (os.path.exists(lpmpath)):
            newlpmdict = load_dict(lpmpath)
            newlpmdict1 = dict(newlpmdict, **lpmdict)  # merge
            saveToJSON(newlpmdict1, fpath, 'lpm_subnet_data')
        else:
            saveToJSON(lpmdict, fpath, 'lpm_subnet_data')
    if (sn_gte24):
        if (os.path.exists(gtepath)):
            newlpmdict = load_dict(gtepath)
            newlpmdict1 = dict(newlpmdict, **sn_gte24)  # merge
            saveToJSON(newlpmdict1, fpath, 'gte24_subnet')
        else:
            saveToJSON(sn_gte24, fpath, 'gte24_subnet')
    # The range matcher also covers the 16<mask<21 leftovers.
    sn_gte24 = dict(sn_gte24, **sndict)  # merge
    # Match: query the LPM engine with each ES IP (host byte order).
    subnet_result = []
    for ips in es_ip:
        ip_es_num = socket.ntohl(struct.unpack("I",
                                               socket.inet_aton(str(ips)))[0])
        if (lpm.search_ip(ip_es_num)):
            subnet_result.append({ips: 'subnet_lpm_match'})
    return subnet_result, sndict, sn_lte16, sn_gte24
def main():
    """Download the bogons feed, report its size, and persist it as JSON."""
    mylog = set_logger()
    data = bogons_ip(mylog)  # local renamed: 'dict' shadowed the builtin
    # print() call form works on both Python 2 and 3 (was a py2 statement).
    print(len(data))
    store_json(data, 'bogons')
def main():
    """Download the Spamhaus DROP list and persist it as JSON."""
    mylog = set_logger()
    feed = dropList(mylog)
    store_json(feed, 'dropList')
def main():
    """Download the bambenek IP feed, report its size, and persist it as JSON."""
    mylog = set_logger()
    data = bambenek_ip(mylog)  # local renamed: 'dict' shadowed the builtin
    # print() call form works on both Python 2 and 3 (was a py2 statement).
    print(len(data))
    store_json(data, 'bambenek_ip')
def main():
    """Download the ZeuS IP feed and persist it as JSON."""
    mylog = set_logger()
    feed = ZeuS_ip(mylog)
    store_json(feed, 'ZeuS_ip')
def main():
    """Download the mining-server IP list and persist it as JSON."""
    mylog = set_logger()
    feed = MiningServerIPList(mylog)
    store_json(feed, 'MiningServerIPList')
def main():
    """Download the bitnodes feed and persist it as JSON."""
    mylog = set_logger()
    feed = bitnodes(mylog)
    store_json(feed, 'bitnodes')
def main():
    """Download the Feodo tracker IP feed and persist it as JSON."""
    mylog = set_logger()
    feed = feodo_ip(mylog)
    store_json(feed, 'feodo_ip')