def GetArea(req):
    """Build a "key=value," string describing the area of an IP.

    NOTE(review): the lookup uses a hard-coded IP ('202.99.224.68') instead
    of the client IP -- the commented-out line suggests a debug leftover;
    confirm before shipping.

    :param req: request object passed through to Get_Client_Ip().
    :return: string like "province=<code>,city=<code>,isp=<code>,ip=<ip>,"
    """
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    Key = ['country', 'province', 'city', 'county', 'isp', 'ip']
    # Area=IP.find(Get_Client_Ip(req)).split('\t')
    Area = IP.find('202.99.224.68').split('\t')
    Area.append(Get_Client_Ip(req))
    Mesage = dict(zip(Key, Area))
    ResultStr = ''
    # Only these four fields reach the output; the original also computed
    # codes for country/county (wasted work) and emitted the fields in
    # arbitrary dict-key order -- this loop makes the order deterministic.
    for key in ['province', 'city', 'isp', 'ip']:
        if key == 'ip':
            CodeValue = Mesage[key]
        else:
            # Lookup-table name is simply the field name upper-cased.
            CodeValue = Get_Area_Code(Mesage[key], key.upper())
        ResultStr = ResultStr + key + "=" + CodeValue + ','
    return ResultStr
def IpQuery(req):
    """Return the geo info of the client's IP as a JSON HttpResponse.

    IP.find() yields a tab-separated record (country/province/city/county/
    isp); the client IP itself is appended as the last field.

    :param req: request object passed through to Get_Client_Ip().
    """
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    Key = ['country', 'province', 'city', 'county', 'isp', 'ip']
    # Resolve the client IP once instead of twice (the original called
    # Get_Client_Ip(req) for both the lookup and the appended field).
    client_ip = Get_Client_Ip(req)
    Result = IP.find(client_ip).split('\t')
    Result.append(client_ip)
    Mesage = dict(zip(Key, Result))
    return HttpResponse(json.dumps(Mesage, ensure_ascii=False))
def run(): ip_views=stat_ip_views(nginx_log_path) max_ip_view={} fileName='out.csv' f=open('out.csv','w+') b = 'IP,国家,访问数总数' print >> f,b for ip in ip_views: IP.load(os.path.abspath("17monipdb.dat")) count=IP.find("%s"% (ip)) conut_s=count.split() countery=conut_s[0] views=ip_views[ip] c = '%s,%s,%s' %(ip,countery,views) print >> f,c if len(max_ip_view)==0: max_ip_view[ip]=views else: _ip=max_ip_view.keys()[0] _views=max_ip_view[_ip] if views>_views: max_ip_view[ip]=views max_ip_view.pop(_ip) print "IP:", ip, "国家:", countery, "访问数:", views print "总共有多少IP:", len(ip_views) print "最大访问IP数:", max_ip_view g = "" d = '总共有多少IP:%s' %(len(ip_views)) e = '最大访问IP数:%s' %(max_ip_view) print >> f,g print >> f,d print >> f,e
def query_ipip_db(self, db):
    """Resolve self.ip against an ipip.net .dat database.

    Sets self.area to "<field1><field2> <last field>" of the
    whitespace-split record, or "unknown" on any failure (missing ipip
    package, bad db path, short record, ...).

    :param db: path to the ipip .dat file.
    """
    try:
        from ipip import IP
        IP.load(os.path.abspath(db))
        data = IP.find(self.ip)
        fields = data.split()
        self.area = fields[1] + fields[2] + " " + fields[-1]
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        self.area = "unknown"
def GetAreaName(req):
    """Return a "key=value," string with the client's province/city/isp/ip.

    Looks the client IP up in the ipip .dat database and keeps the raw
    human-readable names (no code translation).
    """
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    fields = IP.find(Get_Client_Ip(req)).split('\t')
    fields.append(Get_Client_Ip(req))
    labels = ['country', 'province', 'city', 'county', 'isp', 'ip']
    Mesage = dict(zip(labels, fields))
    wanted = ['province', 'city', 'isp', 'ip']
    parts = [k + "=" + Mesage[k] + ',' for k in Mesage.keys() if k in wanted]
    return ''.join(parts)
def ipip_api(ip):
    """Return the city for *ip* from the bundled 17monipdb.dat, or None.

    The record from IP.find() is whitespace-separated; field index 2 is
    treated as the city.

    :param ip: dotted-quad IP string to look up.
    """
    dbpath = os.path.join(os.path.dirname(__file__), "17monipdb.dat")
    IP.load(dbpath)
    try:
        return IP.find(ip).split()[2]
    except Exception:
        # Keep the original best-effort contract (return None on any
        # lookup failure) but make the None return explicit instead of
        # a bare "except: pass" falling off the end.
        return None
def GetAreaStr(ip):
    """Return "province=<code>,city=<code>," for the given IP.

    :param ip: dotted-quad IP string to look up.
    """
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    Key = ['country', 'province', 'city', 'county', 'isp', 'ip']
    Area = IP.find(ip).split('\t')
    Area.append(ip)
    Mesage = dict(zip(Key, Area))
    ResultStr = ''
    # Only province and city reach the output, so skip the wasted
    # Get_Area_Code calls the original made for country/county/isp; the
    # fixed iteration order also makes the output deterministic (the
    # original walked dict keys in arbitrary order).
    for key in ['province', 'city']:
        # Lookup-table name is the field name upper-cased (PROVINCE/CITY).
        CodeValue = Get_Area_Code(Mesage[key], key.upper())
        ResultStr = ResultStr + key + "=" + str(CodeValue) + ','
    return ResultStr
from pipestat import pipestat from djra import settings import itertools from ipip import IP def to_timestamp(d): return time.mktime(d.timetuple()) IP_DB_FILE = getattr(settings, 'IP_DB_FILE', None) if IP_DB_FILE is None: warnings.warn('''not found IP_DB_FILE, geo report will not work, please set IP_DB_FILE in settings. You can download the ipdb file from http://www.ipip.net/ ''') else: IP.load(IP_DB_FILE) def pick(x, fields): return { field: getattr(x, field) for field in fields } def build_server_report(begin_date, end_date, options): fields = ['username', 'nasipaddress', 'acctinputoctets', 'acctoutputoctets'] sessions = Radacct.objects.filter( acctstarttime__gte=begin_date, acctstarttime__lt=end_date ).only(*fields) sessions = itertools.imap(lambda x: pick(x, fields), sessions) report_pipe = [ { "$group": {
def ip_search(): # 查询指定IP所属地理位置 IP.load('路径/tinyipdata_utf8.dat') print IP.find('ip地址').strip().encode("utf8")
#!/usr/bin/python # -*- coding: utf-8 -*- import requests import sys from time import sleep from log import logger import os, json from ipip import IP base_taobao_url = "http://ip.taobao.com/service/getIpInfo.php" base_sina_url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php" # 新浪库准确率太低,抛弃他 IP.load(os.path.abspath("input/mydata4vipweek2.dat")) def query_local(ip): ret = IP.find(ip) tmp = ret.split() print tmp[0], tmp[1] rjson = dict() if tmp[0] != u'中国': rjson["country"] = u'中国' rjson["province"] = u'北京市' rjson["city"] = u'北京市' rjson["isp"] = tmp[-1] else: # if tmp[1] == u'台湾' or tmp[1] == u'香港' or tmp[1] == u'澳门': if tmp[1] in (u'台湾', u'香港', u'澳门'): rjson["country"] = tmp[1] rjson["province"] = u''
# coding: utf-8 import sys, os, random, re, httplib, codecs from ipip import IP root = '/Users/zhangtaichao/Documents/ips' datapaht = '/Users/zhangtaichao/code/php/duobaohui_php/app/libraries/class/ip/17monipdb.dat' IP.load(os.path.abspath(datapaht)) def get_files(path): l = os.listdir(root) for name in l: if name.startswith('t_txt'): yield os.path.join(path, name) def for_file(file, num, inc=1): """ :param file: ip段文件 :param num: 每行最大生成个数 :return:list """ dic = {} (filepath, filename) = os.path.split(file) dic["name"] = filename[-2:] result = [] with open(file, 'r') as f: for line in f.readlines(): ips = line.split() if (len(ips) == 3):
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding("utf-8") import os from ipip import IP #from ipip import IPX str_ip = sys.argv[1] if len(sys.argv) > 1 else '113.206.51.191' print 'find address of ip[%s]:' % str_ip IP.load(os.path.abspath("17monipdb.dat")) print IP.find(str_ip.encode('utf-8')) #IPX.load(os.path.abspath("17monipdb.datx")) #print IPX.find("118.28.8.8")
def appsflow_appAnalysis(user):
    """Per-IP traffic analysis over one user's bound database.

    Reads filter parameters from the current Flask request:
      - startTime / endTime: timestamp bounds on the sflow rows
      - agent: optional agent filter
      - type: '0' group by src_ip (whitelisted IPs excluded, region from
              the pre-joined AppIpRegion table),
              '1' group by dst_ip (region resolved on the fly via ipip),
              '2' group by "src -> dst" pair
      - page / size: pagination (defaults 1 / 10)

    Returns a JSON response: {'content': {'data': [...], 'total': ...},
    'totalElements': <row count>}.
    """
    startTime = request.args.get('startTime')
    endTime = request.args.get('endTime')
    agent = request.args.get('agent')
    type_ = request.args.get('type')
    page = request.args.get('page')
    size = request.args.get('size')
    if page:
        page = int(page)
    else:
        page = 1
    if size:
        size = int(size)
    else:
        size = 10
    # Bind the shared models to this user's database via dynamic subclasses
    # carrying the per-user __bind_key__.
    MyAppSflow = type('AppSflow_' + str(user.dbname), (AppSflow, ),
                      {'__bind_key__': user.dbname})
    MyAppIpRegion = type('AppIpRegion_' + str(user.dbname), (AppIpRegion, ),
                         {'__bind_key__': user.dbname})
    # Traffic statistics according to the query conditions.
    # Default (type '0') query: per-source totals with the region joined in.
    analysises = db.session.query(MyAppSflow.src_ip,
                                  db.func.sum(MyAppSflow.ipsize),
                                  MyAppIpRegion.region).\
        select_from(MyAppSflow).outerjoin(MyAppIpRegion,
                                          MyAppSflow.src_ip == MyAppIpRegion.ip)
    if type_ == '1':
        analysises = db.session.query(MyAppSflow.dst_ip,
                                      db.func.sum(MyAppSflow.ipsize))
    elif type_ == '2':
        analysises = db.session.query(
            MyAppSflow.src_ip + ' -> ' + MyAppSflow.dst_ip,
            db.func.sum(MyAppSflow.ipsize))
    if agent:
        analysises = analysises.filter(MyAppSflow.agent == agent)
    if startTime:
        analysises = analysises.filter(MyAppSflow.timestamp >= startTime)
    if endTime:
        analysises = analysises.filter(MyAppSflow.timestamp <= endTime)
    if type_ == '0':
        # Exclude whitelisted source IPs from the per-source grouping.
        MyAppIpWhitelist = type('AppIpWhitelist_' + str(user.dbname),
                                (AppIpWhitelist, ),
                                {'__bind_key__': user.dbname})
        whitelists = db.session.query(MyAppIpWhitelist.ip).all()
        ips = []
        for temp in whitelists:
            ips.append(temp[0])
        if len(ips):
            analysises = analysises.filter(~MyAppSflow.src_ip.in_(ips))
        analysises = analysises.group_by(MyAppSflow.src_ip)
    elif type_ == '1':
        analysises = analysises.group_by(MyAppSflow.dst_ip, )
    elif type_ == '2':
        analysises = analysises.group_by(MyAppSflow.src_ip, MyAppSflow.dst_ip)
    # Largest traffic first, then paginate.
    analysises = analysises.order_by(db.func.sum(
        MyAppSflow.ipsize).desc()).paginate(page, size)
    content = {}
    temp = []
    total = 0
    for index in range(len(analysises.items)):
        analysis = analysises.items[index]
        temp.append({
            'ip': analysis[0],
            'ipsize': float(analysis[1]) if float(analysis[1]) else 0
        })
        total += float(analysis[1]) if float(analysis[1]) else 0
        if type_ == '0':
            # Region came back from the outer join as the third column.
            temp[index]['region'] = analysis[2]
        if type_ == '1':
            # Resolve the destination IP's region on the fly from the ipip db.
            IP.load(os.path.abspath("17monipdb.dat"))
            ip_region = IP.find(analysis[0])
            temp[index]['region'] = ''.join(ip_region.split())
    content['data'] = temp
    content['total'] = total
    return jsonify({'content': content, 'totalElements': analysises.total})
exit() with open(osv_path, 'r') as osv_file: for line in osv_file: kv = line.strip().split('\t') if (len(kv) == 1): osv_dict[""] = kv[0] elif (len(kv) == 2): osv_dict[kv[0]] = kv[1] else: print "error dict file!" print line exit() # load ipip dat IP.load(os.path.abspath("ip_location.dat")) district_encode_path = "./district_encode" if (os.path.exists(district_encode_path) == False): print "district_encode file not find" exit() district_encode_dict = {} with open(district_encode_path, 'r') as district_encode_file: for line in district_encode_file: kv = line.strip().split('\t') try: if (len(kv) == 1): district_encode_dict[""] = kv[0] elif (len(kv) == 2):
# code from https://github.com/17mon/python import sys reload(sys) sys.setdefaultencoding("utf-8") import os import random import socket import struct import time from ipip import IP IP.load(os.path.abspath("../data/17monipdb20170909.dat")) begin = time.time() for n in range(1000): ip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff))) city = IP.find(ip) # if city: # print city[0] cost = time.time() - begin print 'cost===:', cost city = IP.find('222.175.96.1') print ','.join(city) city = IP.find('36.149.244.81') print ','.join(city)
for i4 in range(0,254): ip='%s.%s.%s.%s' %(i1,i2,i3,i4) if i1 not in (0,10,172,192,127): f = file('ip.txt','a+') for i1 in range(0,254): for i2 in range(0,254): for i3 in range(0,254): for i4 in range(0,254): ip='%s.%s.%s.%s' %(i1,i2,i3,i4) if i1 not in (0,10,172,192,127): # print(ip) try: #print(ip) IP.load(os.path.abspath("mydata4vipday2.datx")) # IP.load(os.path.abspath("monipdb.dat")) print IP.find(ip) IPX.load(os.path.abspath("mydata4vipday2.datx")) # IP.load(os.path.abspath("monipdb.dat")) ip1 = IPX.find(ip) print ip,ip1 codelst = ['\n',ip,ip1,'\n'] f.writelines(codelst) ip1 = IPX.find(ip) #print ip,ip1
#https://segmentfault.com/a/1190000000352578 ##更新本地dat文件 #from qqwry import updateQQwry #result = updateQQwry('qqwry.dat') ##use from qqwry import QQwry q = QQwry() q.load_file('qqwry.dat')#工作路径 result = q.lookup('xxx.xxx.xx.xx') print(result) ###################################### ipip.net ######################################### #https://github.com/17mon/python import os from ipip import IP#目前无法pip(python3.5.1) from ipip import IPX IP.load(os.path.abspath("mydata4vipday2.dat"))#工作路径 print IP.find("118.28.8.8") IPX.load(os.path.abspath("mydata4vipday2.datx"))#工作路径 print IPX.find("118.28.8.8") >>>中国 天津 天津 鹏博士 >>>中国 天津 天津 鹏博士 39.128399 117.185112 Asia/Shanghai UTC+8 120000
# -*- coding: utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import os
from ipip import IP
from ipip import IPX

# Demo: look one fixed address up in both the .dat and .datx databases.
sample_ip = '118.28.8.8'
IP.load(os.path.abspath('mydata4vipday2.dat'))
print(IP.find(sample_ip))
IPX.load(os.path.abspath('mydata4vipday2.datx'))
print(IPX.find(sample_ip))
# -*- coding: utf-8 -*-
import os
import random
from ipip import IP

dat_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'ip.dat')
IP.load(dat_path)

# Province names used to fake a locality for loopback requests.
_PROVINCES = [u'湖北', u'湖南', u'河南', u'河北', u'山东', u'山西', u'江西',
              u'江苏', u'浙江', u'黑龙江', u'新疆', u'云南', u'贵州', u'福建',
              u'吉林', u'安徽', u'四川', u'西藏', u'宁夏', u'辽宁', u'青海',
              u'甘肃', u'陕西', u'内蒙', u'台湾', u'北京', u'上海', u'海南',
              u'天津', u'重庆']


def get_ip_locality(ip):
    """Return characters 3:5 of the raw lookup record for *ip*.

    Localhost has no geo record, so a random province name is returned
    for it instead.
    """
    if ip == '127.0.0.1':
        return random.choice(_PROVINCES)
    # NOTE(review): slicing [3:5] assumes a fixed layout of the record
    # string returned by IP.find -- confirm against the dat format.
    return IP.find(ip)[3:5]
# coding: utf-8 import sys,os,random,re,httplib,codecs from ipip import IP root = '/Users/zhangtaichao/Documents/ips' datapaht = '/Users/zhangtaichao/code/php/duobaohui_php/app/libraries/class/ip/17monipdb.dat' IP.load(os.path.abspath(datapaht)) def get_files(path): l = os.listdir(root) for name in l: if name.startswith('t_txt'): yield os.path.join(path,name) def for_file(file,num,inc=1): """ :param file: ip段文件 :param num: 每行最大生成个数 :return:list """ dic = {} (filepath,filename)=os.path.split(file) dic["name"] = filename[-2:] result = [] with open(file,'r') as f: for line in f.readlines(): ips = line.split() if(len(ips) == 3): res = gen_ip(ips[0],ips[1],num,inc) if type(res) == list:
# -*- coding: utf-8 -*- import re import os import csv import sys import time from ipip import IP from functools import wraps import marshal IP.load(os.path.join(os.getcwd(), "17monipdb.dat")) def dump_obj(obj, file_name): with open(file_name, 'wb') as wf: marshal.dump(obj, wf) def load_obj(file_name): with open(file_name, 'rb') as rf: return marshal.load(rf) def fn_cache(key=None): _default_key_func = lambda args, kwargs: '<args:%r,kwargs:%r>' % (args, kwargs) _key_func = _default_key_func if key is None else key def _fn_cache(func): cache_dict = {} @wraps(func)
#!/usr/bin/python # -*- coding: utf-8 -*- from ipip import IP import json import os pvDict = {} clkDict = {} winDict = {} staDict = {} rltDict = {} # statistic every crid's ctr f = open("ip_make_rlt", "w+") IP.load( os.path.abspath( '/Users/haoshun/hs-code-test/sta_bridge_log/ip_sta/ip_location.dat')) for line in open( "/Users/haoshun/hs-code-test/bridge_temporary_code/hs_win_21_head"): try: line = line.strip() jsonObj = json.loads(line) auction = jsonObj['auction'] winDict[auction] = {"win"} except: continue for line in open( "/Users/haoshun/hs-code-test/bridge_temporary_code/hs_clk_21_head"): try: line = line.strip() jsonObj = json.loads(line) auction = jsonObj['auction'] clkDict[auction] = {"clk"}
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding("utf-8") import os from ipip import IP from ipip import IPX IP.load(os.path.abspath("mydata4vipday2.dat")) print IP.find("118.28.8.8") IPX.load(os.path.abspath("mydata4vipday2.datx")) print IPX.find("118.28.8.8")
def a(): IP.load(os.path.abspath("17monipdb.dat")) for i in xrange(1,2): print IP.find("118.%s.8.%s" % (i+1,i)) print IP.find('14.23.124.162')