def run(): ip_views=stat_ip_views(nginx_log_path) max_ip_view={} fileName='out.csv' f=open('out.csv','w+') b = 'IP,国家,访问数总数' print >> f,b for ip in ip_views: IP.load(os.path.abspath("17monipdb.dat")) count=IP.find("%s"% (ip)) conut_s=count.split() countery=conut_s[0] views=ip_views[ip] c = '%s,%s,%s' %(ip,countery,views) print >> f,c if len(max_ip_view)==0: max_ip_view[ip]=views else: _ip=max_ip_view.keys()[0] _views=max_ip_view[_ip] if views>_views: max_ip_view[ip]=views max_ip_view.pop(_ip) print "IP:", ip, "国家:", countery, "访问数:", views print "总共有多少IP:", len(ip_views) print "最大访问IP数:", max_ip_view g = "" d = '总共有多少IP:%s' %(len(ip_views)) e = '最大访问IP数:%s' %(max_ip_view) print >> f,g print >> f,d print >> f,e
def GetArea(req):
    # Build a "key=value," string (province, city, isp, ip) for the client,
    # with each area name translated to a code via Get_Area_Code.
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    Key=['country','province','city','county','isp','ip']
    # Area=IP.find(Get_Client_Ip(req)).split('\t')
    # NOTE(review): the real client lookup above is commented out and a fixed
    # test address is queried instead — confirm this is intentional.
    Area=IP.find('202.99.224.68').split('\t')
    # Append the raw client IP so it lines up with the trailing 'ip' key.
    Area.append(Get_Client_Ip(req))
    ResultStr=''
    Code=''  # unused
    Mesage=dict(zip(Key,Area))
    # print Mesage
    for key in Mesage.keys():
        # Translate every field to its code; only a subset is emitted below.
        if key=='country': CodeValue=Get_Area_Code(Mesage[key],'COUNTRY')
        elif key=='province': CodeValue=Get_Area_Code(Mesage[key],'PROVINCE')
        elif key=='city': CodeValue=Get_Area_Code(Mesage[key],'CITY')
        elif key=='county': CodeValue=Get_Area_Code(Mesage[key],'COUNTY')
        elif key=='isp': CodeValue=Get_Area_Code(Mesage[key],'ISP')
        elif key=='ip': CodeValue=Mesage[key]
        # Only these four fields make it into the result string.
        if key in ['province','city','isp','ip']: ResultStr=ResultStr+key+"="+CodeValue+ ','
    return ResultStr
def IpQuery(req):
    """Look up the client's IP in the local ipip database and return the
    result as a JSON HTTP response keyed by area field."""
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    field_names = ['country', 'province', 'city', 'county', 'isp', 'ip']
    # Split the tab-separated lookup result and append the raw client IP so
    # it pairs with the trailing 'ip' key.
    area_values = IP.find(Get_Client_Ip(req)).split('\t')
    area_values.append(Get_Client_Ip(req))
    payload = dict(zip(field_names, area_values))
    return HttpResponse(json.dumps(payload, ensure_ascii=False))
def query_ipip_db(self, db):
    """Resolve self.ip against an ipip .dat database file.

    Sets self.area to "<province><city> <isp>" (2nd + 3rd + last fields of
    the lookup) on success, or "unknown" when the ipip package is missing,
    the database cannot be loaded, or the lookup result is too short.
    """
    try:
        from ipip import IP
        IP.load(os.path.abspath(db))
        # Split once instead of three times as the original did.
        fields = IP.find(self.ip).split()
        self.area = fields[1] + fields[2] + " " + fields[-1]
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; keep the best-effort fallback otherwise.
        self.area = "unknown"
def GetAreaName(req):
    """Return raw "key=value," pairs (province, city, isp, ip) describing
    the client's location, straight from the ipip database."""
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    field_names = ['country', 'province', 'city', 'county', 'isp', 'ip']
    area_values = IP.find(Get_Client_Ip(req)).split('\t')
    area_values.append(Get_Client_Ip(req))
    info = dict(zip(field_names, area_values))
    wanted = ['province', 'city', 'isp', 'ip']
    # Preserve dict key iteration order, emitting only the wanted fields.
    pieces = [k + "=" + info[k] + ',' for k in info.keys() if k in wanted]
    return ''.join(pieces)
def ipip_api(ip):
    """Return the city field for *ip* from the bundled 17monipdb database.

    Returns None when the lookup fails (malformed IP, short result, or any
    database error) — the caller gets an explicit best-effort result.
    """
    dbpath = os.path.join(os.path.dirname(__file__), "17monipdb.dat")
    IP.load(dbpath)
    try:
        # Third whitespace-separated field of the lookup result is the city.
        return IP.find(ip).split()[2]
    except Exception:
        # BUG FIX: was a bare `except: pass` that also masked SystemExit /
        # KeyboardInterrupt and fell off the end returning None implicitly.
        return None
def get_user_location():
    # Aggregate the last-login IPs of all users into two ECharts-style JS data
    # files: a per-province map for China and a per-country world map.
    user_data = db.query("select * from `user_list`")
    country_list = []
    other_list = []
    province_list = []
    for user in user_data:
        if user.lastLoginIp:
            # Anonymized IPs store '*' — substitute '1' so the lookup works.
            ip = str(user.lastLoginIp).replace("*", "1")
            row = IP.find(ip)
            row = str(row).split('\t')
            country = ""
            province = ""
            if len(row) > 2:
                country = str(row[0])
                province = str(row[1])
            # print "country:" + country
            # print "province:" + province
            if country == "中国":
                country_list.append(country)
                province_list.append(province)
            elif country != "":
                other_list.append(country)
    # Per-province counts for the China map.
    diction_list = []
    myset = set(province_list)
    for item in myset:
        print item.encode("GBK") + ":" + str(province_list.count(item))
        d = {"name": item, "value": str(province_list.count(item))}
        diction_list.append(d)
    # for list in other_list:
    # print list.encode("GBK")
    # Wrap the JSON in a JSONP-style callback the page expects.
    json_str = json.dumps(diction_list)
    json_str = 'chinaMap(' + json_str + ');'
    print json_str
    with open('xxxx/mapdata.js', 'w') as outfile:
        outfile.write(json_str)
    outfile.closed  # no-op attribute access; the with-block already closed it
    # Per-country counts for the world map.
    diction_list_world = []
    worldSet = set(other_list)
    for item in worldSet:
        print item.encode("GBK") + ":" + str(other_list.count(item))
        d = {"name": item, "value": str(other_list.count(item))}
        diction_list_world.append(d)
        # "局域网" (LAN) entries get an extra joke placeholder entry.
        if item =="局域网":
            d = {"name": "猴山_冰岛_丧失岛", "value": str(other_list.count(item))}
            diction_list_world.append(d)
    # China itself appears on the world map with its total province count.
    d = {"name": "中国", "value": str(len(province_list))}
    diction_list_world.append(d)
    json_str_world = json.dumps(diction_list_world)
    json_str_world = 'world_Map(' + json_str_world + ');'
    print json_str_world
    with open('xxxx/world_mapdata.js', 'w') as outfile:
        outfile.write(json_str_world)
    outfile.closed  # no-op, see above
def ip_location(ip): ip = ip.split(',')[-1].strip() ret = '' try: ret = IP.find(ip) except Exception as ex: print 'x', while("\t\t" in ret): ret = ret.replace("\t\t", "\t") return ret
def get_ip_locality(ip):
    """Return a short locality slice for *ip*.

    Localhost gets a random Chinese province name so local development
    still shows plausible data.
    """
    if ip == '127.0.0.1':
        fake_provinces = [
            u'湖北', u'湖南', u'河南', u'河北', u'山东', u'山西', u'江西',
            u'江苏', u'浙江', u'黑龙江', u'新疆', u'云南', u'贵州', u'福建',
            u'吉林', u'安徽', u'四川', u'西藏', u'宁夏', u'辽宁', u'青海',
            u'甘肃', u'陕西', u'内蒙', u'台湾', u'北京', u'上海', u'海南',
            u'天津', u'重庆',
        ]
        return random.choice(fake_provinces)
    # NOTE(review): slices two characters out of the location string —
    # presumably the province part; confirm against the database format.
    return IP.find(ip)[3:5]
def ip_check(address):
    """Classify an IPv4 address string by location.

    Returns the ipip lookup result if it names a Chinese region, "外国"
    (foreign) otherwise, and "Null" for malformed input or a failed lookup.
    """
    # Guard clause: anything that doesn't have four dotted parts is rejected.
    if len(address.split(".")) != 4:
        return "Null"
    try:
        clientip = IP.find(address)
        # Substring test against the whitelist of Chinese region names; the
        # membership check stays inside the try so encoding errors also
        # degrade to "Null" exactly as before.
        if clientip not in "中国 钓鱼岛 北京 上海 天津 重庆 黑龙江 吉林 辽宁 内蒙古 河北 新疆 " \
                           "甘肃 青海 陕西 宁夏 河南 山东 山西 安徽 湖南 湖北 江苏 四川 贵州 " \
                           "云南 广西 西藏 浙江 江西 广东 福建 台湾 海南 香港 澳门":
            clientip = "外国"
        return clientip
    except Exception as e:
        return "Null"
def prepare(x):
    """Flatten a raw accounting row into a serializable dict.

    Picks `fields` from *x*, adds 'location' (first two tokens of the ipip
    lookup, or 'Unknow' on any failure) and converts the start/stop times to
    epoch timestamps, defaulting missing values to now.
    """
    ip = x.callingstationid
    # real world data is dirty: keep only what precedes the first '='
    if '=' in ip:
        ip = ip.split('=', 1)[0]
    try:
        location = IP.find(ip)
        location = " ".join(location.split()[:2])
    except Exception:
        # BUG FIX: was a bare `except:` (also caught SystemExit/
        # KeyboardInterrupt). 'Unknow' (sic) is kept verbatim — downstream
        # consumers may already match on this exact string.
        location = 'Unknow'
    y = pick(x, fields)
    y['location'] = location
    y['acctstarttime'] = to_timestamp(y['acctstarttime'] or datetime.datetime.now())
    y['acctstoptime'] = to_timestamp(y['acctstoptime'] or datetime.datetime.now())
    return y
def GetAreaStr(ip):
    # Return "province=<code>,city=<code>," for the given IP, translating
    # each area name to its numeric/textual code via Get_Area_Code.
    IP.load(os.path.abspath("mydata4vipweek2.dat"))
    Key=['country','province','city','county','isp','ip']
    Area=IP.find(ip).split('\t')
    # Append the raw IP so it lines up with the trailing 'ip' key.
    Area.append(ip)
    ResultStr=''
    CodeValue=''
    Mesage=dict(zip(Key,Area))
    for key in Mesage.keys():
        # Every field is translated, but only province/city are emitted.
        if key=='country': CodeValue=Get_Area_Code(Mesage[key],'COUNTRY')
        elif key=='province': CodeValue=Get_Area_Code(Mesage[key],'PROVINCE')
        elif key=='city': CodeValue=Get_Area_Code(Mesage[key],'CITY')
        elif key=='county': CodeValue=Get_Area_Code(Mesage[key],'COUNTY')
        elif key=='isp': CodeValue=Get_Area_Code(Mesage[key],'ISP')
        elif key=='ip': CodeValue=Mesage[key]
        if key in ['province','city']: ResultStr=ResultStr+key+"="+str(CodeValue)+ ','
    return ResultStr
def write2file(path, data):
    """Append an SQL INSERT for every resolvable IP in data['ip'].

    data is {'name': remark, 'ip': [ip, ...]}; each row becomes a values
    tuple (ip, country, province, city, county, remark) appended to the
    UTF-8 file at *path*. Unresolvable IPs are skipped.
    """
    name = data['name']
    ip_list = data['ip']  # renamed: the original shadowed builtin `list`
    with codecs.open(path, 'a', 'utf-8') as f:
        f.write('insert into sh_ip_factory(ip,country,province,city,county,remark) values')
        for ip in ip_list:
            raw = IP.find(ip)
            fields = raw.split()
            # BUG FIX: the old code compared the split *list* to 'N/A'
            # (always False); test the raw string instead.
            if raw == 'N/A' or len(fields) == 1:
                continue
            # Pad short results so four fields always unpack.
            while len(fields) < 4:
                fields.append('')
            # BUG FIX: take exactly four fields; results with extra columns
            # (e.g. an ISP) used to raise ValueError on unpacking.
            country, province, city, county = fields[:4]
            f.write(" ('%s','%s','%s','%s','%s','%s')," %
                    (ip, country, province, city, county, name))
        f.write(os.linesep)
def query_local(ip):
    # Resolve *ip* via the local ipip database and normalize the result into
    # {country, province, city, isp}, expanding abbreviated region names to
    # their full official forms (e.g. 新疆 -> 新疆维吾尔自治区).
    ret = IP.find(ip)
    tmp = ret.split()
    print tmp[0], tmp[1]  # debug output
    rjson = dict()
    if tmp[0] != u'中国':
        # NOTE(review): non-China results are clobbered to 中国/北京市,
        # keeping only the ISP — presumably a deliberate fallback; confirm.
        rjson["country"] = u'中国'
        rjson["province"] = u'北京市'
        rjson["city"] = u'北京市'
        rjson["isp"] = tmp[-1]
    else:
        # if tmp[1] == u'台湾' or tmp[1] == u'香港' or tmp[1] == u'澳门':
        if tmp[1] in (u'台湾', u'香港', u'澳门'):
            # These regions are reported as the "country" with no detail.
            rjson["country"] = tmp[1]
            rjson["province"] = u''
            rjson["city"] = u''
            rjson["isp"] = u''
        elif tmp[1] in (u'西藏', u'内蒙古'):
            rjson["country"] = tmp[0]
            rjson["province"] = tmp[1] + u'自治区'
            rjson["city"] = tmp[-2] + u'市'
            rjson["isp"] = tmp[-1]
        elif tmp[1] == u'广西':
            rjson["country"] = tmp[0]
            rjson["province"] = tmp[1] + u'壮族自治区'
            rjson["city"] = tmp[-2] + u'市'
            rjson["isp"] = tmp[-1]
        elif tmp[1] == u'新疆':
            rjson["country"] = tmp[0]
            rjson["province"] = tmp[1] + u'维吾尔自治区'
            rjson["city"] = tmp[-2] + u'市'
            rjson["isp"] = tmp[-1]
        elif tmp[1] == u'宁夏':
            rjson["country"] = tmp[0]
            rjson["province"] = tmp[1] + u'回族自治区'
            rjson["city"] = tmp[-2] + u'市'
            rjson["isp"] = tmp[-1]
        else:
            # Ordinary provinces: append the 省 / 市 suffixes.
            rjson["country"] = tmp[0]
            rjson["province"] = tmp[1] + u'省'
            rjson["city"] = tmp[-2] + u'市'
            rjson["isp"] = tmp[-1]
    return rjson
def write2file(path, data):
    """Append an SQL INSERT for every resolvable IP in data['ip'].

    data is {'name': remark, 'ip': [ip, ...]}; one values tuple
    (ip, country, province, city, county, remark) is written per IP to the
    UTF-8 file at *path*. Unresolvable IPs are skipped.
    """
    name = data['name']
    ip_list = data['ip']  # renamed: the original shadowed builtin `list`
    with codecs.open(path, 'a', 'utf-8') as f:
        sql = 'insert into sh_ip_factory(ip,country,province,city,county,remark) values'
        f.write(sql)
        for ip in ip_list:
            raw = IP.find(ip)
            fields = raw.split()
            # BUG FIX: `fields == 'N/A'` compared a list with a string and
            # could never be true; check the raw lookup string instead.
            if raw == 'N/A' or len(fields) == 1:
                continue
            while len(fields) < 4:
                fields.append('')
            # BUG FIX: slice to four fields so lookups returning extra
            # columns no longer raise ValueError on unpacking.
            (country, province, city, county) = fields[:4]
            row = " ('%s','%s','%s','%s','%s','%s')," % (
                ip, country, province, city, county, name)
            f.write(row)
        f.write(os.linesep)
#!/usr/bin/python # -*- coding: utf-8 -*- import requests import sys from time import sleep from log import logger import os, json from ipip import IP base_taobao_url = "http://ip.taobao.com/service/getIpInfo.php" base_sina_url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php" # 新浪库准确率太低,抛弃他 IP.load(os.path.abspath("input/mydata4vipweek2.dat")) def query_local(ip): ret = IP.find(ip) tmp = ret.split() print tmp[0], tmp[1] rjson = dict() if tmp[0] != u'中国': rjson["country"] = u'中国' rjson["province"] = u'北京市' rjson["city"] = u'北京市' rjson["isp"] = tmp[-1] else: # if tmp[1] == u'台湾' or tmp[1] == u'香港' or tmp[1] == u'澳门': if tmp[1] in (u'台湾', u'香港', u'澳门'): rjson["country"] = tmp[1] rjson["province"] = u''
#!/usr/bin/python #!-*- coding: utf -8 -*- import json import os from ipip import IP pv_ip_rlt = open('head_rlt', 'w+') IP.load( os.path.abspath( '/Users/haoshun/hs-code-test/sta_bridge_log/ip_sta/ip_location.dat')) try: for file in open("hs_tmp_0301"): jsonObj = json.loads(file) ip = jsonObj['input']['device']['ip'] iprlt = IP.find(ip) pv_ip_rlt.write("%s\n" % (iprlt.encode('utf-8'))) except Exception as ex: print ex.message
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding("utf-8") import os from ipip import IP #from ipip import IPX str_ip = sys.argv[1] if len(sys.argv) > 1 else '113.206.51.191' print 'find address of ip[%s]:' % str_ip IP.load(os.path.abspath("17monipdb.dat")) print IP.find(str_ip.encode('utf-8')) #IPX.load(os.path.abspath("17monipdb.datx")) #print IPX.find("118.28.8.8")
# coding: utf-8 import sys, os, random, re, httplib, codecs from ipip import IP root = '/Users/zhangtaichao/Documents/ips' datapaht = '/Users/zhangtaichao/code/php/duobaohui_php/app/libraries/class/ip/17monipdb.dat' IP.load(os.path.abspath(datapaht)) def get_files(path): l = os.listdir(root) for name in l: if name.startswith('t_txt'): yield os.path.join(path, name) def for_file(file, num, inc=1): """ :param file: ip段文件 :param num: 每行最大生成个数 :return:list """ dic = {} (filepath, filename) = os.path.split(file) dic["name"] = filename[-2:] result = [] with open(file, 'r') as f: for line in f.readlines(): ips = line.split() if (len(ips) == 3):
exit() with open(osv_path, 'r') as osv_file: for line in osv_file: kv = line.strip().split('\t') if (len(kv) == 1): osv_dict[""] = kv[0] elif (len(kv) == 2): osv_dict[kv[0]] = kv[1] else: print "error dict file!" print line exit() # load ipip dat IP.load(os.path.abspath("ip_location.dat")) district_encode_path = "./district_encode" if (os.path.exists(district_encode_path) == False): print "district_encode file not find" exit() district_encode_dict = {} with open(district_encode_path, 'r') as district_encode_file: for line in district_encode_file: kv = line.strip().split('\t') try: if (len(kv) == 1): district_encode_dict[""] = kv[0] elif (len(kv) == 2):
def appsflow_appAnalysis(user): startTime = request.args.get('startTime') endTime = request.args.get('endTime') agent = request.args.get('agent') type_ = request.args.get('type') page = request.args.get('page') size = request.args.get('size') if page: page = int(page) else: page = 1 if size: size = int(size) else: size = 10 MyAppSflow = type('AppSflow_' + str(user.dbname), (AppSflow, ), {'__bind_key__': user.dbname}) MyAppIpRegion = type('AppIpRegion_' + str(user.dbname), (AppIpRegion, ), {'__bind_key__': user.dbname}) #根据查询条件做流量统计 analysises = db.session.query(MyAppSflow.src_ip, db.func.sum(MyAppSflow.ipsize), MyAppIpRegion.region).\ select_from(MyAppSflow).outerjoin(MyAppIpRegion, MyAppSflow.src_ip == MyAppIpRegion.ip) if type_ == '1': analysises = db.session.query(MyAppSflow.dst_ip, db.func.sum(MyAppSflow.ipsize)) elif type_ == '2': analysises = db.session.query( MyAppSflow.src_ip + ' -> ' + MyAppSflow.dst_ip, db.func.sum(MyAppSflow.ipsize)) if agent: analysises = analysises.filter(MyAppSflow.agent == agent) if startTime: analysises = analysises.filter(MyAppSflow.timestamp >= startTime) if endTime: analysises = analysises.filter(MyAppSflow.timestamp <= endTime) if type_ == '0': MyAppIpWhitelist = type('AppIpWhitelist_' + str(user.dbname), (AppIpWhitelist, ), {'__bind_key__': user.dbname}) whitelists = db.session.query(MyAppIpWhitelist.ip).all() ips = [] for temp in whitelists: ips.append(temp[0]) if len(ips): analysises = analysises.filter(~MyAppSflow.src_ip.in_(ips)) analysises = analysises.group_by(MyAppSflow.src_ip) elif type_ == '1': analysises = analysises.group_by(MyAppSflow.dst_ip, ) elif type_ == '2': analysises = analysises.group_by(MyAppSflow.src_ip, MyAppSflow.dst_ip) analysises = analysises.order_by(db.func.sum( MyAppSflow.ipsize).desc()).paginate(page, size) content = {} temp = [] total = 0 for index in range(len(analysises.items)): analysis = analysises.items[index] temp.append({ 'ip': analysis[0], 'ipsize': float(analysis[1]) if float(analysis[1]) 
else 0 }) total += float(analysis[1]) if float(analysis[1]) else 0 if type_ == '0': temp[index]['region'] = analysis[2] if type_ == '1': IP.load(os.path.abspath("17monipdb.dat")) ip_region = IP.find(analysis[0]) temp[index]['region'] = ''.join(ip_region.split()) content['data'] = temp content['total'] = total return jsonify({'content': content, 'totalElements': analysises.total})
# -*- coding: utf-8 -*-
import os
import random
from ipip import IP

# Locate the database shipped next to this module and load it once at import.
dat_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'ip.dat')
IP.load(dat_path)


def get_ip_locality(ip):
    """Return a short locality slice for *ip*.

    Localhost gets a random Chinese province name so local development
    still produces plausible data.
    """
    if ip == '127.0.0.1':
        fake_provinces = [
            u'湖北', u'湖南', u'河南', u'河北', u'山东', u'山西', u'江西',
            u'江苏', u'浙江', u'黑龙江', u'新疆', u'云南', u'贵州', u'福建',
            u'吉林', u'安徽', u'四川', u'西藏', u'宁夏', u'辽宁', u'青海',
            u'甘肃', u'陕西', u'内蒙', u'台湾', u'北京', u'上海', u'海南',
            u'天津', u'重庆',
        ]
        return random.choice(fake_provinces)
    # NOTE(review): slices two characters of the location string — presumably
    # the province; confirm against the ip.dat record layout.
    return IP.find(ip)[3:5]
# coding: utf-8 import sys,os,random,re,httplib,codecs from ipip import IP root = '/Users/zhangtaichao/Documents/ips' datapaht = '/Users/zhangtaichao/code/php/duobaohui_php/app/libraries/class/ip/17monipdb.dat' IP.load(os.path.abspath(datapaht)) def get_files(path): l = os.listdir(root) for name in l: if name.startswith('t_txt'): yield os.path.join(path,name) def for_file(file,num,inc=1): """ :param file: ip段文件 :param num: 每行最大生成个数 :return:list """ dic = {} (filepath,filename)=os.path.split(file) dic["name"] = filename[-2:] result = [] with open(file,'r') as f: for line in f.readlines(): ips = line.split() if(len(ips) == 3): res = gen_ip(ips[0],ips[1],num,inc) if type(res) == list:
from time import sleep from ipip import IP reload(sys) sys.setdefaultencoding('utf-8') mysql_settings = { 'host': 'localhost:3306', 'user': '', 'password': '', 'database': 'acfun' } db = torndb.Connection(**mysql_settings) IP.load(os.path.abspath("17monipdb.dat")) # def get_user_rank(): # Get User post Count print("Start Get user post count from db.") startDate = (time.strftime("%Y-%m-%d")) endDate = (datetime.datetime.strftime(datetime.datetime.now() - datetime.timedelta(1), '%Y-%m-%d')) channelCount = db.query('SELECT `userID`,`userName` ,count(*) AS count FROM ' '(SELECT * FROM `comment_list` WHERE Date(`postDate`) ' 'between %s and %s ORDER BY `comment_list`.`postDate` DESC)sort ' 'GROUP BY userID ORDER BY count(*) DESC LIMIT 0,50', endDate, startDate) json_str = json.dumps(channelCount) json_str = 'userRank("' + startDate + '","' + endDate + '",' + json_str + ');'
# -*- coding: utf-8 -*- import re import os import csv import sys import time from ipip import IP from functools import wraps import marshal IP.load(os.path.join(os.getcwd(), "17monipdb.dat")) def dump_obj(obj, file_name): with open(file_name, 'wb') as wf: marshal.dump(obj, wf) def load_obj(file_name): with open(file_name, 'rb') as rf: return marshal.load(rf) def fn_cache(key=None): _default_key_func = lambda args, kwargs: '<args:%r,kwargs:%r>' % (args, kwargs) _key_func = _default_key_func if key is None else key def _fn_cache(func): cache_dict = {} @wraps(func)
def a(): IP.load(os.path.abspath("17monipdb.dat")) for i in xrange(1,2): print IP.find("118.%s.8.%s" % (i+1,i)) print IP.find('14.23.124.162')
for i4 in range(0,254): ip='%s.%s.%s.%s' %(i1,i2,i3,i4) if i1 not in (0,10,172,192,127): f = file('ip.txt','a+') for i1 in range(0,254): for i2 in range(0,254): for i3 in range(0,254): for i4 in range(0,254): ip='%s.%s.%s.%s' %(i1,i2,i3,i4) if i1 not in (0,10,172,192,127): # print(ip) try: #print(ip) IP.load(os.path.abspath("mydata4vipday2.datx")) # IP.load(os.path.abspath("monipdb.dat")) print IP.find(ip) IPX.load(os.path.abspath("mydata4vipday2.datx")) # IP.load(os.path.abspath("monipdb.dat")) ip1 = IPX.find(ip) print ip,ip1 codelst = ['\n',ip,ip1,'\n'] f.writelines(codelst) ip1 = IPX.find(ip) #print ip,ip1
# code from https://github.com/17mon/python import sys reload(sys) sys.setdefaultencoding("utf-8") import os import random import socket import struct import time from ipip import IP IP.load(os.path.abspath("../data/17monipdb20170909.dat")) begin = time.time() for n in range(1000): ip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff))) city = IP.find(ip) # if city: # print city[0] cost = time.time() - begin print 'cost===:', cost city = IP.find('222.175.96.1') print ','.join(city) city = IP.find('36.149.244.81') print ','.join(city)
def ip_search():
    # Query the geographic location of a given IP address.
    # NOTE(review): '路径/...' and 'ip地址' are placeholder strings
    # ("path" / "ip address") that must be replaced before this can run.
    IP.load('路径/tinyipdata_utf8.dat')
    print IP.find('ip地址').strip().encode("utf8")
#!/usr/bin/python # -*- coding: utf-8 -*- from ipip import IP import json import os pvDict = {} clkDict = {} winDict = {} staDict = {} rltDict = {} # statistic every crid's ctr f = open("ip_make_rlt", "w+") IP.load( os.path.abspath( '/Users/haoshun/hs-code-test/sta_bridge_log/ip_sta/ip_location.dat')) for line in open( "/Users/haoshun/hs-code-test/bridge_temporary_code/hs_win_21_head"): try: line = line.strip() jsonObj = json.loads(line) auction = jsonObj['auction'] winDict[auction] = {"win"} except: continue for line in open( "/Users/haoshun/hs-code-test/bridge_temporary_code/hs_clk_21_head"): try: line = line.strip() jsonObj = json.loads(line) auction = jsonObj['auction'] clkDict[auction] = {"clk"}
from pipestat import pipestat from djra import settings import itertools from ipip import IP def to_timestamp(d): return time.mktime(d.timetuple()) IP_DB_FILE = getattr(settings, 'IP_DB_FILE', None) if IP_DB_FILE is None: warnings.warn('''not found IP_DB_FILE, geo report will not work, please set IP_DB_FILE in settings. You can download the ipdb file from http://www.ipip.net/ ''') else: IP.load(IP_DB_FILE) def pick(x, fields): return { field: getattr(x, field) for field in fields } def build_server_report(begin_date, end_date, options): fields = ['username', 'nasipaddress', 'acctinputoctets', 'acctoutputoctets'] sessions = Radacct.objects.filter( acctstarttime__gte=begin_date, acctstarttime__lt=end_date ).only(*fields) sessions = itertools.imap(lambda x: pick(x, fields), sessions) report_pipe = [ { "$group": {
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding("utf-8") import os from ipip import IP from ipip import IPX ip_file = open("ip_data") ip_arr = [] for line in ip_file: ip_arr.append(line) ip_file.close() IP.load(os.path.abspath("ip_location.dat")) for ip in ip_arr: print IP.find(ip)
#https://segmentfault.com/a/1190000000352578 ##更新本地dat文件 #from qqwry import updateQQwry #result = updateQQwry('qqwry.dat') ##use from qqwry import QQwry q = QQwry() q.load_file('qqwry.dat')#工作路径 result = q.lookup('xxx.xxx.xx.xx') print(result) ###################################### ipip.net ######################################### #https://github.com/17mon/python import os from ipip import IP#目前无法pip(python3.5.1) from ipip import IPX IP.load(os.path.abspath("mydata4vipday2.dat"))#工作路径 print IP.find("118.28.8.8") IPX.load(os.path.abspath("mydata4vipday2.datx"))#工作路径 print IPX.find("118.28.8.8") >>>中国 天津 天津 鹏博士 >>>中国 天津 天津 鹏博士 39.128399 117.185112 Asia/Shanghai UTC+8 120000
#!/usr/bin/env python # -*- coding:utf-8 -*- import json import os import datetime import csv import multiprocessing as mp import pandas as pd from sqlalchemy import create_engine from ipip import IP # 加载IP库 IP.load(os.path.abspath("17monipdb.dat")) today_time = datetime.datetime.now().strftime("%Y-%m-%d") today_name = "/home/data/log_analysis/%s.csv" % today_time #测试数据 #today_time = "2017-08-27" #today_name = "2017-06-09.csv" # ip地址转换地区 def ip_check(address): if len(address.split(".")) == 4: try: clientip = IP.find(address) if clientip not in "中国 钓鱼岛 北京 上海 天津 重庆 黑龙江 吉林 辽宁 内蒙古 河北 新疆 " \ "甘肃 青海 陕西 宁夏 河南 山东 山西 安徽 湖南 湖北 江苏 四川 贵州 " \ "云南 广西 西藏 浙江 江西 广东 福建 台湾 海南 香港 澳门": clientip = "外国" return clientip return clientip
#!/usr/bin/env python # coding: utf-8 import os,sys,json import string from ipip import IP from ipip import IPX IP.load(os.path.abspath("mydata4vipday2.dat")) #address = IP.find("118.28.8.8") #print address jsonfile=open("20141001.txt") #jsonfile=open("20141001-20141101.txt") jsontext = jsonfile.read() dataDict = json.loads(jsontext); list = dataDict["list"] match=0 smatch=0 nomatch=0 snomatch=0 nomatchList=[] count=0 for element in list: ip = element["ip"] #print ip #print element["city"]
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding('utf-8') import os from ipip import IP from ipip import IPX IP.load(os.path.abspath('mydata4vipday2.dat')) print(IP.find('118.28.8.8')) IPX.load(os.path.abspath('mydata4vipday2.datx')) print(IPX.find('118.28.8.8'))
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding("utf-8") import os from ipip import IP from ipip import IPX IP.load(os.path.abspath("mydata4vipday2.dat")) print IP.find("118.28.8.8") IPX.load(os.path.abspath("mydata4vipday2.datx")) print IPX.find("118.28.8.8")
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding("utf-8") import os import sys from ipip import IP IP.load(os.path.abspath("17monipdb.dat")) query = sys.argv[1] r = " ".join(IP.find(query).split("\t")) output = ''' <?xml version="1.0"?> <items> <item uid="%s" arg="0" valid="YES" autocomplete="%s"> <title>%s</title> <icon>icon.png</icon> </item> </items> ''' % (query, query, r) print output