def create_geoip_db():
    """Open the GeoLite2 City DB, downloading and extracting it if absent.

    Returns:
        An open geoip database handle for 'GeoLite2-City.mmdb'.

    Exits the process with a non-zero status (after printing manual
    download instructions) when the DB can neither be found nor fetched.
    """
    try:
        # Fast path: a previously downloaded DB sits in the working directory.
        return open_database('GeoLite2-City.mmdb')
    except IOError:
        try:
            # Download GeoIP DB archive from MaxMind.
            sys.stderr.write(
                "\nCouldn't find the GeoIP DB. Attempting download now from "
                "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City"
                ".tar.gz\n")
            urllib.urlretrieve(
                "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City"
                ".tar.gz", "GeoLite2-City.tar.gz")

            # Extract GeoIP DB; the archive's first member name is its
            # top-level directory.
            # NOTE(review): extractall() on a downloaded archive is exposed
            # to path-traversal member names — consider validating members.
            sys.stderr.write("Extracting GeoLite Database.\n")
            with tarfile.open("GeoLite2-City.tar.gz", "r:gz") as tar:
                tar_directory = tar.getnames()[0]
                tar.extractall()

            # Keep only the .mmdb; drop the extracted tree and the archive.
            sys.stderr.write("Cleaning up.\n\n")
            shutil.move("{}/GeoLite2-City.mmdb".format(tar_directory),
                        "GeoLite2-City.mmdb")
            shutil.rmtree(tar_directory)
            os.remove("GeoLite2-City.tar.gz")

            # Open and return GeoIP DB.
            return open_database('GeoLite2-City.mmdb')
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt (Ctrl-C during the download).
            sys.stderr.write(
                "Couldn't find or download the GeoIP DB. Please do the following:\n"
            )
            sys.stderr.write(
                "\t1. Download the 'GeoLite2 City - MaxMind DB binary' from "
                "http://dev.maxmind.com/geoip/geoip2/geolite2/\n")
            sys.stderr.write(
                "\t2. Extract the contents and make sure the DB file is named "
                "'GeoLite2-City.mmdb'\n")
            sys.stderr.write(
                "\t3. Place 'GeoLite2-City.mmdb' in the 'GeoLogonalyzer.py' working"
                "directory'\n")
            # Fix: exit non-zero so calling scripts can detect the failure
            # (sys.exit() with no argument exits 0 = success).
            sys.exit(1)
def parse_geo(plist_url, stat=False, include=None, exclude=None, exclude_list=None):
    """Filter a downloaded proxy list by GeoIP country code.

    Prints each surviving "ip:port" entry, or — with stat=True — prints
    per-country counts sorted ascending plus a total line.

    :param plist_url: URL (or shorthand) of the proxy list to download.
    :param stat: print country statistics instead of the entries.
    :param include: comma-separated country codes to keep (empty = all).
    :param exclude: comma-separated country codes to drop.
    :param exclude_list: optional second list whose IPs are always dropped.
    """
    inc_codes = [c.strip() for c in include.split(',')] if include else []
    exc_codes = [c.strip() for c in exclude.split(',')] if exclude else []

    ex_ips = set()
    if exclude_list:
        ex_plist = download_plist(normalize_plist_url(exclude_list))
        ex_ips = {entry.split(':')[0] for entry in ex_plist}

    geo = open_database(country_database_file)
    plist = download_plist(normalize_plist_url(plist_url))

    reg = defaultdict(int)
    for addr in plist:
        ip = addr.split(':')[0]
        code = (geo.lookup(ip).country or '--').lower()
        # Guard clauses: drop entries rejected by any of the three filters.
        if inc_codes and code not in inc_codes:
            continue
        if exc_codes and code in exc_codes:
            continue
        if ip in ex_ips:
            continue
        reg[code] += 1
        if not stat:
            print(addr)

    if stat:
        for code, count in sorted(reg.items(), key=lambda item: item[1]):
            print('%s:%d' % (code, count))
        print('total:%d' % sum(reg.values()))
def load_if_new(self, force=False):
    """Re-open the GeoIP database if the backing file's mtime advanced.

    :param force: reload even when the modification time is unchanged.
    :returns: a freshly opened database, or None when nothing changed.
    """
    self.fp.restat()
    mtime = self.fp.getModificationTime()
    # Skip the (relatively expensive) reopen unless the file changed.
    if mtime <= self.last_modified and not force:
        return
    logger.info("%r modified: %s > %s", self.fp, mtime, self.last_modified)
    self.last_modified = mtime
    return geoip.open_database(self.fp.realpath().path)
def __init__(self, logfiles_folder, geoip_database_location='./db.mmdb'):
    """Collect log files matching *logfiles_folder* and parse them.

    :param logfiles_folder: pattern passed to glob.glob — presumably a
        glob pattern rather than a bare directory; TODO confirm callers.
    :param geoip_database_location: path to a MaxMind .mmdb database.
    """
    self.countries = []  # filled later by get_ip_date_n_location (defined elsewhere)
    self.ips = []        # filled later by get_ip_date_n_location
    self.files = glob.glob(logfiles_folder)
    # One output slot per matched file; sort_files presumably fills these.
    self.ordered_log = [""] * len(self.files)
    self.db = open_database(geoip_database_location)
    # Side-effecting setup: order the files, then extract ip/date/location.
    self.sort_files()
    self.get_ip_date_n_location()
def load_if_new(self, force=False):
    """Reload the GeoIP database when the underlying file was modified.

    :param force: when True, reload regardless of the modification time.
    :returns: a new geoip database object, or None if it is up to date.
    """
    self.fp.restat()
    current = self.fp.getModificationTime()
    stale = force or current > self.last_modified
    if not stale:
        return
    logger.info('%r modified: %s > %s', self.fp, current, self.last_modified)
    self.last_modified = current
    return geoip.open_database(self.fp.realpath().path)
def checkIPAddress(sampleIPArray, country, continent, database):
    """Verify each sample IP resolves to the expected country/continent.

    (Python 2 code — uses print statements.)

    :param sampleIPArray: iterable of IP address strings to test.
    :param country: expected country value from the GeoIP match.
    :param continent: expected continent value from the GeoIP match.
    :param database: path to the .mmdb file to open.
    """
    db = open_database(database)
    for ip in sampleIPArray:
        match = db.lookup(ip)
        if match.country == country and match.continent == continent:
            print ip + ' matches country & continent for ' + country + ' IP addresses--pass'
        else:
            # NOTE(review): exits on the FIRST mismatch, so later IPs are
            # never checked — confirm this fail-fast behavior is intended.
            # NOTE(review): the concatenated alert message lacks spaces
            # around some fragments ("did not match" + country).
            sendEmailAlert(ip + ' did not match' + country + ' country or continent as expected while testing' + country + 'IP addresses from database: ' + database)
            sys.exit()
def get_pos(request_client):
    """Resolve an IP address to a printable GeoIP record and coordinates.

    :param request_client: IP address string to look up.
    :returns: tuple (pos, lat, lng) — pos is a human-readable string
        (a Portuguese "not in database" message when the IP is unknown),
        lat/lng are floats ((0.0, 0.0) when unknown).
    """
    bd = open_database(os.getcwd() + "/GeoLite2-City.mmdb")
    # Fix: look the IP up once (original called bd.lookup() twice — once
    # in the test and again for the result).
    match = bd.lookup(request_client)
    # Fix: compare to None with `is not None`, not `!= None`.
    if match is not None:
        resposta_geoip = match.to_dict()
        location = resposta_geoip['location']  # (lat, lng)
        pos = str(resposta_geoip).replace("'", "")
    else:
        pos = str(request_client) + " não consta no Banco GeoLite2-City"
        location = ('0', '0')
    return pos, float(location[0]), float(location[1])
def __init__(self, log_file_path):
    """Open DB handles and initialize per-session state, then build the UI.

    :param log_file_path: path to the web-server log file to analyze.
    """
    self.log_file_path = log_file_path
    # self.db_name is presumably a class attribute — TODO confirm.
    self.conn = sqlite3.connect(self.db_name)
    self.cur = self.conn.cursor()
    self.ipdb = open_database(
        'GeoLite2-Country.mmdb')  # opened GeoIP database of IP addresses
    self.logs = list()                # parsed log entries
    self.users = set()                # distinct users seen so far
    self.orders = set()               # distinct orders seen so far
    self.current_categories = dict()  # current category state — TODO confirm semantics
    self.init_design()
def __init__(self, log_file_path):
    """Open DB handles, compile parsing patterns, and set up state.

    :param log_file_path: path to the web-server log file to analyze.
    """
    self.log_file_path = log_file_path
    # self.db_name is presumably a class attribute — TODO confirm.
    self.conn = sqlite3.connect(self.db_name)
    self.cur = self.conn.cursor()
    self.ipdb = open_database('GeoLite2/GeoLite2-Country.mmdb')
    # Pre-compile the URL-fragment pattern once; reused per log line.
    self.key_fragment_pattern = re.compile(self.KEY_URL_FRAGMENT_RE)
    self.log = list()                 # parsed log entries
    self.users = set()                # distinct users seen so far
    self.orders = set()               # distinct orders seen so far
    self.current_categories = dict()  # current category state — TODO confirm semantics
    self.init_schema()
def ip_lookup_country(ip):
    """
    Looks up an IP address in the MaxMind GeoLite2-Country.mmdb database
    to find out which country the IP address links to. This DB should be
    updated once in a while. (For update purposes the database is
    downloadable from: http://dev.maxmind.com/geoip/geoip2/geolite2/)

    :author: Sebastian
    :param ip: The IP address as string (-- without the port --).
    :raises ValueError: raised if the IP address specified does not
        match IP specifications.
    :return: The location of the IP address as two letter ISO-code;
        "DE" when ip is None.
    """
    # Fix: return the default before touching the database — the original
    # opened the .mmdb file on every call even when ip was None and the
    # handle was never used.
    if ip is None:
        return "DE"
    database = geoip.open_database(
        "{}/GeoLite2-Country.mmdb".format(static_path))
    return database.lookup(ip).country
def set_mirror_country(
        mirror_info: Dict[AnyStr, Union[Dict, AnyStr]],
) -> None:
    """Resolve a mirror's hosting country from its hostname and store it
    in mirror_info['country'] ('Unknown' when GeoIP has no match).

    :param mirror_info: Dict with info about a mirror
    """
    mirror_name = mirror_info['name']
    resolved_ip = socket.gethostbyname(mirror_name)
    match = open_database(GEOPIP_DB).lookup(resolved_ip)  # type: IPInfo
    logging.info('Set country for mirror "%s"', mirror_name)
    if match is None:
        mirror_info['country'] = 'Unknown'
        return
    mirror_info['country'] = match.get_info_dict()['country']['names']['en']
def load_geoip_data():
    """Open the GeoLite2 city DB and build the geoname_id -> label map.

    Sets the module-level `ip_db` handle and populates the module-level
    `subdivision_names` dict with "City, Subdivision" display labels read
    from the MaxMind locations CSV.

    (File is opened in "rb" for csv.reader — Python 2 convention in this
    file; kept as-is.)
    """
    global ip_db
    ip_db = open_database('GeoLite2-City.mmdb')
    with open("GeoLite2-City-Locations.csv", "rb") as csvfile:
        for row in csv.reader(csvfile):
            if row[0] == "geoname_id":
                continue  # skip the header row
            geoname_id = int(row[0])
            subdivision_name = row[6].strip()
            city_name = row[7].strip()
            # Idiom fix: the original four-way if/elif ladder is exactly
            # "join the non-empty parts with ', '"; skip when both empty.
            label = ", ".join(p for p in (city_name, subdivision_name) if p)
            if label:
                subdivision_names[geoname_id] = label
def parse_dump(self):
    """Parse saved tcpdump output, aggregate bytes per destination IP for
    self.address, and print the top 5 talkers with GeoIP locations.

    (Python 2 code — print statements, string-offset parsing.)
    Relies on helpers defined elsewhere on the class: auxillary(),
    filter(), sort_list().
    """
    with open('tcpdump_file.txt', 'rb') as fobj:
        lines = fobj.read().splitlines()
    final = []
    for i in range(len(lines)):
        if "length" not in lines[i]:
            continue  # only packet summary lines carry a "length" field
        # Slice the "src > dst" portion between the "IP" and "Flags" tokens.
        first = lines[i].find("IP")
        second = lines[i].find("Flags")
        ip_ports = lines[i][first + 2:second - 2].strip()
        # Payload size follows the literal "length " (offset +7).
        size_where = lines[i].find("length")
        size = lines[i][size_where + 7:len(lines[i])]
        first_ip, second_ip = self.auxillary(ip_ports)
        # output layout: [[src_ip], [dst_ip], byte_count, "src-dst" key]
        output = [[], [], 0, ""]
        output[0].append(first_ip)
        output[1].append(second_ip)
        output[2] += int(size)
        output[3] = output[0][0].strip() + "-" + output[1][0].strip()
        final.append(output)
    result = self.filter(final, self.address)
    real_list = self.sort_list(result)
    # Aggregate total packet size per destination IP.
    mr_robot = {}
    for i in range(len(real_list)):
        dst = real_list[i][1][0]
        size = real_list[i][2]
        if dst in mr_robot:
            mr_robot[dst] += size
        else:
            mr_robot[dst] = size
    sort_num = []
    for k, v in mr_robot.items():
        sort_num.append(v)
    sort_num = sorted(sort_num, reverse=True)
    # Recover destinations in descending-size order.
    # NOTE(review): equal byte-counts will repeatedly pick the same first
    # matching key — confirm duplicates cannot occur / are acceptable.
    ordered = []
    for i in range(len(sort_num)):
        for k, v in mr_robot.items():
            if sort_num[i] == mr_robot[k]:
                ordered.append(k)
                break
    print "Source IP" + "\t" + "Destination IP" + "\t" + "Packet Size"
    print
    for i in range(len(sort_num)):
        print self.address + "\t" + ordered[i] + "\t" + str(sort_num[i])
    # Hard-coded local path to the GeoLite2 city database.
    db = open_database(
        '/home/binyamin/Downloads/GeoLite2-City_20181023/GeoLite2-City.mmdb'
    )
    print
    print Fore.GREEN + "[*] Calculating locations for top 5 IP addresses..." + Fore.RESET
    print
    time.sleep(2)
    print "IP address" + "\t" + " " + "\t" + "Country" + "\t" + "Coordinates"
    print
    # NOTE(review): assumes at least 5 aggregated destinations exist —
    # range(5) will IndexError on smaller captures; confirm.
    for i in range(5):
        match = db.lookup(ordered[i])
        if match is not None:
            x, y = str(match.location[0]), str(match.location[1])
            print ordered[
                i] + "\t" + " --->" + "\t" + match.country + "\t" + x + "," + y
        else:
            continue
import sys
import pytz
import geoip
import ipaddress
from flask import Flask, render_template, request

# NOTE(review): `os` and `datetime` are used below but not imported in this
# chunk — presumably imported elsewhere in the file; confirm.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from config import Outputs
from output import MongoConnector

output_file_name = "loganalyser.output"  # name of the outputs config file
config_path = os.path.dirname(__file__)  # directory holding this module

app = Flask(__name__)
# Country-level GeoIP database shipped alongside the app, opened once at
# import time.
geoip_db = geoip.open_database(
    os.path.join(os.path.dirname(__file__), 'data/GeoLite2-Country.mmdb'))


def get_mongo_connection():
    """Build and return a MongoDB collection handle from the parsed
    outputs configuration file."""
    output = Outputs()
    output.parse_outputs(os.path.join(config_path, '..', output_file_name))
    config = output.get_output('mongo')
    mc = MongoConnector(config)
    col = mc.get_collection()
    return col


def get_period_mask(period):
    # NOTE(review): this function is truncated in this chunk — its body
    # continues beyond the visible source.
    now = datetime.datetime.now(pytz.UTC)
    if period == 'today':
        today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
import dpkt
import sys
import geoip2.database
import socket
import json
import pytz
import datetime
from geoip import open_database
import matplotlib.pyplot as plt
import matplotlib
import numpy as np

TCP = dpkt.tcp.TCP  # alias for packet-type checks on parsed frames

# NOTE(review): two GeoIP backends are opened — geoip2 for the country DB
# and python-geoip for the city DB; confirm both are actually used.
reader = geoip2.database.Reader('GeoLite2-Country.mmdb')
db = open_database('GeoLite2-City.mmdb')

# `global` at module level is a no-op; kept byte-identical.
global country_ts, top5, countries
country_ts = {}  # presumably country -> list of timestamps; confirm
top5 = {}        # presumably top-talker aggregation; confirm
countries = {}   # presumably per-country counters; confirm

server_timezone = pytz.timezone("Europe/Berlin")  # capture host's timezone
time_format = '%Y-%m-%d %H:%M:%S'


def extract_country(ip_hdr, ts):
    """Resolve the packet's source country and convert the capture
    timestamp into the source IP's local timezone.

    NOTE(review): function is truncated in this chunk — the visible body
    ends right after computing `new_time`.
    """
    src_ip = socket.inet_ntoa(ip_hdr.src)
    country = db.lookup(src_ip).country
    date_time = datetime.datetime.fromtimestamp(ts)
    timezone_ip = db.lookup(src_ip).timezone
    new_timezone = pytz.timezone(timezone_ip)
    time_new_timezone = server_timezone.localize(date_time).astimezone(
        new_timezone)
    new_time = str(time_new_timezone.replace(tzinfo=None, microsecond=0))
from geoip import open_database, geolite2

# Demo script (Python 2 — print statement).
# NOTE(review): the database opened as `db` is never used — the lookup
# goes through the bundled `geolite2` instead, and the db.lookup_mine()
# call is commented out. Confirm which backend is intended.
with open_database('GeoLite2-City.mmdb') as db:
    # match = db.lookup_mine()
    match = geolite2.lookup('1.127.48.248')
    print 'My IP:', match
# -*- coding: utf-8 -*- from geoip import open_database GEO_DB = open_database("data/GeoLite2-City.mmdb")
def main(argv):
    """Command-line entry point: scan log file(s) for IP addresses,
    flag blacklist / GeoIP-filtered hits, and dump results as JSON.

    :param argv: argument vector (sys.argv[1:]) parsed with getopt.
    Options: -r recursive, -p path/file, -g geoip .mmdb, -b blacklist
    JSON, -f comma-separated country filter, -o output JSON file.
    """
    print("Check logs with IP BL v1.0")
    # Parsed option results are shared with run_bl_file via globals.
    global db
    global databl
    global list_geoip
    recursive = False
    pathisfile = False
    path2scan = None
    savefile = None
    try:
        opts, args = getopt.getopt(argv, "hrp:f:g:b:o:", [
            "help", "recursive", "path=", "filtergeoip=", "geoipdata=",
            "blacklist=", "output="
        ])
    except getopt.GetoptError:
        usage()
        sys.exit(-1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit(-1)
        # NOTE(review): plain `if` (not elif) after the help branch —
        # harmless here since help exits, but inconsistent with the chain.
        if opt in ("-r", "--recursive"):
            recursive = True
        elif opt in ("-p", "--path"):
            #check if file or path
            if os.path.isdir(arg):
                path2scan = arg
            elif os.path.isfile(arg):
                path2scan = arg
                pathisfile = True
            else:
                print('Error: Path or file ' + str(arg) + ' not exist.')
                sys.exit()
        elif opt in ("-g", "--geoipdata"):
            #check if file or path
            if os.path.isfile(arg):
                try:
                    db = open_database(arg)
                except Exception as error:
                    print(
                        'Error to load: geoip database potentially corrupted => '
                        + str(error))
                    sys.exit()
            else:
                print('Error: geoip database ' + str(arg) + ' not exist.')
                sys.exit()
        elif opt in ("-b", "--blacklist"):
            #check if file or path
            if os.path.isfile(arg):
                try:
                    with open(arg) as json_file:
                        databl = json.load(json_file)
                except Exception as error:
                    print('Error to load: blacklist database => ' + str(error))
                    sys.exit()
            else:
                # NOTE(review): message says "geoip database" but this is
                # the blacklist branch — likely a copy/paste slip.
                print('Error: geoip database ' + str(arg) + ' not exist.')
                sys.exit()
        elif opt in ("-f", "--filtergeoip"):
            #check if file or path
            if bool(re.search(r'^[A-Za-z,]+$', arg)):
                list_geoip = arg.upper().split(',')
            else:
                print('Error: filter geoip bad format => ' + str(arg))
                sys.exit()
        elif opt in ("-o", "--output"):
            #check if file or path
            if not os.path.isfile(arg):
                savefile = arg
            else:
                print('Error: file to save already exist => ' + str(arg))
                sys.exit()
    # All of: output path, scan path, blacklist and geoip DB are required.
    if savefile and path2scan and databl and db:
        retjson = {}
        if pathisfile:
            print('Analyze file: ' + str(path2scan))
            retjson = run_bl_file(path2scan)
        else:
            if recursive:
                for root, directories, filenames in os.walk(path2scan):
                    for filename in filenames:
                        #filename
                        print('Analyze file: ' +
                              str(os.path.join(root, filename)))
                        retjson[os.path.join(root, filename)] = run_bl_file(
                            os.path.join(root, filename))
            else:
                # Non-recursive: process only the top-level directory,
                # then stop walking.
                for root, directories, filenames in os.walk(path2scan):
                    for filename in filenames:
                        #filename
                        print('Analyze file: ' +
                              str(os.path.join(root, filename)))
                        retjson[os.path.join(root, filename)] = run_bl_file(
                            os.path.join(root, filename))
                    break
        print('Analyze finished, save result...')
        with open(savefile, 'w') as json_file:
            json.dump(retjson, json_file, indent=4, sort_keys=True)
    else:
        usage()
        sys.exit(-1)
while True: file = open("Banner/nmap") sys.stdout.write(YELLOW) print file.read() cmd = raw_input(colored("<nmap>", "blue")) os.system("nmap " + cmd) #GeoIP elif menu_choice == 3: os.system("clear") file = open("Banner/geoip") sys.stdout.write(YELLOW) print file.read() ip = raw_input(colored("IP:", "white")) with open_database( '/data/data/com.termux/files/usr/lib/python2.7/site-packages/_geoip_geolite2/GeoLite2-City.mmdb' ) as db: match = db.lookup(ip) print('IP info:', match) db.close() #Whois elif menu_choice == 4: os.system("clear") while True: file = open("Banner/whois") sys.stdout.write(YELLOW) print file.read() ip = raw_input(colored("Domain:", "white")) os.system("whois " + ip) #Back button
#!/usr/bin/env python # export PYTHONPATH=$PYTHONPATH:/Library/Frameworks/GDAL.framework/Versions/1.11/Python/2.7/site-packages import sys import argparse import json from geoip import geolite2 from geoip import open_database from sklearn.externals import joblib import pycountry db = open_database('/Users/gen/Downloads/GeoLite2-City.mmdb') # formatted file: list where each element is a tuple for one worker, represented by (num_hits, ip) worker_ips = joblib.load('cocottributes_worker_ips.jbl') hist = {} hist['all data'] = [] hist['countries'] = {} hist['timezone'] = {} hist['countries']['num_hits'] = [] hist['countries']['num_workers'] = [] hist['countries']['items'] = [] hist['timezone']['num_hits'] = [] hist['timezone']['num_workers'] = [] hist['timezone']['items'] = [] map_out = [ ] # {"city_name": "city", "lat": "lat", "long": "long", "nb_visits": int(num_hits)} for num_hits, ip in worker_ips: if ip == '' or ip == '0' or ip is None: continue
def _scan_lines(file_in, db, databl, list_geoip, retjson):
    """Scan an open text stream line by line, recording blacklist hits
    and GeoIP-filter hits into retjson (mutated in place).

    One helper replaces the two byte-for-byte duplicated loops the
    original had for the gzip and plain-text branches.
    """
    cnt = 0
    tmp_ipgeo = {}  # per-call cache: ip -> country, avoids repeated lookups
    for line in file_in:
        cnt += 1
        linex = str(line.rstrip())
        ip = re.findall(
            r'\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b',
            linex)
        find_geo = None
        find_bl = False
        for ipx in ip:
            if not ipaddress.ip_address(str(ipx)).is_private:
                tmp_geo = None
                try:
                    if ipx not in tmp_ipgeo:
                        tmp_ipgeo[ipx] = str(db.lookup(str(ipx)).country)
                    tmp_geo = tmp_ipgeo[ipx]
                except Exception as error:
                    print('Error in db lookup on ' + str(ipx) + ' -> ' +
                          str(error))
                    tmp_geo = 'Inconnu'
                if ipx in databl:
                    # Blacklisted IP: record its BL entry plus country.
                    find_bl = True
                    if ipx not in retjson['BL']:
                        retjson['BL'][ipx] = databl[ipx]
                    if ipx not in retjson['GeoIP']:
                        retjson['GeoIP'][ipx] = tmp_geo
                    find_geo = tmp_geo
                elif list_geoip and tmp_geo not in list_geoip:
                    # Country outside the allowed filter list: flag it.
                    if ipx not in retjson['GeoIP']:
                        retjson['GeoIP'][ipx] = tmp_geo
                    find_geo = tmp_geo
        if find_bl or find_geo:
            # Bug fix: the original re-assigned retjson['lines_suspect']
            # to [] unconditionally on EVERY suspect line, wiping all
            # previously recorded lines — only the last one survived.
            if 'lines_suspect' not in retjson:
                retjson['lines_suspect'] = []
            retjson['lines_suspect'].append({
                str(cnt): linex,
                'bl': find_bl,
                'geoip': find_geo
            })


def run_bl_file(file, get_geoip):
    """Analyze an uploaded log file for blacklisted / geo-filtered IPs.

    :param file: uploaded file object (Flask-style, has .save/.filename);
        .gz uploads are transparently decompressed.
    :param get_geoip: optional comma-separated country whitelist; any IP
        resolving outside it is flagged.
    :returns: result dict (file_clean flag, GeoIP map, BL map, and
        lines_suspect when hits were found), or a 400 JSON response on
        processing errors.
    """
    list_geoip = None
    if get_geoip:
        list_geoip = get_geoip.upper().split(',')
    # NOTE(review): this temp dir is never used nor removed — confirm
    # whether it can be dropped (kept here to preserve behavior).
    tmpdir = tempfile.mkdtemp()
    retjson = {'file_clean': False, 'GeoIP': {}, 'BL': {}}
    with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as tmpfile:
        temp_file_name = tmpfile.name
        try:
            file.save(temp_file_name)
            # Load country DB and blacklist.
            databl = None
            db = open_database('/GeoLite2-Country.mmdb')
            with open('/data/db-ipbl.json') as json_file:
                try:
                    databl = json.load(json_file)
                except Exception:
                    # NOTE(review): abort(503) raises, so the outer except
                    # turns this into a 400 — confirm intended status.
                    abort(503)
            if str(file.filename).endswith(".gz"):
                # Bug fix: gzip.open defaults to binary mode, and passing
                # encoding= without 'rt' raises ValueError, so the gzip
                # branch always fell into the 400 error path.
                with gzip.open(temp_file_name, 'rt', encoding="utf-8",
                               errors='ignore') as file_in:
                    _scan_lines(file_in, db, databl, list_geoip, retjson)
            else:
                with open(temp_file_name, encoding="utf-8",
                          errors='ignore') as file_in:
                    _scan_lines(file_in, db, databl, list_geoip, retjson)
            os.remove(temp_file_name)
        except Exception as e:
            print("Error:" + str(e))
            return make_response(jsonify({'error': 'Bad file upload'}), 400)
    if not 'lines_suspect' in retjson:
        retjson['file_clean'] = True
    return retjson
def get_geoip_db():
    """Lazily open and memoize the city-level GeoIP database.

    Returns the module-level `db` handle, creating it on first call.
    """
    global db
    if db is not None:
        return db
    db = open_database('data/GeoLite2-City.mmdb')
    return db