def calcStats(data):
    """Annotate traceroute-style data with geo records and timing stats.

    Each hop gets a ``geo`` record from geolite2 (hop "1" is this machine,
    looked up via getHostAddress()); each response gets min/max/avg/delta
    over its queries. The annotated data is passed on to calcGeopos().

    :param data: dict with a ``hops`` list (as produced by findTimeouts()).
    :return: whatever calcGeopos() returns for the annotated data.
    """
    data = findTimeouts(data)
    reader = geolite2.reader()
    for hop in data["hops"]:
        hop["geo"] = None
        if hop["id"] == "1":
            # First hop is this host itself.
            hop["geo"] = reader.get(getHostAddress())
        elif hop["responses"][0]["ip"]:
            hop["geo"] = reader.get(hop["responses"][0]["ip"])
        for response in hop["responses"]:
            response["min"] = response["delta"] = response["max"] = \
                response["avg"] = response["unit"] = 0.
            total = 0.  # renamed from `sum`, which shadowed the builtin
            for query in response["queries"]:
                value = float(query["value"])  # convert once per query
                # 0 acts as the "unset" sentinel for min/max.
                if response["max"] == 0 or value > response["max"]:
                    response["max"] = value
                if response["min"] == 0 or value < response["min"]:
                    response["min"] = value
                total = total + value
                response["unit"] = query["unit"]
            if len(response["queries"]) > 0:
                # avg is formatted to a 2-decimal string; min/max stay floats.
                response["avg"] = "{0:.2f}".format(total / len(response["queries"]))
                response["delta"] = (response["max"] - response["min"]) / 2
    return (calcGeopos(data))
def setup_data(self):
    """Enrich the instance's pandas DataFrame in place.

    Opens a geolite2 reader and, for every row with a usable ``IP``
    value, adds ``country`` and ``city`` columns resolved from the IP,
    then closes the geolite2 connection. Afterwards, httpagentparser is
    used to derive ``browser_family`` and ``os_family`` columns from
    ``user_agent_string``.

    Rows whose ``IP`` is "-" or None get None for all four new columns.
    """
    reader = geolite2.reader()
    # Geo columns: look up the (validated) IP; none_check unwraps the
    # requested field or returns None for missing data.
    self.df["country"] = self.df.apply(
        lambda k: (self.none_check(reader.get(self.check_ip(k["IP"])),
                                   "IP", "country")
                   if k["IP"] not in ["-", None] else None), axis=1)
    self.df["city"] = self.df.apply(
        lambda k: (self.none_check(reader.get(self.check_ip(k["IP"])),
                                   "IP", "city")
                   if k["IP"] not in ["-", None] else None), axis=1)
    geolite2.close()
    # NOTE(review): the browser/os lambdas gate on k["IP"], not on
    # k["user_agent_string"] — presumably "-"/None IP marks an unusable
    # row overall; confirm this is intentional.
    self.df["browser_family"] = self.df.apply(
        lambda k: self.none_check(
            httpagentparser.detect(k["user_agent_string"], {}).get(
                "browser"), "user_agent_str", "browser")
        if k["IP"] not in ["-", None] else None, axis=1)
    self.df["os_family"] = self.df.apply(lambda k: self.none_check(
        httpagentparser.detect(k["user_agent_string"], {}).get("os"),
        "user_agent_str", "os") if k["IP"] not in ["-", None] else None,
        axis=1)
def processing(file_id):
    """Process an uploaded Apache-style log file into Coordinates rows.

    For every log line, parses the bracketed timestamp and the leading IP,
    geolocates the IP with geolite2, and stores a Coordinates record.
    Duplicate records (IntegrityError) are skipped. Marks the FileModel
    as processed when done.

    :param file_id: primary key of the locally stored FileModel
    :return: True on completion
    """
    ap_log = FileModel.objects.get(id=file_id)
    log_file = ap_log.log_file.path
    patt = re.compile(r'\[(.*)\]')  # raw string: avoids invalid-escape warning
    geo_reader = geolite2.reader()  # hoisted: one reader for the whole file
    with open(log_file) as fh:      # ensure the handle is closed
        for line in fh:
            time_logged = datetime.strptime(
                patt.search(line).group(1), '%d/%b/%Y:%H:%M:%S %z')
            ip = line.split()[0]
            ip_info = geo_reader.get(ip)
            if ip_info and 'location' in ip_info.keys():
                lon = ip_info['location']['longitude']
                lat = ip_info['location']['latitude']
                try:
                    # objects.create() already persists the row; the old
                    # follow-up .save() was a redundant second write.
                    Coordinates.objects.create(ip=ip,
                                               longitude=lon,
                                               latitude=lat,
                                               time_logged=time_logged)
                except IntegrityError:
                    continue
    ap_log.processed = True
    ap_log.save()
    return True
def processUpdate(self, data, session=None):
    """Validate an analytics payload and persist its events.

    Requires the keys liftpass-ip, liftpass-application, user and events;
    raises EventMissingAttributeError otherwise. The player country is
    resolved from the IP on a best-effort basis. When a session is given,
    all processed events are bulk-inserted and committed.

    :param data: update payload dict
    :param session: optional SQLAlchemy session for persistence
    :return: list of processed event dicts
    """
    monitor.getMonitor().count("AnalyticsProcessUpdateCount")
    with monitor.getMonitor().time("AnalyticsProcessUpdateTime"):
        for attribute in ["liftpass-ip", "liftpass-application", "user", "events"]:
            if attribute not in data:
                monitor.getMonitor().count("AnalyticsUpdateMissingAttributeCount")
                raise EventMissingAttributeError(attribute)
        events = []
        ip = data["liftpass-ip"]
        try:
            country = geolite2.reader().get(ip)
            country = country["country"]["iso_code"]
        except Exception:  # narrowed from bare except; lookup is best-effort
            country = None
        # (removed unused `s = time.time()`)
        for update in data["events"]:
            try:
                with monitor.getMonitor().time("AnalyticsProcessUpdateEventTime"):
                    monitor.getMonitor().count("AnalyticsEventCount")
                    events.append(
                        self.processEvent(data["liftpass-application"],
                                          data["user"], ip, country, update)
                    )
            except Exception as e:
                print(e)
        if session is not None:
            session.execute(models.Events.__table__.insert(), events)
            session.commit()
    return events
def _collect_unique_ip(self, lines):
    """Collect unique IP addresses and per-country hit counts.

    New IPs are appended to self.__unique_ip and seeded into
    self.__unique_ip_country with hits=1; repeated IPs just bump the
    hit counter. IPs geolite2 rejects are logged and skipped.
    """
    reader = geolite2.reader()
    list_ips = self._get_ip(lines)
    for ip in list_ips:
        if ip not in self.__unique_ip:
            self.__unique_ip.append(ip)
            try:
                record = reader.get(ip)  # single lookup (was four per IP)
                if record is not None and record.get('country') is not None:
                    country = record.get('country')
                    self.__unique_ip_country[str(ip)] = {
                        "ip": ip,
                        "hits": 1,
                        "country_code": country.get('iso_code'),
                        # NOTE(review): 'en' is read off the country dict
                        # itself; geolite2 normally nests names under
                        # 'names' — confirm this key is correct.
                        "country_name": country.get('en')
                    }
            except ValueError:
                self.__logger.error('Not a IPV4 or IPV6>>>>' + str(ip))
        else:
            # Count how many times the ip address appears in the file.
            try:
                record = reader.get(ip)
                if record is not None and record.get('country') is not None:
                    self.__unique_ip_country[str(ip)]['hits'] += 1
            except ValueError:
                self.__logger.error('Not a IPV4 or IPV6>>>>' + str(ip))
def geo_lookup_ip(ip):
    """Resolve an IP address to a dict of geo fields.

    Every field that cannot be extracted (no record, missing key)
    is filled with the string "NA".
    """
    from geolite2 import geolite2
    record = geolite2.reader().get(ip)
    # Field name -> extractor over the raw geolite2 record.
    extractors = {
        "subdivisions": lambda r: r["subdivisions"][0]["iso_code"],
        "continent": lambda r: r["continent"]["code"],
        "country": lambda r: r["country"]["iso_code"],
        "city": lambda r: r["city"]["names"]["de"],
        "postal": lambda r: r["postal"]["code"],
    }
    result = {}
    for field, extract in extractors.items():
        try:
            result[field] = extract(record)
        except Exception:
            result[field] = "NA"
    return result
def extractIp(filename):
    """Tally source countries from a JSON-lines log.

    Reads one JSON object per line from ``filename``, geolocates its
    ``src_ip``, writes one (country, lat, long) row per located IP to
    loc.csv, and a sorted (country, count) summary to country.csv.
    Malformed lines are skipped on a best-effort basis.
    """
    countryDict = {}
    reader = geolite2.reader()
    # `with` blocks: loc.csv was previously never closed.
    with open('loc.csv', mode='w') as csvF:
        writer = csv.writer(csvF)
        writer.writerow(['country', 'lat', 'long'])
        with open(filename) as f:
            for line in f:  # idiomatic iteration (was manual readline loop)
                try:
                    obj = json.loads(line)
                    ip = obj['src_ip']
                    match = reader.get(ip)
                    if match is not None:
                        name = match['country']['names']['en']
                        countryDict[name] = countryDict.get(name, 0) + 1
                        lat = match['location']['latitude']
                        lon = match['location']['longitude']  # avoid shadowing `long`
                        writer.writerow([name, lat, lon])
                except Exception:  # best-effort skip (was bare except)
                    pass
    countryDict = sorter(countryDict)
    with open('country.csv', mode='w') as f:
        writer = csv.writer(f)
        writer.writerow(['country', 'count'])
        for k, v in countryDict:
            writer.writerow([k, v])
def main():
    """Continuously poll peers, sort them by height, and print a geo table.

    Runs forever: fetches the peer list, measures each peer's height with a
    progress bar, sorts descending by height, then prints country / host /
    port / height per peer.
    """
    last_host_list = []
    reader = geolite2.reader()  # hoisted: was re-created for every host
    while True:
        new_host_list = []
        all_peers = get_peer_list(last_host_list)
        with progress.Bar(label="Fetching data",
                          expected_size=len(all_peers)) as bar:
            # Reuse the already-fetched list; the original called
            # get_peer_list() a second time here (extra network round-trip).
            for i, (host, port) in enumerate(all_peers):
                height = get_peer_height(host, port)
                new_host_list.append((host, port, height))
                bar.show(i)
        new_host_list = sorted(new_host_list,
                               key=lambda t: -t[-1] if t[-1] is not None else 0)
        last_host_list = new_host_list
        for (host, port, height) in new_host_list:
            match = reader.get(host2ip(host))
            # Default "??" also guards the match-is-None case, where the
            # original could print an unbound/stale `country`.
            country = "??"
            if match is not None:
                if 'country' in match:
                    country = match['country']['iso_code']
                elif 'continent' in match:
                    country = match['continent']['code']
            print("%3s %30s %6s %7s" % (country, host, port,
                                        height if height is not None else "??????"))
def log(self, remote_ip, remote_port, protocol, port, data, is_binary, use_ssl):
    """Index a single honeypot connection record into Elasticsearch."""
    client = Elasticsearch(self.__server,
                           use_ssl=self.__use_ssl,
                           verify_certs=self.__verify_certs)
    geo = geolite2.reader().get(remote_ip)
    location = None
    if geo and 'location' in geo:
        # Coordinates are rounded to 2 decimals before indexing.
        location = {
            "lat": round(geo['location']['latitude'], 2),
            "lon": round(geo['location']['longitude'], 2),
        }
    document = {
        "time": datetime.datetime.utcnow().isoformat(),
        "remote_ip": remote_ip,
        "remote_port": remote_port,
        "protocol": protocol,
        "port": port,
        "input": str(data),
        "is_binary": is_binary,
        "use_ssl": use_ssl,
        "location": location,
        "host": platform.node()
    }
    client.index(index='honeypoke', doc_type='connection', body=document)
def get_geo(ip_addr):
    """Return a dict of geo attributes for ``ip_addr``.

    On a successful lookup the record's country/continent/city/zip/
    timezone/latitude/longitude are returned; otherwise UNDEFINED
    placeholders (and 0.0 coordinates) are used.

    NOTE(review): the success path indexes the record directly, so a
    record missing e.g. 'city' raises KeyError — preserved as-is.
    """
    geo_reader = geolite2.reader()
    g = None
    try:
        g = geo_reader.get(ip_addr)
    except ValueError:
        log.error("parsing IP address: {} failed!".format(ip_addr))
    log.debug("Geo data :" + str(g))
    ret = {}
    ret["ip_addr"] = ip_addr
    if g is not None:
        ret["country"] = g["country"]["names"]["en"]
        ret["continent"] = g["continent"]["names"]["en"]
        ret["city"] = g["city"]["names"]["en"]
        ret["zip"] = g["postal"]["code"]
        ret["timezone"] = g["location"]["time_zone"]
        ret["latitude"] = g["location"]["latitude"]
        ret["longitude"] = g["location"]["longitude"]
    else:
        log.warning("cant get geolocation for IP {}".format(ip_addr))
        ret["country"] = UNDEFINED
        ret["continent"] = UNDEFINED
        ret["city"] = UNDEFINED
        ret["zip"] = UNDEFINED  # was missing: fallback now mirrors success keys
        ret["timezone"] = UNDEFINED
        ret["latitude"] = 0.0
        ret["longitude"] = 0.0
    return ret
def get_geoip(ip):
    """Lookup country data for an IP address.

    :return: {'country': <geolite2 country record>} or {} if unknown.
    """
    reader = geolite2.reader()
    ip_data = reader.get(ip)
    if ip_data is not None:
        # geolite2 readers return plain dicts; the original attribute
        # access (ip_data.country) raised AttributeError.
        return dict(country=ip_data.get('country'))
    return {}
def location():
    """Return a dict describing the geolocation of this host's public IP.

    Returns an error string when the lookup raises TypeError (e.g. the
    IP could not be resolved). If the lookup succeeds but the record is
    empty, falls through and returns None implicitly.
    """
    # Call the helper that returns our public IP.
    ipv4 = ip()
    # Open the geolite2 reader.
    geo = geolite2.reader()
    # Check whether get() accepts the IP.
    try:
        info = geo.get(ipv4)
        # Check whether the record has any entries.
        if len(info) > 0:
            # Build a dict with the location characteristics.
            # NOTE(review): missing keys (e.g. 'postal') would raise
            # KeyError here, which is not caught — confirm records for
            # the expected IPs always carry these fields.
            descr = {
                'Municipio': info['city']['names']['en'],
                'Estado': info['subdivisions'][0]['names']['en'],
                'Pais': info['country']['names']['en'],
                'Continente': info['continent']['names']['en'],
                'Latitud': info['location']['latitude'],
                'longitud': info['location']['longitude'],
                'ZonaHoraria': info['location']['time_zone'],
                'CodigoPostal': info['postal']['code'],
                'Ippublica': ipv4
            }
            # Return the dictionary.
            return descr
    except TypeError:
        # The IP could not be looked up (e.g. get() received None).
        return "No se puede obtener la localizacion"
def test_none_check(self):
    """Exercise none_check over its input space.

    Covers: a real geolite2 record (happy path), a key that does not
    exist, None inputs for both method types, and an unknown method
    type — all of which must return None except the happy path.
    """
    reader = geolite2.reader()
    # Asserting that the correct country comes out.
    ip_right = reader.get("92.238.71.10")
    assert (self.etl.none_check(ip_right, "IP",
                                "country") == "United Kingdom")
    # Asserting that when there is no key with that name or similar
    # the method returns None.
    assert (self.etl.none_check(ip_right, "IP", "browser") == None)
    geolite2.close()
    # Asserting that none_check extracts the browser name correctly.
    user_agent_right = httpagentparser.detect(
        "Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53",
        {}).get("browser")
    assert (self.etl.none_check(user_agent_right, "user_agent_str",
                                "browser") == "Safari")
    # Asserting that when a None value is passed for both method types,
    # None is returned back.
    value_none = None
    assert (self.etl.none_check(value_none, "IP", "country") == None)
    assert (self.etl.none_check(value_none, "user_agent_str",
                                "browser") == None)
    # Asserting that if an unknown method type is chosen, None is
    # returned back.
    assert (self.etl.none_check(value_none, "mutliply", "country") == None)
def add_countries(df):
    """Add src_country / dst_country columns resolved via geolite2."""
    print("This might take a while, please wait...")
    reader = geolite2.reader()
    # Resolve source column first, then destination, with one shared reader.
    for source_col, target_col in (('src_ip', 'src_country'),
                                   ('dst_ip', 'dst_country')):
        df[target_col] = df[source_col].apply(get_country, geo=reader)
    geolite2.close()
    return df
def geoip_results(self, results):
    """Write a GeoJSON MultiPoint feature of all locatable results.

    Each result's first element is treated as an IP; results without a
    geolite2 location are skipped. The collected coordinates are written
    to self.wfile as one UTF-8 encoded GeoJSON Feature.
    """
    reader = geolite2.reader()
    coordinates = []
    # (removed unused locals `previous` and `coordinate`)
    for result in results:
        info = reader.get(str(result[0])) or {}
        location = info.get('location', None)
        if not location:
            continue
        coordinates.append(
            [float(location['longitude']), float(location['latitude'])])
    data = {
        "type": "Feature",
        "geometry": {
            "type": "MultiPoint",
            "coordinates": coordinates
        }
    }
    dump = json.dumps(data).encode('utf-8')
    self.wfile.write(dump)
def geoJSON(self):
    """Generate the relevant GeoJSON data needed for the map.

    :return: list of GeoJSON feature dicts, one per geolocatable IP.
    """
    jsonList = list()
    # Load GeoIP
    reader = geolite2.reader()
    for ip in self.ipList:
        # Get GeoIP information for the IP.
        try:
            ipgeo = reader.get(ip)
            lat = ipgeo["location"]["latitude"]
            lon = ipgeo["location"]["longitude"]
        except TypeError:
            # Some IPs have no geolocation information and can't be on
            # the map. Bug fix: the original `pass` fell through and
            # appended an entry using stale or undefined lat/lon.
            print("No geoip information for", ip)
            continue
        # Obfuscate IP (this can be used for public map pages).
        if self.hideIP == True:
            ip = obfuscateIPAddress(ip)
        jsonList.append(
            geoJSONTemplate(ip, self.service, [lon, lat], self.color))
    geolite2.close()  # Close geoip db
    return jsonList
def processUpdate(self, data, session=None):
    """Validate an analytics payload and persist its events.

    Requires the keys liftpass-ip, liftpass-application, user and events;
    raises EventMissingAttributeError otherwise. The player country is
    resolved from the IP on a best-effort basis. When a session is given,
    all processed events are bulk-inserted and committed.

    :param data: update payload dict
    :param session: optional SQLAlchemy session for persistence
    :return: list of processed event dicts
    """
    monitor.getMonitor().count('AnalyticsProcessUpdateCount')
    with monitor.getMonitor().time('AnalyticsProcessUpdateTime'):
        for attribute in ['liftpass-ip', 'liftpass-application', 'user', 'events']:
            if attribute not in data:
                monitor.getMonitor().count('AnalyticsUpdateMissingAttributeCount')
                raise EventMissingAttributeError(attribute)
        events = []
        ip = data['liftpass-ip']
        try:
            country = geolite2.reader().get(ip)
            country = country['country']['iso_code']
        except Exception:  # narrowed from bare except; lookup is best-effort
            country = None
        # (removed unused `s = time.time()`)
        for update in data['events']:
            try:
                with monitor.getMonitor().time('AnalyticsProcessUpdateEventTime'):
                    monitor.getMonitor().count('AnalyticsEventCount')
                    events.append(self.processEvent(data['liftpass-application'],
                                                    data['user'], ip, country,
                                                    update))
            except Exception as e:
                print(e)
        if session is not None:
            session.execute(models.Events.__table__.insert(), events)
            session.commit()
    return events
def countries_from_abstract_list(filename):
    """Create a list of countries using the Colloquium abstract list.

    Each line is matched against the timestamped-IP json filename pattern;
    matched IPs are geolocated and their country iso-codes collected.

    :param filename: path to the abstract file
    :type filename: str
    :return countrylist: list of country iso-codes
    :raises IOError: if the file cannot be read
    """
    reader = geolite2.reader()
    countrylist = []
    nlines = 0
    try:
        with open(filename, 'r') as f:
            # Iterate the file lazily (readlines() loaded it all at once).
            for line in f:
                nlines += 1
                match = re.search(
                    r'(\d{4})-(\d{2})-(\d{2})_(\d{2}):(\d{2}):(\d{2})_IP_(\d+\.\d+\.\d+\.\d+).json',
                    line)
                if match:
                    ip = match.group(7)  # group 7 is the dotted-quad IP
                    logging.debug("IP: {}".format(ip))
                    match_ip = reader.get(ip)
                    if match_ip:
                        # NOTE(review): assumes every record carries a
                        # 'country' key; a continent-only record would
                        # raise KeyError here — preserved as-is.
                        countrylist.append(match_ip['country']['iso_code'])
    except IOError as exc:
        raise IOError("%s: %s" % (filename, exc.strerror))
    logloc.info("Number of abstracts in the list: {}".format(nlines))
    logloc.info("Number of identified countries: {}".format(len(countrylist)))
    return countrylist
def get_country_by_ip(ip_address):
    """Return a Country for the IP's iso code, or None when unknown.

    None is returned when the IP has no record, the record lacks a
    country iso code, or the code is not in the supported `countries`.
    """
    reader = geolite2.reader()
    geo_data = reader.get(ip_address)
    geolite2.close()
    if geo_data and 'country' in geo_data and 'iso_code' in geo_data['country']:
        country_iso_code = geo_data['country']['iso_code']
        if country_iso_code in countries:
            return Country(country_iso_code)
    return None  # explicit (was implicit); matches the sibling implementation
def get_source_country(ip):
    """Return the English country name for the IP that made the request.

    Arguments:
    ip -- IP address of the user who visited the site

    Note: raises TypeError if the IP has no geolite2 record.
    """
    record = geolite2.reader().get(ip)
    return record['country']['names']['en']
def get_country_for_ip(ip):
    """Return the lowercase country iso code for ``ip``, or None.

    Blank/whitespace-only IPs and any lookup failure (no record,
    missing country key) yield None.
    """
    if not ip.strip():
        return None
    reader = geolite2.reader()
    try:
        return reader.get(ip)['country']['iso_code'].lower()
    except Exception:  # was bare except: no longer swallows SystemExit etc.
        return None
def get_curr_loc(ip_address):
    """Return {'lat', 'lng'} for the IP, defaulting to New York City."""
    record = geolite2.reader().get(ip_address)
    if not record:
        # in honor of the application being created in NYC
        return {'lat': 40.7143, 'lng': -74.006}
    loc = record['location']
    return {'lat': loc['latitude'], 'lng': loc['longitude']}
def parse_ip_to_country_city(ip):
    """Applies geolite2 parse function to each IP row and gets the city
    and country for every IP in that row.

    Parameters
    ----------
    ip : str
        IP or IPs separated by comma

    Raises
    ------
    ValueError
        If faulty IP value.
    KeyError
        If faulty key value.

    Returns
    -------
    row : str
        Parsed IP row into countries/cities ("countries/cities" joined
        by commas for multi-IP rows), '' on lookup/key errors, or None
        implicitly when nothing matches.
    """
    geo = geolite2.reader()
    if ',' in str(ip):
        # Multi-IP row: accumulate names across all IPs, then join.
        countries_lst = list()
        cities_lst = list()
        for i in ip.split(','):
            try:
                x = geo.get(i.strip())
            except ValueError:
                # Invalid IP aborts the whole row with ''.
                return ''
            try:
                if x is not None:
                    # x.get(key, 0) != 0 doubles as a presence check.
                    if x.get('country', 0) != 0 and x.get('city', 0) != 0:
                        countries_lst.append(x['country']['names']['en'])
                        cities_lst.append(x['city']['names']['en'])
                    elif x.get('country', 0) != 0 and x.get('city', 0) == 0:
                        countries_lst.append(x['country']['names']['en'])
                    elif x.get('city', 0) != 0 and x.get('country', 0) == 0:
                        cities_lst.append(x['city']['names']['en'])
            except KeyError:
                # Record present but missing 'names'/'en' — abort with ''.
                return ''
        return '{}/{}'.format(','.join(countries_lst), ','.join(cities_lst))
    else:
        # Single-IP row.
        try:
            x = geo.get(str(ip))
        except ValueError:
            return ''
        try:
            if x is not None:
                if x.get('country', 0) != 0 and x.get('city', 0) != 0:
                    return '{}/{}'.format(x['country']['names']['en'],
                                          x['city']['names']['en'])
                elif x.get('city', 0) == 0:
                    # Country only (KeyError if 'country' also absent,
                    # caught below and mapped to '').
                    return '{}'.format(x['country']['names']['en'])
                elif x.get('country', 0) == 0:
                    # City only.
                    return '/{}'.format(x['city']['names']['en'])
        except KeyError:
            return ''
def get_country_by_ip(ip):
    """Return a Country for the IP's iso code, or None when unknown."""
    reader = geolite2.reader()
    record = reader.get(ip)
    geolite2.close()
    has_code = (record and "country" in record
                and "iso_code" in record["country"])
    if has_code:
        iso = record["country"]["iso_code"]
        # Only codes present in the supported set yield a Country.
        if iso in countries:
            return Country(iso)
    return None
async def geoip(request):
    """Resolve a JSON list of IPs into a JSON list of lat/lng points.

    IPs without a location record are silently dropped.
    """
    reader = geolite2.reader()
    points = []
    for address in await request.json():
        record = reader.get(address)
        if record and "location" in record:
            location = record["location"]
            points.append({'lat': location['latitude'],
                           'lng': location['longitude']})
    return JSONResponse(points)
def __init__(self):
    """Open the geolite2 reader and define shared lookup constants.

    The hardcoded strings below are used in several places; keeping
    them as instance attributes means a future change happens in one
    spot instead of being hunted down throughout the code.
    """
    self.reader = geolite2.reader()
    # Top-level sections of a geolite2 record we know how to handle.
    self.override_sections = [
        'city',
        'continent',
        'country',
        'location',
        'registered_country',
        'subdivisions',
    ]
    # Key holding localized names within a section, and the language
    # we extract.
    self.names_text = 'names'
    self.lang = 'en'
def get_location_from_ip(ip):
    """Best-effort geolite2 lookup.

    :return: the raw geolite2 record dict, or None on any failure.
    Always closes the geolite2 database afterwards.
    """
    result = None
    try:
        result = geolite2.reader().get(ip)
    except Exception:  # was bare except: no longer swallows SystemExit etc.
        result = None
    finally:
        geolite2.close()
    return result
def country(ip):
    # Return the Russian country name for the IP.
    # - TypeError (record is None, so subscription fails) -> None
    # - KeyError ('country' missing) -> fall back to registered_country
    try:
        reader = geolite2.reader()
        get_country = reader.get(ip)
        return get_country['country']['names']['ru']
    except TypeError:
        # No record for this IP.
        return None
    except KeyError:
        # NOTE(review): `get_country` is bound before the KeyError can
        # occur, so this is safe — but if 'registered_country' is also
        # absent, this handler itself raises KeyError uncaught.
        return get_country['registered_country']['names']['ru']
def find_country(ip: str) -> str:
    """Return the French country name for ``ip``.

    Falls back to the continent name when no country is recorded,
    and to "NA" when the IP has no record at all.
    """
    record = geolite2.reader().get(ip)
    if not record:
        return "NA"
    section = "country" if "country" in record else "continent"
    return record[section]["names"]["fr"]
def geoip(ip):
    """Return the geolite2 record for ``ip`` with the IP itself added.

    An unknown IP yields just {'ip': ip}.
    """
    reader = geolite2.reader()
    info = reader.get(ip) or {}
    info['ip'] = ip
    geolite2.close()
    return info
def get_country_from_ip(user_ip):
    """Return the country iso code for ``user_ip``, or "NO" when unknown."""
    if user_ip is None:
        return "NO"
    record = geolite2.reader().get(user_ip)
    if record is None:
        return "NO"
    return record['country']['iso_code']
def main(data):
    """Append 'city' and 'country' lists to ``data`` from its 'IP' column.

    Each 'IP' entry may contain several comma-separated addresses;
    isipvalid() filters them. For one valid IP its record is used
    directly; for two, the first record is preferred only when it has
    BOTH country and city, otherwise the second record's fields are
    cascaded. Missing information becomes 'Unknown'.
    """
    data['city'] = []
    data['country'] = []
    ip_array = data['IP']
    reader = geolite2.reader()
    print('Starting IP conversion to city, country')
    for ip in ip_array:
        ips = ip.split(', ')
        ip_valid = isipvalid(ips)
        if ip_valid is not None and len(ip_valid) == 1:
            # Single valid IP: take whatever fields its record provides.
            location = reader.get(ip_valid[0])
            if location is not None and 'country' in location.keys(
            ) and 'city' in location.keys():
                data['country'].append(location['country']['names']['en'])
                data['city'].append(location['city']['names']['en'])
            elif location is not None and 'country' in location.keys():
                data['country'].append(location['country']['names']['en'])
                data['city'].append('Unknown')
            elif location is not None and 'city' in location.keys():
                data['city'].append(location['city']['names']['en'])
                data['country'].append('Unknown')
            else:
                data['country'].append('Unknown')
                data['city'].append('Unknown')
        elif ip_valid is not None and len(ip_valid) > 1:
            # Two (or more) valid IPs: only the first two are consulted.
            location_0 = reader.get(ip_valid[0])
            location_1 = reader.get(ip_valid[1])
            # NOTE(review): asymmetric on purpose? location_0 is used only
            # when complete; partial data always comes from location_1.
            if location_0 is not None and 'country' in location_0.keys(
            ) and 'city' in location_0.keys():
                data['country'].append(location_0['country']['names']['en'])
                data['city'].append(location_0['city']['names']['en'])
            elif location_1 is not None and 'country' in location_1.keys(
            ) and 'city' in location_1.keys():
                data['country'].append(location_1['country']['names']['en'])
                data['city'].append(location_1['city']['names']['en'])
            elif location_1 is not None and 'country' in location_1.keys():
                data['country'].append(location_1['country']['names']['en'])
                data['city'].append('Unknown')
            elif location_1 is not None and 'city' in location_1.keys():
                data['city'].append(location_1['city']['names']['en'])
                data['country'].append('Unknown')
            else:
                data['country'].append('Unknown')
                data['city'].append('Unknown')
        else:
            # No valid IPs at all.
            data['country'].append('Unknown')
            data['city'].append('Unknown')
    geolite2.close()
    return data
def __init__(self, address, port, redis_server, redis_port, skip):
    """Configure the DNS proxy.

    address/port -- upstream server to forward to
    redis_server/redis_port -- Redis instance used as lookup cache
    skip -- list of wildcard labels to skip
    """
    # Upstream endpoint.
    self.address = address
    self.port = port
    # Labels excluded from processing.
    self.skip = skip
    # GeoIP database handle, shared for the object's lifetime.
    self.georeader = geolite2.reader()
    # Redis-backed cache.
    self.cache = redis.StrictRedis(host=redis_server, port=redis_port)
def process_request(self, request):
    """Django middleware hook: activate the visitor's local time zone.

    The zone is resolved once from the client IP via geolite2 and then
    cached in the session. Any failure (no IP, no record, bad zone)
    deactivates time zone support for the request — the broad except
    is a deliberate best-effort.
    """
    user_time_zone = request.session.get('user_time_zone')
    try:
        if not user_time_zone:
            user_ip = get_real_ip(request)
            if user_ip:
                reader = geolite2.reader()
                ip_details = reader.get(user_ip)
                # KeyError here (no 'location'/'time_zone') is caught below.
                user_time_zone = ip_details['location']['time_zone']
                # NOTE(review): geolite2.close() only runs on this success
                # path; failures leave the reader open — confirm intended.
                geolite2.close()
                if user_time_zone:
                    request.session['user_time_zone'] = user_time_zone
        timezone.activate(pytz.timezone(user_time_zone))
    except:
        timezone.deactivate()
def get_location(self):
    """Return (ip, iso_country) for this host, caching in the metadata table.

    On a cache miss the public IP is fetched from checkip.amazonaws.com,
    geolocated via geolite2 (registered_country iso code), stored in the
    database with a timestamp, and returned.
    """
    row = self.conn.execute("SELECT IP, Location, Time FROM metadata").fetchone()
    if row is not None and len(row):
        # Cache hit: reuse the stored IP/location.
        self.log.info("Loaded our IP from the database: {0}, {1}",
                      row['IP'], row['Location'])
        return (row['IP'], row['Location'])
    else:
        ip = requests.get("http://checkip.amazonaws.com").text.strip()
        self.log.info("Amazon reports our IP as {0}", ip)
        # NOTE(review): raises TypeError if geolite2 has no record for
        # the IP, and uses registered_country (not country) — confirm.
        location = geolite2.reader().get(ip)
        iso = location['registered_country']['iso_code']
        self.conn.execute(
            "INSERT INTO metadata (IP, Location, Time) VALUES (?,?,?)",
            [ip, iso, time.time()])
        self.conn.commit()
        return ip, iso
def getpeerinfo():
    """Return a JSON list describing each RPC peer.

    For every peer: client version, address, last-seen / connected-since
    timestamps, ping, and (when available) a geolite2 location record.
    """
    peers = normalize_result(rpc_connect().getpeerinfo())
    peers = json.loads(json.loads(peers.response[0]))
    peer_list = []
    reader = geolite2.reader()  # hoisted: was opened and closed per peer
    for peer in peers:
        ip, separator, port = peer["addr"].rpartition(':')
        port = int(port)  # convert to integer
        # convert to `IPv4Address` or `IPv6Address` (strip [] from IPv6)
        ip = ip_address(ip.strip("[]"))
        match = reader.get(str(ip))
        out = {}
        out['client'] = peer['subver']
        out['ip'] = peer['addr']
        out['last_seen'] = datetime.datetime.fromtimestamp(
            int(peer['lastrecv'])).strftime('%Y-%m-%d %H:%M:%S')
        out['connected_since'] = datetime.datetime.fromtimestamp(
            int(peer['conntime'])).strftime('%Y-%m-%d %H:%M:%S')
        out['ping'] = str(peer['pingtime'] * 1000) + "ms"
        if match is not None:
            out['location'] = match
        peer_list.append(out)
    geolite2.close()
    return jsonify(peer_list)
def __init__(self, ip):
    """Geolocate ``ip`` and store its English country and city names."""
    record = geolite2.reader().get(ip)
    self.country = record['country']['names']['en']
    self.city = record['city']['names']['en']
def __init__(self, ip):
    """Geolocate ``ip`` and store English country, state, and city names."""
    record = geolite2.reader().get(ip)
    names = lambda section: record[section]
    self.country = names("country")["names"]["en"]
    # First subdivision entry is the state/region.
    self.state = names("subdivisions")[0]["names"]["en"]
    self.city = names("city")["names"]["en"]
from geolite2 import geolite2
import numpy
import pandas

# Shared reader reused by every get_country() call below.
geolite_reader = geolite2.reader()


def get_country(ip):
    """Return the country iso code for ``ip`` or numpy.nan when unknown."""
    if not isinstance(ip, str):
        return numpy.nan
    lookup = geolite_reader.get(ip)
    return numpy.nan if lookup is None or 'country' not in lookup else lookup['country']['iso_code']


# Load answers and normalize the 't'/'f' flags to booleans.
answers = pandas.read_csv('./answers.csv', delimiter=';')
answers['touch_device'] = answers['touch_device'].apply(lambda x: x == 't')
answers['correct'] = answers['correct'].apply(lambda x: x == 't')
# Resolve each distinct IP once; map IPs to 1-based anonymized ids.
countries = {ip: get_country(ip) for ip in answers['ip_address'].unique()}
ips_dict = {ip: i + 1 for i, ip in enumerate(answers['ip_address'].unique()) if isinstance(ip, str)}
answers['country'] = answers['ip_address'].apply(lambda ip: countries.get(ip))
answers['ip'] = answers['ip_address'].apply(lambda ip: ips_dict.get(ip))
# Drop raw identifying / unused columns before export.
del answers['ip_address']
del answers['experiment_id']
answers.to_csv('./anatom.csv', index=False)
# Re-export contexts with a plain comma delimiter.
pandas.read_csv('./contexts.csv', delimiter=';').to_csv('anatom.contexts.csv', index=False)
def get_mirror_location(self, ip):
    """Return the raw geolite2 record for ``ip`` (may be None)."""
    return geolite2.reader().get(ip)
from django.http import Http404
from django.utils.encoding import iri_to_uri, smart_text
from django_babel.templatetags.babel import currencyfmt
from django_countries import countries
from django_countries.fields import Country
from django_prices_openexchangerates import exchange_currency
from django_prices_openexchangerates.tasks import update_conversion_rates
from geolite2 import geolite2
from prices import MoneyRange
from versatileimagefield.image_warmer import VersatileImageFieldWarmer

from ...account.utils import get_random_avatar
from ...celeryconf import app
from ...core.i18n import COUNTRY_CODE_CHOICES

# Module-wide geolite2 reader and logger.
georeader = geolite2.reader()
logger = logging.getLogger(__name__)


class CategoryChoiceField(forms.ModelChoiceField):
    """ModelChoiceField that renders MPTT categories as an indented tree."""

    def label_from_instance(self, obj):
        """Prefix the category label with box-drawing tree indentation."""
        # pylint: disable=W0212
        level = getattr(obj, obj._mptt_meta.level_attr)
        indent = max(0, level - 1) * "│"
        if obj.parent:
            # MPTT left/right values: detect whether obj is the last
            # leaf under its parent to pick the closing connector.
            last = (obj.parent.rght - obj.rght == 1) and (obj.rght - obj.lft == 1)
            if last:
                indent += "└ "
            else:
                indent += "├ "
        return "%s%s" % (indent, smart_text(obj))
def update(version):
    """Handle an application update request and return user pricing.

    Validates the payload (user hash, non-empty events, progress on the
    last event), stores the update together with the caller's IP,
    geolocates the player, and builds the price response from the
    application's pricing engine. Error constants from ``errors`` are
    returned for every validation/lookup failure.
    """
    theTerminal = terminal.getTerminal()
    backend = content.Content()
    theAnalytics = analytics.Analytics()
    # Check minimum number of keys required in JSON update
    if extras.keysInDict(request.values, ['user', 'events']) == False:
        monitor.getMonitor().count('ApplicationUpdateMissingKeysCount')
        return errors.ApplicationUpdateIncomplete
    # User ids are fixed-length 32-char hashes.
    if len(request.values['user']) != 32:
        monitor.getMonitor().count('ApplicationUpdateMissingUsersCount')
        return errors.ApplicationUpdateBadUser
    # Events must have at least one item
    if len(request.values['events']) == 0:
        monitor.getMonitor().count('ApplicationUpdateNoEventsCount')
        return errors.ApplicationUpdateMissingEvents
    # Event has progress
    if 'progress' not in request.values['events'][-1]:
        monitor.getMonitor().count('ApplicationUpdateMissingProgressCount')
        return errors.ApplicationUpdateMissingEvents
    # Save update (include IP address of user)
    with monitor.getMonitor().time('ApplicationUpdateSaveUpdateTime'):
        request.values['liftpass-ip'] = request.environ.get('HTTP_X_REAL_IP')
        theAnalytics.saveUpdate(request.values)
    # Lookup player country (best-effort; None on any failure).
    try:
        country = geolite2.reader().get(request.environ.get('HTTP_X_REAL_IP'))
        country = country['country']['iso_code']
    except Exception as e:
        monitor.getMonitor().count('ApplicationUpdateNoCountryCount')
        country = None
    response = None
    with monitor.getMonitor().time('ApplicationUpdateBuildResponseTime'):
        # Try getting price engine
        # NOTE(review): 'liftpass-application' is read here but is not in
        # the required-keys check above — confirm it is always present.
        try:
            prices = backend.getPricingEngine(request.values['liftpass-application'])
        except pricing.ApplicationNotFoundException as e:
            monitor.getMonitor().count('ApplicationUpdateNoApplicationCount')
            return errors.ApplicationNotConfigured
        # Try getting price for user + progress
        try:
            userPrices = prices.getPrices(request.values['user'],
                                          request.values['events'][-1]['progress'],
                                          country=country)
        except pricing.NoPricingForGroup:
            monitor.getMonitor().count('ApplicationUpdateNoPriceCount')
            return errors.ApplicationHasNoPriceForUser
        # Build response
        response = {'goods': userPrices[1], 'version': userPrices[0]}
    # If debug mode save to terminal
    if 'liftpass-debug' in request.values and request.values['liftpass-debug'] == True:
        theTerminal.put(request.values['liftpass-application'], request.values, response)
    return response
def _ip2country(ip):
    """Get user country iso code for ``ip``, or None when unknown."""
    if not ip:
        return None
    record = geolite2.reader().get(ip)
    if record is None:
        return None
    return record.get('country', {}).get('iso_code')
def onlineStats(self):
    """Analyse Apache web logs under ./online/ and report download stats.

    Counts page views, PDF/ZIP downloads (total, per month, Myanmar-only
    per month), British Council downloads, and downloads per country
    (via geolite2). Prints a summary and writes onlinestats.csv.
    """
    print('Analysing online web logs...')
    linesProcessed = 0
    path = './online/*'
    files = glob.glob(path)
    # Reporting period labels used in the output only.
    startDate = '27 Sept 2015'
    endDate = '26 Sept 2016'
    pdfCount = 0
    pdfCountMyanmar = 0
    pdfMyanmarPercentage = 0
    ipList = []
    monthTotals = {}
    myanmarMonthTotals = {}
    pageViews = 0
    britishCouncil = 0
    # Loop through log files
    for file in files:
        #print(os.path.basename(path))
        # Open the first file
        with open(file) as fp:
            for line in fp:
                if line is None:
                    continue
                linesProcessed = linesProcessed+1
                print('Processing log file row: '+str(linesProcessed))
                # Page view: HTTP 200 and not an asset/bot request.
                if '200' in line and not any(x in line for x in ('.zip', '.pdf', '.css', '.js', '.png', '.jpg', '.jpeg', '.gif', '.mp4', '.ico', '.txt', '.php', 'bot', 'spider')):
                    pageViews = pageViews+1
                lineData = self.apache2_logrow(line)
                #print('.', end='')
                # If requested file is a PDF add to the count
                if "pdf" in lineData[4] or "zip" in lineData[4]:
                    #print(lineData[4])
                    pdfCount = pdfCount+1
                    #print('*', end='')
                    # Add IP to list
                    ipList.append(lineData[0])
                    # lineData[3] is the Apache timestamp dd/Mon/yyyy:HH:MM:SS.
                    logDate = lineData[3].split('/')
                    logMonth = logDate[1]
                    logYear = logDate[2].split(':')
                    logYear = logYear[0]
                    # All month totals
                    if logYear not in monthTotals:
                        monthTotals[logYear] = {}
                    if logMonth in monthTotals[logYear]:
                        monthTotals[logYear][logMonth] = monthTotals[logYear][logMonth]+1
                    else:
                        monthTotals[logYear][logMonth] = 1
                    # Myanmar month totals
                    reader = geolite2.reader()
                    geo = reader.get(lineData[0])
                    if geo is None:
                        continue
                    if 'country' in geo:
                        country = geo['country']['iso_code']
                        if 'MM' in country:
                            if logYear not in myanmarMonthTotals:
                                myanmarMonthTotals[logYear] = {}
                            if logMonth in myanmarMonthTotals[logYear]:
                                myanmarMonthTotals[logYear][logMonth] = myanmarMonthTotals[logYear][logMonth]+1
                            else:
                                myanmarMonthTotals[logYear][logMonth] = 1
                    # British Council downloads are tracked separately.
                    if 'britishcouncil' in lineData[4]:
                        print(lineData[4])
                        britishCouncil = britishCouncil+1
    # Work out the Myanmar IPs
    ipCountries = {}
    for ip in ipList:
        #print(ip)
        # NOTE(review): `is not` compares identity, not equality — a
        # '::1' string may still pass this check; confirm intended.
        if ip is not '::1':
            reader = geolite2.reader()
            geo = reader.get(ip)
            if geo is None:
                continue
            if 'country' in geo:
                country = geo['country']['iso_code']
                countryName = geo['country']['names']['en']
                #print(country)
                #quit()
                #print(type(country))
                if 'MM' in country:
                    #print(country)
                    pdfCountMyanmar = pdfCountMyanmar+1
                if countryName in ipCountries:
                    ipCountries[countryName] = ipCountries[countryName]+1
                else:
                    ipCountries[countryName] = 1
    geolite2.close()
    # Work out the Myanmar downloads as a percentage
    if pdfCountMyanmar > 0:
        pdfCountMyanmar1percent = pdfCount/100
        pdfMyanmarPercentage = pdfCountMyanmar/pdfCountMyanmar1percent
    print('\nWeb log analysis complete:')
    print('\t Log lines processed: {}'.format(linesProcessed))
    print('\t {} to {}'.format(startDate, endDate))
    print('\t Total page views: {}'.format(pageViews))
    print('\t Total PDF and ZIP downloads {}'.format(pdfCount))
    print('\t Total PDF and ZIP downloads from Myanmar {}'.format(pdfCountMyanmar))
    print('\t British Council downloads {}'.format(britishCouncil))
    print('\t Myanmar percentage {}%'.format(round(pdfMyanmarPercentage, 2)))
    print('\t Downloads by country: ')
    sorted_ipCountries = OrderedDict(sorted(ipCountries.items(), key=itemgetter(1), reverse=True))
    for x in sorted_ipCountries:
        print('\t\t - {} : {}'.format(x, ipCountries[x]))
    pp = pprint.PrettyPrinter(indent=3)
    pp.pprint(monthTotals)
    for year in monthTotals:
        for month in monthTotals[year]:
            print(year+', '+month+', '+str(monthTotals[year][month]))
    # Create CSV
    with open('onlinestats.csv', 'w', newline='') as csvfile:
        onlineCSV = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        onlineCSV.writerow(['Time period ', startDate+' to '+endDate])
        onlineCSV.writerow(['Total page views', pageViews])
        onlineCSV.writerow(['Total downloads (PDF and ZIP files)', pdfCount])
        onlineCSV.writerow(['British Council downloads', britishCouncil])
        pmp = str(round(pdfMyanmarPercentage, 2))
        onlineCSV.writerow(['Total downloads from Myanmar', str(pdfCountMyanmar)+' ('+str(pmp+'%')+')'])
        onlineCSV.writerow(['Total downloads by month:'])
        onlineCSV.writerow(['Year', 'Month', 'Downloads'])
        od = collections.OrderedDict(sorted(monthTotals.items()))
        for year, month in od.items():
            odm = collections.OrderedDict(sorted(month.items()))
            for mon, value in odm.items():
                onlineCSV.writerow([year,mon,value])
        """
        for year in monthTotals:
            for month in monthTotals[year]:
                onlineCSV.writerow([year, month, monthTotals[year][month]])
        """
        onlineCSV.writerow(['Myanmar downloads by month:'])
        onlineCSV.writerow(['Year', 'Month', 'Downloads'])
        od = collections.OrderedDict(sorted(myanmarMonthTotals.items()))
        for year, month in od.items():
            odm = collections.OrderedDict(sorted(month.items()))
            for mon, value in odm.items():
                onlineCSV.writerow([year,mon,value])
        """
        for year in myanmarMonthTotals:
            for month in myanmarMonthTotals[year]:
                onlineCSV.writerow([year, month, myanmarMonthTotals[year][month]])
        """
        onlineCSV.writerow(['Country', 'Downloads'])
        sorted_ipCountries = OrderedDict(sorted(ipCountries.items(), key=itemgetter(1), reverse=True))
        for x in sorted_ipCountries:
            onlineCSV.writerow([x, ipCountries[x]])
    # Create document '''
def vanillaConnect(host, port=443, attempt_protocol=OpenSSL.SSL.SSLv23_METHOD):
    """ Return a list of connection parameters negotiated with a vanilla connect

    Performs a plain TLS client handshake against host:port and harvests:
      1. (scan_time, client cipher list)
      2. a dict describing the server certificate (PEM, subject/issuer,
         validity window, public key details, SAN list, common extensions)
      3. a dict of negotiated session parameters (cipher, TLS version,
         randoms, master key, session id / ticket info)
      4. the peer address tuple from getpeername()
      5. the local OpenSSL version string
      6. a GeoIP dict for the peer address (via geolite2)
      7. the raw HTTP response to a "GET /" probe

    NOTE(review): on any failure (DNS, socket, connect, handshake, or an
    unsupported key type) a human-readable error *string* is returned instead
    of the list -- callers must type-check the return value.

    :param host: server hostname; also used for SNI and the HTTP Host header
    :param port: TCP port to connect to (default 443)
    :param attempt_protocol: pyOpenSSL method constant used for SSL.Context
    :return: list as described above, or an error string on failure
    """
    returnlist = []
    ## time before we started connection
    scan_time = datetime.datetime.utcnow()
    ## configure SSL context
    ctx = SSL.Context(attempt_protocol)
    ##ctx.set_options(SSL.OP_NO_SSLv2)
    ##ctx.set_verify(SSL.VERIFY_FAIL_IF_NO_PEER_CER6T, verify_cb) # Demand a certificate
    ##ctx.set_verify(SSL.VERIFY_PEER|SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb) # Demand a certificate
    ##ctx.use_privatekey_file (os.path.join(dir, 'server.pkey'))
    ##ctx.use_certificate_file(os.path.join(dir, 'server.cert'))
    ##ctx.load_verify_locations("server.crt")
    ##print("%s" % OpenSSL.crypto.get_elliptic_curves())
    # Resolve the host and open a plain TCP socket.
    # NOTE(review): returns on the FIRST socket() failure instead of trying the
    # next getaddrinfo() result -- confirm this is the intended behaviour.
    try:
        for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                rawsocket = socket.socket(af, socktype, proto)
            except socket.error as msg:
                rawsocket = None
                return "Socket Error: %s" % msg
    except socket.gaierror as msg:
        return "getaddrinfo failed: %s" % msg
    rawsocket.settimeout(5)
    # Wrap the raw socket in a TLS connection and set SNI before connecting.
    sock = SSL.Connection(ctx, rawsocket)
    sock.set_tlsext_host_name(host.encode('utf-8'))
    try:
        sock.connect((host, port))
    except Exception as inst:
        return "Connection Error: %s" % inst
    # Peer address is captured via the private _socket handle.
    server_ip = sock._socket.getpeername()
    # Remove the timeout: pyOpenSSL handshakes misbehave on non-blocking sockets.
    rawsocket.settimeout(None)
    try:
        sock.do_handshake()
    except Exception as inst:
        return "Handshake Error: %s" % inst
    # Item 1: scan timestamp plus the cipher list our client offered.
    returnlist.append((scan_time, sock.get_cipher_list()))
    ## ---- server certificate extraction ----
    servercert = sock.get_peer_certificate()
    servercert_serial = servercert.get_serial_number()
    servercert_subject = X509Name_to_str(servercert.get_subject())
    servercert_issuer = X509Name_to_str(servercert.get_issuer())
    servercert_version = servercert.get_version()
    servercert_algo = servercert.get_signature_algorithm().decode()
    servercert_validity = (servercert.get_notBefore().decode(), servercert.get_notAfter().decode())
    dt_now = datetime.datetime.utcnow()
    # Slice off the ASN.1 "YYMMDDhhmmss" core of the validity timestamps.
    notbefore = str(servercert_validity[0][2:14])
    notafter = str(servercert_validity[1][2:14])
    ## this should work for UTCtime, GeneralTime is YYYY so fix this near the year 2050
    dt_notbefore = datetime.datetime(2000 + int(notbefore[0:2]), int(notbefore[2:4]), int(notbefore[4:6]), int(notbefore[6:8]), int(notbefore[8:10]), int(notbefore[10:12]))
    dt_notafter = datetime.datetime(2000 + int(notafter[0:2]), int(notafter[2:4]), int(notafter[4:6]), int(notafter[6:8]), int(notafter[8:10]), int(notafter[10:12]))
    servercert_pubkey = servercert.get_pubkey()
    # Private handle to the underlying EVP_PKEY, needed for the raw lib.* calls below.
    evp_pkey = servercert_pubkey._pkey
    servercert_key_bits = servercert_pubkey.bits()
    returncertificate = {}
    # Render the certificate to PEM through an OpenSSL memory BIO.
    bio = OpenSSL.crypto._new_mem_buf()
    lib.PEM_write_bio_X509(bio, servercert._x509)
    cert_pem = OpenSSL.crypto._bio_to_string(bio).decode().strip()
    returncertificate['pem'] = cert_pem
    # X509 versions are zero-based on the wire; +1 gives the human-readable version.
    returncertificate['version'] = (servercert_version+1)
    returncertificate['serial'] = servercert_serial
    returncertificate['algo'] = servercert_algo
    returncertificate['issuer'] = servercert_issuer
    returncertificate['validity'] = [dt_notbefore, dt_notafter]
    returncertificate['subject'] = servercert_subject
    key_type = servercert_pubkey.type()
    ## Public Key Algo Specific Extractions
    # returnpublickey layout: [key_type, bits, point/modulus, curve/exponent]
    returnpublickey = []
    returnpublickey.append(key_type)
    # 408 is presumably NID_X9_62_id_ecPublicKey (EC key) -- TODO confirm and
    # replace the magic number with a named constant.
    if (key_type==408):
        ##print(" EC")
        ec_key = lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
        ec_point = lib.EC_KEY_get0_public_key(ec_key)
        ec_group = lib.EC_KEY_get0_group(ec_key)
        ec_group_nid = lib.EC_GROUP_get_curve_name(ec_group)
        ec_point_conversion_form = lib.EC_KEY_get_conv_form(ec_key)
        curve_string = ffi.string(lib.OBJ_nid2sn(ec_group_nid)).decode()
        point_string = ffi.string(lib.EC_POINT_point2hex(ec_group, ec_point, ec_point_conversion_form, ffi.NULL)).decode()
        ##print(" curve: %s" % curve_string)
        ##print(" public %s" % points_string)
        ##print(" bits: %d" % servercert_key_bits)
        returnpublickey.append(servercert_key_bits)
        returnpublickey.append(point_string)
        returnpublickey.append(curve_string)
        #print("%s " % lib.EC_POINT_point2oct(ec_point))
        #print("%s " % lib.EVP_PKEY_print_public(evp_key))
        ##bio = OpenSSL.crypto._new_mem_buf()
        #lib.i2d_EC_PUBKEY_bio(bio, ec_key)
        #publickey_string = OpenSSL.crypto._bio_to_string(bio)
        #print(binascii.hexlify(publickey_string))
        returncertificate['pubkey'] = returnpublickey
    elif (key_type==OpenSSL.crypto.TYPE_RSA):
        #print(" type: RSA")
        rsa_key = lib.EVP_PKEY_get1_RSA(evp_pkey)
        # Scrape size/modulus/exponent out of OpenSSL's own text rendering of
        # the key, since there is no direct binding for the fields here.
        bio = OpenSSL.crypto._new_mem_buf()
        lib.RSA_print(bio, rsa_key, 0)
        rsabiostring = OpenSSL.crypto._bio_to_string(bio).decode()
        openssl_rsa_print_regex = "Public-Key: \((\d+) bit\)\nModulus:\n(.*)Exponent: (\d+)"
        prog = re.compile(openssl_rsa_print_regex, re.DOTALL)
        rsa_data = prog.match(rsabiostring)
        rsa_size, rsa_mod, rsa_exp = rsa_data.groups()
        # Strip the "ab:cd:ef:" formatting down to a plain hex string.
        rsa_mod = rsa_mod.replace(" ", "")
        rsa_mod = rsa_mod.replace(":", "")
        rsa_mod = rsa_mod.replace("\n", "")
        returnpublickey.append(rsa_size)
        returnpublickey.append(rsa_mod)
        returnpublickey.append(rsa_exp)
        returncertificate['pubkey']=returnpublickey
    else:
        return "unsupported: %s " % returncertificate
    ## SAN and ext
    server_cert_subjectaltname = ""
    server_cert_subjectaltname_list = []
    # Extension holders default to empty tuples when the cert lacks them.
    bc, cp, crl, ku, eku, aki, aia = (), (), (), (), (), (), ()
    for ext in range(0, servercert.get_extension_count()):
        ext_obj = servercert.get_extension(ext)
        ext_name = ext_obj.get_short_name()
        #print("n: %s d: %s %s" % (ext_name, ext_obj, type(ext_obj)))
        if (ext_name == b'subjectAltName'):
            # Decode the raw DER SAN payload with pyasn1 and stringify each
            # entry as "type:value".
            ext_data = ext_obj.get_data()
            server_cert_subjectaltname = decoder.decode(ext_data, asn1Spec=rfc2459.SubjectAltName())[0]
            for san in server_cert_subjectaltname:
                santype = san.getName()
                sancomponent = san.getComponent()
                if isinstance(sancomponent, pyasn1.type.char.IA5String):
                    # dNSName / rfc822Name / URI style entries
                    sanuri = san.getComponent().asOctets().decode()
                elif isinstance(sancomponent, pyasn1_modules.rfc2459.AnotherName):
                    # otherName: render as "<oid>\n<value>"
                    san_other_oid = san.getComponent().getComponentByName('type-id')
                    san_other_value = san.getComponent().getComponentByName('value')
                    sanuri = san_other_oid.prettyPrint() + "\n" + san_other_value.prettyPrint()
                else :
                    sanuri = san.getComponent().prettyPrint()
                server_cert_subjectaltname_list.append("%s:%s" % (santype, sanuri))
        elif (ext_name == b'basicConstraints'):
            bc = ext_obj
        elif (ext_name == b'keyUsage'):
            ku = ext_obj
        elif (ext_name == b'extendedKeyUsage'):
            eku = ext_obj
        elif (ext_name == b'authorityKeyIdentifier'):
            aki = ext_obj
        elif (ext_name == b'crlDistributionPoints'):
            crl = ext_obj
        elif (ext_name == b'authorityInfoAccess'):
            aia = ext_obj
        elif (ext_name == b'certificatePolicies'):
            cp = ext_obj
    returncertificate['san'] = server_cert_subjectaltname_list
    returncertificate['bc'] = bc
    returncertificate['eku'] = eku
    returncertificate['aki'] = aki
    returncertificate['aia'] = aia
    returncertificate['crl'] = crl
    returncertificate['ku'] = ku
    returncertificate['cp'] = cp
    ## OK done with certificate dictionary items. push to return list
    returnlist.append(returncertificate)
    # get ServerHello technical specifics
    # (raw cffi calls against the private _ssl handle)
    cipherinuse = lib.SSL_get_current_cipher(sock._ssl)
    cipherinuse_string = ffi.string(lib.SSL_CIPHER_get_name(cipherinuse)).decode()
    cipherversion = ffi.string(lib.SSL_CIPHER_get_version(cipherinuse)).decode()
    protocolversion = ffi.string(lib.SSL_get_version(sock._ssl)).decode()
    cipherdescription = ffi.string(lib.SSL_CIPHER_description(cipherinuse, ffi.NULL, 128)).decode().strip()
    serverrandom = binascii.hexlify(sock.server_random())
    clientrandom = binascii.hexlify(sock.client_random())
    masterkey = binascii.hexlify(sock.master_key()).decode()
    ## requires SSL_SESSION struct expanded binding in cryptography.binding
    session = sock.get_session()
    ## print out session using SSL_SESSION_print
    #bio = OpenSSL.crypto._new_mem_buf()
    #lib.SSL_SESSION_print(bio, session._session)
    #print(OpenSSL.crypto._bio_to_string(bio))
    ## session params
    returnsession_params = dict()
    returnsession_params['cipher'] = cipherinuse_string
    returnsession_params['tls_version'] = protocolversion
    returnsession_params['cipher_description'] = cipherdescription
    returnsession_params['server_random'] = serverrandom
    returnsession_params['client_random'] = clientrandom
    returnsession_params['master_key'] = masterkey
    sessionid_length = session._session.session_id_length
    returnsession_params['session_id'] = binascii.hexlify(ffi.buffer(session._session.session_id))
    ## are tickets supported?
    if (session._session.tlsext_tick):
        returnsession_params['session_ticket'] = binascii.hexlify(ffi.string(session._session.tlsext_tick))
        returnsession_params['session_ticket_lifetime'] = session._session.tlsext_tick_lifetime_hint
    else:
        returnsession_params['session_ticket'] = "0"
        returnsession_params['session_ticket_lifetime'] = "0"
    returnlist.append(returnsession_params)
    returnlist.append(server_ip)
    openssl_version = ffi.string(lib.SSLeay_version(0)).decode()
    #print(openssl_version )
    returnlist.append(openssl_version)
    ## Geo Data
    language = 'en'
    server_geo = OrderedDict()
    ip_to_geo = server_ip[0]
    reader = geolite2.reader()
    match = reader.get(ip_to_geo)
    if (match != None):
        if (match.get('city') != None):
            server_geo['city'] = match['city']['names'][language]
        if (match.get('subdivisions') != None):
            server_geo['subdivisions'] = match['subdivisions'][0]['names'][language]
        if (match.get('postal') != None):
            server_geo['postal'] = match['postal']['code']
        if (match.get('country') != None):
            server_geo['country'] = match['country']['names'][language]
        if (match.get('continent') != None):
            server_geo['continent'] = match['continent']['names'][language]
        if (match.get('location') != None):
            server_geo['location'] = (match['location']['latitude'], match['location']['longitude'])
            # A whole-degree latitude suggests a coarse (country-level) GeoIP
            # fix, so zoom the map out; otherwise zoom in.
            test_geoip_resolution = float( server_geo['location'][0] )
            if (test_geoip_resolution % 1==0):
                server_geo['zoom'] = 3
            else:
                server_geo['zoom'] = 8
        # NOTE(review): the two checks below test top-level keys but read from
        # match['location']; in MaxMind data time_zone/metro_code live under
        # 'location', so these branches likely never fire -- verify.
        if (match.get('time_zone') != None):
            server_geo['time_zone'] = match['location']['time_zone']
        if (match.get('metro_code') != None):
            server_geo['metro_code'] = match['location']['metro_code']
        if (match.get('registered_country') != None):
            server_geo['registered_country'] = match['registered_country']['names'][language]
    returnlist.append(server_geo)
    ## Application data
    # Probe the application layer with a minimal HTTP/1.1 GET.
    try:
        useragent = "TLSSecondOpinion/1.0 (+https://tls2o.com TLS Second Opinion Bot)"
        line = "GET / HTTP/1.1\r\nHost:%s\r\nAccept: */*\r\nConnection: keep-alive\r\nUser-Agent: %s\r\n\r\n" % (host, useragent)
        # NOTE(review): line is a str; pyOpenSSL's send() on Python 3 requires
        # bytes -- confirm this path works or encode before sending.
        sock.send(line)
        server_response = sock.recv(65535).decode()
        returnlist.append(server_response)
    except SSL.Error:
        server_response = 'Connection died unexpectedly'
    sock.shutdown()
    sock.close()
    return returnlist