class Helpers():
    """Utility helpers: cached geolocation of this host plus HTTP latency probes.

    Persists our public IP / country in the ``metadata`` table of mirrors.db so
    the external lookup (checkip.amazonaws.com + geolite2) runs only once.
    """

    def __init__(self):
        # Open (or create) the mirrors database and make sure the cache table exists.
        self.conn = SQLiteHelper('mirrors.db')
        self.log = logging.getLogger(__name__)
        if not self.conn.table_exists('metadata'):
            self.conn.execute('''
                CREATE TABLE metadata (
                    IP TEXT,
                    Location TEXT,
                    Time TIMESTAMP
                )
            ''')

    def get_location(self):
        """Return ``(ip, iso_country_code)`` for this host, caching the answer.

        On a cache miss the IP is fetched from checkip.amazonaws.com and
        geolocated with geolite2, then stored in the metadata table.
        """
        row = self.conn.execute("SELECT IP, Location, Time FROM metadata").fetchone()
        if row is not None and len(row):
            # FIX: logging uses lazy %-style arguments, not str.format — the
            # original "{0}, {1}" placeholders were never substituted.
            self.log.info("Loaded our IP from the database: %s, %s",
                          row['IP'], row['Location'])
            return (row['IP'], row['Location'])
        else:
            ip = requests.get("http://checkip.amazonaws.com").text.strip()
            self.log.info("Amazon reports our IP as %s", ip)
            location = geolite2.reader().get(ip)
            iso = location['registered_country']['iso_code']
            self.conn.execute("INSERT INTO metadata (IP, Location, Time) VALUES (?,?,?)",
                              [ip, iso, time.time()])
            self.conn.commit()
            return ip, iso

    def get_mirror_location(self, ip):
        """Return the raw geolite2 record for *ip* (None when unknown)."""
        location = geolite2.reader().get(ip)
        return location

    def http_ping(self, url):
        """Time a HEAD request to *url*; return ``(url, seconds)``.

        Unreachable/failed URLs are penalised with a fixed 999-second latency.
        """
        # FIX: time.clock() was removed in Python 3.8; perf_counter() is the
        # monotonic replacement for interval timing.
        start = time.perf_counter()
        try:
            headers = {'User-Agent': 'Dodgy HTTP Ping Script ([email protected])'}
            requests.head(url, timeout=0.75, headers=headers)
            elapsed = time.perf_counter() - start
        except requests.RequestException:
            # FIX: the original bare except returned (999 - start), which
            # depended on the clock's arbitrary epoch; use a fixed penalty.
            elapsed = 999.0
        return url, elapsed

    def average_http_ping(self, url):
        """Ping *url* three times and return ``(url, mean latency seconds)``."""
        # FIX: the original built [single_sample * 3] — one element, tripled —
        # so the "average" was 3x one measurement. Take three real samples.
        results = [self.http_ping(url)[1] for _ in range(3)]
        return (url, sum(results, 0.0) / len(results))

    def async_http_ping(self, urls):
        """Submit average_http_ping for every URL to a 4-worker pool.

        Returns the list of pending futures; callers collect the results.
        """
        pool = TPE(4)
        return [pool.submit(self.average_http_ping, url) for url in urls]
def __init__(self):
    """Open mirrors.db and ensure the metadata cache table is present."""
    self.log = logging.getLogger(__name__)
    self.conn = SQLiteHelper('mirrors.db')
    schema = '''
        CREATE TABLE metadata (
            IP TEXT,
            Location TEXT,
            Time TIMESTAMP
        )
    '''
    if not self.conn.table_exists('metadata'):
        self.conn.execute(schema)
def setUp(self):
    """Build a fresh incident manager and one sample incident per test."""
    storage = SQLiteHelper()
    self.gestor_incidentes = GestorIncidentes(storage)
    self.incidente = Incidente(
        "Incidente test",
        "Descripcion del test",
        "2019-10-29 11:00:00",
        37.2,
        -3.5,
        "Leve",
    )
"""Search packagecontents.db for files matching a fragment given on argv."""
from sqlitehelper import SQLiteHelper
import sys

# FIX: fail with a usage message instead of an IndexError when no search
# fragment is supplied on the command line.
if len(sys.argv) < 2:
    sys.stderr.write('usage: {0} <filename-fragment>\n'.format(sys.argv[0]))
    sys.exit(1)

database = SQLiteHelper('packagecontents.db')
results = database.execute('''
    SELECT packages.Name, package_contents.File
    FROM packages
    INNER JOIN package_contents ON packages.ID = package_contents.PackageID
    WHERE package_contents.File LIKE ?
''', ['%' + sys.argv[1] + '%', ]).fetchall()

try:
    for result in results:
        print('{0}: {1}'.format(result['Name'], result['File']))
except (BrokenPipeError, IOError):
    # Output was piped into e.g. `head`; close stderr too so the interpreter
    # does not emit a second BrokenPipeError message at shutdown.
    sys.stderr.close()
    sys.exit(0)
def __init__(self, databasefile='mirrors.db'):
    """Wire up the logger, mirror downloader, SQLite connection and cache path."""
    self.log = logging.getLogger('MirrorDatabase')
    self.downloader = MirrorDownloader()
    self.conn = SQLiteHelper(databasefile)
    self.cachefile = 'mirrors.pkl'
class MirrorDatabase:
    """SQLite-backed registry of distribution mirrors and their resolved IPs."""

    def __init__(self, databasefile='mirrors.db'):
        self.downloader = MirrorDownloader()
        self.conn = SQLiteHelper(databasefile)
        self.cachefile = 'mirrors.pkl'
        self.log = logging.getLogger('MirrorDatabase')

    def resolve_mirrors(self):
        """Resolve every mirror hostname to its IP addresses.

        Returns a list of ``(mirror_id, [address, ...])`` tuples; resolution
        runs on a 4-worker thread pool since getaddrinfo is blocking I/O.
        """
        mirrors = self.get_mirrors_from_database()
        threadpool = TPE(4)

        def resolve_one(mirror):
            # getaddrinfo returns one entry per socket type, so the same IP
            # shows up several times — FIX: de-duplicate preserving order.
            hostname = parse.urlparse(mirror['mirror']).hostname
            addresses = [info[-1][0] for info in socket.getaddrinfo(hostname, 80)]
            return (mirror['ID'], list(dict.fromkeys(addresses)))

        results = threadpool.map(resolve_one, mirrors)
        return list(results)

    def get_mirror_by_id(self, id):
        """Return all mirror/IP rows for the mirror with primary key *id*."""
        return self.conn.execute("SELECT mirrors.Distro, mirrors.Mirror, mirror_ip.IP, mirror_ip.City, mirror_ip.Area, mirror_ip.Country FROM mirrors INNER JOIN mirror_ip ON mirrors.ID = mirror_ip.MirrorID WHERE mirrors.ID = ?", [id]).fetchall()

    def get_mirror_by_ip(self, ip):
        """Return all mirror/IP rows whose recorded address equals *ip*."""
        return self.conn.execute("SELECT mirrors.Distro, mirrors.Mirror, mirror_ip.IP, mirror_ip.City, mirror_ip.Area, mirror_ip.Country FROM mirrors INNER JOIN mirror_ip ON mirrors.ID = mirror_ip.MirrorID WHERE mirror_ip.IP = ?", [ip]).fetchall()

    def create_database(self):
        """Create the mirrors and mirror_ip tables when they do not exist yet."""
        if not self.conn.table_exists('mirrors'):
            self.conn.execute('''
                CREATE TABLE mirrors (
                    ID INTEGER PRIMARY KEY,
                    Distro TEXT,
                    Mirror TEXT,
                    Hostname TEXT
                )''')
            self.log.info('Created mirrors db')
        if not self.conn.table_exists('mirror_ip'):
            self.conn.execute('''
                CREATE TABLE mirror_ip (
                    MirrorID INTEGER,
                    IP TEXT,
                    Country TEXT,
                    Area TEXT,
                    City TEXT,
                    FOREIGN KEY(MirrorID) REFERENCES mirrors(ID)
                )
            ''')
            self.log.info('Created mirror_ip db')

    def get_mirrors_from_database(self):
        """Return (ID, Mirror) rows for every known mirror."""
        return self.conn.execute("SELECT ID, Mirror FROM mirrors").fetchall()

    def fill_database(self):
        """Populate the database: download mirrors, resolve and geolocate IPs.

        NOTE(review): this ignores ``self`` and builds its own MirrorDatabase —
        it reads like migrated script code; kept as-is so callers still work.
        """
        logging.basicConfig(level=logging.INFO)
        database = MirrorDatabase()
        helper = Helpers()
        database.create_database()
        downloader = MirrorDownloader()
        if not database.conn.number_of_rows('mirrors'):
            mirrors = downloader.download_mirrors()
            # Return value unused here; the call caches our own IP/location.
            helper.get_location()
            for distro in mirrors:
                database.conn.executemany(
                    'INSERT INTO mirrors (Distro, Mirror) VALUES (?, ?)',
                    [(distro['name'], m) for m in distro['mirrors']])
            database.conn.commit()
        else:
            # FIX: Logger.warn is deprecated in favour of Logger.warning.
            database.log.warning('No mirrors were added')
        hosts = database.resolve_mirrors()
        for mirror_id, addresses in hosts:
            for address in addresses:
                if not database.get_mirror_by_ip(address):
                    database.log.info("IP %s was new", address)
                    location = helper.get_mirror_location(address)
                    country = location['country']['names']['en']
                    city = location['city']['names']['en'] if 'city' in location else ""
                    area = ", ".join([s['names']['en'] for s in location['subdivisions']]) if 'subdivisions' in location else ""
                    database.conn.execute(
                        "INSERT INTO mirror_ip (MirrorID, IP, Country, Area, City) VALUES (?, ?, ?, ?, ?)",
                        [mirror_id, address, country, area, city])
                else:
                    database.log.info("IP %s was already in the database", address)
        database.conn.commit()
        database.conn.close()
if len(splits) > 2: fixed = [" ".join(splits[0:-2]), splits[-1]] splits = fixed yield splits def find_first_line_of_index(self, index_generator): i = 0 for line in index_generator: line = line.decode("utf-8") if " ".join(line.split()) == "FILE LOCATION": return i i += 1 database = SQLiteHelper("packagecontents.db") distros = """ CREATE TABLE distros ( ID INTEGER PRIMARY KEY, Name TEXT ) """ if not database.table_exists("distros"): database.execute(distros) database.commit() database.execute("INSERT INTO distros (Name) VALUES ('Debian')") database.commit()