def main(argv, _db_master=None, _heka_client=None, _stats_client=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')
    parser.add_argument('source', help="The source file.")
    parser.add_argument('--userid', default=None,
                        help='Internal userid for attribution.')
    args = parser.parse_args(argv[1:])

    userid = None
    if args.userid is not None:
        userid = int(args.userid)

    conf = read_config()
    settings = conf.get_map('ichnaea')
    configure_heka(conf.filename, _heka_client=_heka_client)
    configure_stats(conf.get('ichnaea', 'statsd_host'),
                    _client=_stats_client)

    # configure databases incl. test override hooks
    if _db_master is None:  # pragma: no cover
        db = Database(settings['db_master'])
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source, userid=userid)
    print('Added a total of %s records.' % added)
    session.commit()
    return added

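# A minimal sketch of wiring this entry point up as a script; it assumes
# only the standard library. The trailing keyword arguments are test
# override hooks and keep their None defaults in normal use.
import sys

if __name__ == '__main__':
    main(sys.argv)
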
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')
    parser.add_argument('source', help="The source file.")
    parser.add_argument('--userid', default=None,
                        help='Internal userid for attribution.')
    args = parser.parse_args(argv[1:])

    userid = None
    if args.userid is not None:
        userid = int(args.userid)

    settings = config().get_map('ichnaea')

    # configure databases incl. test override hooks
    if _db_master is None:
        db = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
            create=False,
        )
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source, userid=userid)
    print('Added a total of %s records.' % added)
    session.commit()
    return added

class DBFixture(object):

    def __init__(self):
        self.db_master = Database(SQLURI)
        master_conn = self.db_master.engine.connect()
        self.master_trans = master_conn.begin()
        self.db_master_session = self.db_master.session()

    def install_wifi_aps(self):
        self.db_master_session.execute(Wifi.__table__.delete())
        with open(AP_FILE) as fd:
            ap_data = json.load(fd, object_hook=JSONLocationDictDecoder)

        session = self.db_master_session
        data = []
        for (lat, lon), ap_set in ap_data.items():
            for ap in ap_set:
                # Coordinates are stored as degrees * 10**7.
                wifi = Wifi(key=ap['key'].replace(":", ""),
                            lat=lat * (10 ** 7),
                            lon=lon * (10 ** 7))
                data.append(wifi)
            # Flush one batch per location to keep the session small.
            session.add_all(data)
            session.flush()
            data = []
        session.commit()

    def install_cell_towers(self):
        self.db_master_session.execute(Cell.__table__.delete())
        with open(TOWER_FILE) as fd:
            tower_data = json.load(fd, object_hook=JSONLocationDictDecoder)

        session = self.db_master_session
        data = []
        for (lat, lon), cell_data_set in tower_data.items():
            for cell_data in cell_data_set:
                data.append(Cell(lat=lat * (10 ** 7),
                                 lon=lon * (10 ** 7),
                                 radio=cell_data['radio'],
                                 cid=cell_data['cid'],
                                 mcc=cell_data['mcc'],
                                 mnc=cell_data['mnc'],
                                 lac=cell_data['lac']))
            session.add_all(data)
            session.flush()
            data = []
        session.commit()

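# Hedged usage sketch: a load-test run might install both datasets once
# before driving traffic. DBFixture and the SQLURI/AP_FILE/TOWER_FILE
# constants come from the surrounding module; nothing else is assumed.
fixture = DBFixture()
fixture.install_wifi_aps()
fixture.install_cell_towers()
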
def main():
    settings = read_config().get_map('ichnaea')
    db = Database(settings['db_slave'])
    session = db.session()

    bad = []
    offset = 0
    count = 10000
    results = True
    while results:
        results = False
        # Page through the cell table; count and offset are trusted
        # local integers, so plain interpolation is safe here.
        r = session.execute(
            "select id, lat, lon, mcc, mnc, lac, cid, radio, "
            "total_measures from cell where "
            "lat is not null and lon is not null and "
            "mcc not in (1, 260) "
            "order by id limit %d offset %d" % (count, offset))
        offset += count
        for row in r:
            results = True
            (id, lat, lon, mcc, mnc, lac, cid,
             radio, total_measures) = row
            ccs = [c.alpha2 for c in mobile_codes.mcc(str(mcc))]
            if not any(location_is_in_country(lat, lon, c, 1)
                       for c in ccs):
                if not ccs:
                    continue
                s = ",".join(ccs)
                bad.append(dict(
                    type='Feature',
                    properties=dict(
                        mcc=mcc, mnc=mnc, lac=lac, cid=cid,
                        radio=radio, total_measures=total_measures,
                        countries=s),
                    geometry=dict(
                        type='Point',
                        coordinates=[lon, lat])))

    json.dump(dict(type='FeatureCollection', features=bad),
              sys.stdout, indent=True)

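# Each entry appended to `bad` is a standard GeoJSON Feature. An
# illustrative record (all values invented) mirrors the dicts built above;
# note the GeoJSON coordinate order of [longitude, latitude].
example_feature = {
    'type': 'Feature',
    'properties': {
        'mcc': 262, 'mnc': 1, 'lac': 1, 'cid': 12345,
        'radio': 0, 'total_measures': 17,
        'countries': 'DE',
    },
    'geometry': {'type': 'Point', 'coordinates': [13.4, 52.5]},
}
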
def main(argv):
    parser = argparse.ArgumentParser(
        prog=argv[0], description="Location Importer")
    parser.add_argument("--dry-run", action="store_true")
    # TODO rely on ICHNAEA_CFG / ichnaea.config, as the worker is relying
    # on it anyways
    parser.add_argument("config", help="config file")
    parser.add_argument("source", help="source file")
    args = parser.parse_args(argv[1:])

    settings = Config(args.config).get_map("ichnaea")
    db = Database(settings["db_master"],
                  socket=settings.get("db_master_socket"),
                  create=False)
    session = db.session()
    added = load_file(session, args.source)
    print("Added %s records." % added)
    if args.dry_run:
        session.rollback()
    else:  # pragma: no cover
        session.commit()
    return added

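# Hedged invocation sketch; the file names below are purely illustrative,
# but the positional config/source arguments and the --dry-run flag match
# the parser above, e.g.:
#
#   python importer.py --dry-run ichnaea.ini observations.csv
import sys

if __name__ == '__main__':
    main(sys.argv)
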
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')
    parser.add_argument('source', help="source file")
    args = parser.parse_args(argv[1:])

    settings = config().get_map('ichnaea')

    # configure databases incl. test override hooks
    if _db_master is None:
        db = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
            create=False,
        )
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source)
    print('Added %s records.' % added)
    session.commit()
    return added

class TestSearch(TestCase):
    """Search tests should only be affected by mysql outages.

    All redis tests are in one test case.
    """

    def setUp(self):
        TestCase.setUp(self)
        self.redis = VaurienRedis()
        self.mysql = VaurienMySQL()
        uri = os.environ.get(
            'SQLURI', 'mysql+pymysql://root:mysql@localhost/location')
        self.db = Database(uri)
        self.install_apikey()
        self.install_fixtures()

    def install_fixtures(self):
        session = self.db.session()
        PARIS_LAT_DEG = from_degrees(PARIS_LAT)
        PARIS_LON_DEG = from_degrees(PARIS_LON)

        qry = session.query(Cell)
        if qry.count() > 0:
            session.query(Cell).delete()
        lat = from_degrees(PARIS_LAT)
        lon = from_degrees(PARIS_LON)
        key = dict(mcc=FRANCE_MCC, mnc=2, lac=3)
        data = [
            Cell(lat=lat, lon=lon, radio=2, cid=4, **key),
            Cell(lat=lat + 20000, lon=lon + 40000, radio=2, cid=5, **key),
        ]
        session.add_all(data)

        if session.query(Wifi).count() > 0:
            session.query(Wifi).delete()
        wifis = [
            Wifi(key="A1", lat=PARIS_LAT_DEG, lon=PARIS_LON_DEG),
            Wifi(key="B2", lat=PARIS_LAT_DEG, lon=PARIS_LON_DEG),
            Wifi(key="C3", lat=PARIS_LAT_DEG, lon=PARIS_LON_DEG),
            Wifi(key="D4", lat=None, lon=None),
        ]
        session.add_all(wifis)
        session.commit()

    def install_apikey(self):
        session = self.db.session()
        if session.query(ApiKey).filter(
                ApiKey.valid_key == 'test').count() > 0:
            session.query(ApiKey).delete()
        session.add(ApiKey(valid_key='test', maxreq=0))
        session.commit()

    def test_mysql_dummy(self):
        # this should pass; otherwise vaurien has screwed up
        self.mysql.dummy()
        self.redis.dummy()
        status_code, content = do_search(use_ip=PARIS_IP)
        eq_(status_code, 200)
        expected = {u'status': u'ok', u'lat': PARIS_LAT,
                    u'lon': PARIS_LON, u'accuracy': 100}
        actual = json.loads(content)
        self.assertAlmostEqual(actual['status'], expected['status'])
        self.assertAlmostEqual(actual['lat'], expected['lat'])
        self.assertAlmostEqual(actual['lon'], expected['lon'])
        self.assertAlmostEqual(actual['accuracy'], expected['accuracy'])

    def test_mysql_delay(self):
        # this should pass; otherwise vaurien has screwed up
        self.mysql.delay()
        self.redis.dummy()
        start = time.time()
        status_code, content = do_search()
        end = time.time()
        assert (end - start) > 1.0
        eq_(status_code, 200)
        eq_(json.loads(content),
            {"status": "ok", "lat": PARIS_LAT,
             "lon": PARIS_LON, "accuracy": 100})

    def test_mysql_blackout(self):
        # This test has been renamed so that it runs last
        self.mysql.blackout()
        self.redis.dummy()
        # MySQL blackouts will cause API key checking to be disabled
        status_code, content = do_search(apikey='invalid_key',
                                         use_ip=FREMONT_IP)
        # MySQL blackouts will force only geo-ip to work
        eq_(status_code, 200)
        actual = json.loads(content)
        expected = {"status": "ok", "lat": FREMONT_LAT,
                    "lon": FREMONT_LON,
                    "accuracy": GEOIP_CITY_ACCURACY}
        # TODO: not sure why we need almost equal for geoip
        self.assertAlmostEqual(actual['status'], expected['status'])
        self.assertAlmostEqual(actual['lat'], expected['lat'])
        self.assertAlmostEqual(actual['lon'], expected['lon'])
        self.assertAlmostEqual(actual['accuracy'], expected['accuracy'])

    def test_redis_dummy(self):
        self.mysql.dummy()
        self.redis.dummy()
        status_code, content = do_search()
        eq_(status_code, 200)
        eq_(json.loads(content),
            {"status": "ok", "lat": PARIS_LAT,
             "lon": PARIS_LON, "accuracy": 100})

    def test_redis_delay(self):
        self.mysql.dummy()
        self.redis.delay()
        start = time.time()
        status_code, content = do_search()
        delta = time.time() - start
        # The delay in redis should not affect search
        self.assertTrue(delta < 1.0)
        eq_(status_code, 200)
        eq_(json.loads(content),
            {"status": "ok", "lat": PARIS_LAT,
             "lon": PARIS_LON, "accuracy": 100})

    def test_redis_blackout(self):
        self.mysql.dummy()
        self.redis.blackout()
        start = time.time()
        status_code, content = do_search()
        delta = time.time() - start
        # The redis blackout should not affect search at all
        self.assertTrue(delta < 1.0)
        eq_(status_code, 200)
        eq_(json.loads(content),
            {"status": "ok", "lat": PARIS_LAT,
             "lon": PARIS_LON, "accuracy": 100})

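# The tests above rely on exactly three behaviours from each proxy
# controller. A minimal stand-in capturing that implied interface (this
# class is an assumption for illustration, not the real Vaurien client):
class FakeProxy(object):

    def dummy(self):
        # pass traffic through unchanged
        pass

    def delay(self):
        # inject a delay into every request
        pass

    def blackout(self):
        # drop all traffic to simulate a full outage
        pass
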