def main(argv, _db_master=None, _heka_client=None, _stats_client=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')
    parser.add_argument('source', help="The source file.")
    parser.add_argument('--userid', default=None,
                        help='Internal userid for attribution.')

    args = parser.parse_args(argv[1:])
    userid = None
    if args.userid is not None:
        userid = int(args.userid)

    conf = read_config()
    settings = conf.get_map('ichnaea')
    configure_heka(conf.filename, _heka_client=_heka_client)
    configure_stats(conf.get('ichnaea', 'statsd_host'), _client=_stats_client)

    # configure databases incl. test override hooks
    if _db_master is None:  # pragma: no cover
        db = Database(settings['db_master'])
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source, userid=userid)
    print('Added a total of %s records.' % added)
    session.commit()
    return added
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')
    parser.add_argument('source', help="The source file.")
    parser.add_argument('--userid', default=None,
                        help='Internal userid for attribution.')

    args = parser.parse_args(argv[1:])
    userid = None
    if args.userid is not None:
        userid = int(args.userid)

    settings = config().get_map('ichnaea')

    # configure databases incl. test override hooks
    if _db_master is None:
        db = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
            create=False,
        )
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source, userid=userid)
    print('Added a total of %s records.' % added)
    session.commit()
    return added
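# A minimal sketch (assumed, not from the original source) of how an
# importer main() like the ones above is typically wired up as a console
# script entry point; the console_entry name is hypothetical.
import sys


def console_entry():  # pragma: no cover
    # argv is passed through whole so argparse can use argv[0] as the
    # program name and parse the remaining arguments.
    main(sys.argv)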
def attach_database(app, settings=None, _db_master=None):
    # called manually during tests
    if _db_master is None:  # pragma: no cover
        db_master = Database(settings['db_master'])
    else:
        db_master = _db_master
    app.db_master = db_master
def main():
    settings = read_config().get_map('ichnaea')
    db = Database(settings['db_slave'])
    session = db.session()

    bad = []
    offset = 0
    count = 10000
    results = True
    while results:
        results = False
        r = session.execute("select id, lat, lon, mcc, mnc, lac, cid, radio, "
                            "total_measures from cell where "
                            "lat is not null and lon is not null and "
                            "mcc not in (1, 260) "
                            "order by id limit %d offset %d" % (count, offset))
        offset += count
        for row in r:
            results = True
            (id, lat, lon, mcc, mnc, lac, cid, radio, total_measures) = row
            ccs = [c.alpha2 for c in mobile_codes.mcc(str(mcc))]
            if not any([location_is_in_country(lat, lon, c, 1) for c in ccs]):
                if ccs:
                    s = ",".join(ccs)
                else:
                    continue
                bad.append(dict(
                    type='Feature',
                    properties=dict(
                        mcc=mcc,
                        mnc=mnc,
                        lac=lac,
                        cid=cid,
                        radio=radio,
                        total_measures=total_measures,
                        countries=s),
                    geometry=dict(
                        type='Point',
                        coordinates=[lon, lat])))

    json.dump(dict(type='FeatureCollection', features=bad),
              sys.stdout, indent=True)
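# Hedged usage sketch (assumed, not from the original source): main()
# above streams a GeoJSON FeatureCollection to stdout, one Feature per
# cell whose stored position lies outside every country associated with
# its MCC. A typical invocation redirects that into a file; the script
# name here is hypothetical:
#
#   python find_misplaced_cells.py > bad_cells.geojson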
def attach_database(app, _db_master=None):
    # called manually during tests
    settings = config().get_map('ichnaea')
    if _db_master is None:  # pragma: no cover
        db_master = Database(settings['db_master'])
    else:
        db_master = _db_master
    app.db_master = db_master
def main(global_config, _db_master=None, _db_slave=None, **settings):
    config = Configurator(settings=settings)
    config.include("cornice")
    settings = config.registry.settings

    # logging
    global logger
    logger.setLevel(logging.DEBUG)
    sh = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    sh.setFormatter(formatter)
    logger.addHandler(sh)
    waitress_log = logging.getLogger('waitress')
    waitress_log.addHandler(sh)

    from ichnaea.content.views import configure_content
    from ichnaea.service import configure_service

    configure_content(config)
    configure_service(config)

    # configure databases incl. test override hooks
    if _db_master is None:
        config.registry.db_master = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
        )
    else:
        config.registry.db_master = _db_master
    if _db_slave is None:
        config.registry.db_slave = Database(
            settings['db_slave'],
            socket=settings.get('db_slave_socket'),
            create=False,
        )
    else:
        config.registry.db_slave = _db_slave

    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_request_method(db_master_session, property=True)
    config.add_request_method(db_slave_session, property=True)

    # replace json renderer with decimal json variant
    config.add_renderer('json', decimaljson.Renderer())

    return config.make_wsgi_app()
def main(global_config, _db_master=None, _db_slave=None, **settings):
    config = Configurator(settings=settings)

    # add support for pt templates
    config.include('pyramid_chameleon')

    settings = config.registry.settings

    from ichnaea.content.views import configure_content
    from ichnaea.service import configure_service
    from ichnaea.heka_logging import configure_heka

    configure_content(config)
    configure_service(config)

    # configure databases incl. test override hooks
    if _db_master is None:
        config.registry.db_master = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
        )
    else:
        config.registry.db_master = _db_master
    if _db_slave is None:
        config.registry.db_slave = Database(
            settings['db_slave'],
            socket=settings.get('db_slave_socket'),
            create=False,
        )
    else:
        config.registry.db_slave = _db_slave

    config.registry.geoip_db = configure_geoip(config.registry.settings)
    config.registry.heka_client = configure_heka(config.registry.settings)

    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_tween('ichnaea.heka_logging.heka_tween_factory', under=EXCVIEW)
    config.add_request_method(db_master_session, property=True)
    config.add_request_method(db_slave_session, property=True)

    # replace json renderer with decimal json variant
    config.add_renderer('json', decimaljson.Renderer())

    return config.make_wsgi_app()
def main(argv):
    parser = argparse.ArgumentParser(
        prog=argv[0], description="Location Importer")
    parser.add_argument("--dry-run", action="store_true")
    # TODO rely on ICHNAEA_CFG / ichnaea.config, as the worker is relying
    # on it anyways
    parser.add_argument("config", help="config file")
    parser.add_argument("source", help="source file")
    args = parser.parse_args(argv[1:])

    settings = Config(args.config).get_map("ichnaea")
    db = Database(settings["db_master"],
                  socket=settings.get("db_master_socket"),
                  create=False)
    session = db.session()
    added = load_file(session, args.source)
    print("Added %s records." % added)
    if args.dry_run:
        session.rollback()
    else:  # pragma: no cover
        session.commit()
    return added
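# Hedged usage sketch (assumed, not from the original source): with
# --dry-run the session is rolled back, so the source file is parsed and
# counted but nothing is persisted. A hypothetical invocation:
#
#   bin/location_import --dry-run ichnaea.ini measurements.csv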
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')
    parser.add_argument('source', help="source file")
    args = parser.parse_args(argv[1:])

    settings = config().get_map('ichnaea')

    # configure databases incl. test override hooks
    if _db_master is None:
        db = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
            create=False,
        )
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source)
    print('Added %s records.' % added)
    session.commit()
    return added
class DBFixture(object):

    def __init__(self):
        self.db_master = Database(SQLURI)
        master_conn = self.db_master.engine.connect()
        self.master_trans = master_conn.begin()
        self.db_master_session = self.db_master.session()

    def install_wifi_aps(self):
        self.db_master_session.execute(Wifi.__table__.delete())
        ap_data = json.load(open(AP_FILE),
                            object_hook=JSONLocationDictDecoder)

        session = self.db_master_session
        data = []
        for i, ((lat, lon), ap_set) in enumerate(ap_data.items()):
            for ap in ap_set:
                wifi = Wifi(key=ap['key'].replace(":", ""),
                            lat=lat * (10 ** 7),
                            lon=lon * (10 ** 7))
                data.append(wifi)
        session.add_all(data)
        session.flush()
        data = []
        session.commit()

    def install_cell_towers(self):
        self.db_master_session.execute(Cell.__table__.delete())
        tower_data = json.load(open(TOWER_FILE),
                               object_hook=JSONLocationDictDecoder)

        session = self.db_master_session
        data = []
        for i, ((lat, lon), cell_data_set) in enumerate(tower_data.items()):
            for cell_data in cell_data_set:
                data.append(Cell(lat=lat * (10 ** 7),
                                 lon=lon * (10 ** 7),
                                 radio=cell_data['radio'],
                                 cid=cell_data['cid'],
                                 mcc=cell_data['mcc'],
                                 mnc=cell_data['mnc'],
                                 lac=cell_data['lac']))
        session.add_all(data)
        session.flush()
        data = []
        session.commit()
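# A minimal usage sketch (assumed, not from the original source) showing
# how DBFixture might seed a database before a benchmark or integration
# run; the seed_all helper name is hypothetical.
def seed_all():
    fixture = DBFixture()  # opens a connection and begins a transaction
    fixture.install_wifi_aps()
    fixture.install_cell_towers()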
def main(argv, _db_rw=None, _raven_client=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Initialize Ichnaea database')
    parser.add_argument('--alembic_ini',
                        help='Path to the alembic migration config.')
    parser.add_argument('--location_ini',
                        help='Path to the ichnaea app config.')
    parser.add_argument('--initdb', action='store_true',
                        help='Initialize database')

    args = parser.parse_args(argv[1:])
    if args.initdb:
        # Either use explicit config file location or fallback
        # on environment variable or finally file in current directory
        if not args.location_ini:
            location_ini = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
        else:
            location_ini = args.location_ini
        location_ini = os.path.abspath(location_ini)
        location_cfg = read_config(filename=location_ini)

        # Either use explicit config file location or fallback
        # to a file in the same directory as the ichnaea.ini
        if not args.alembic_ini:
            alembic_ini = os.path.join(
                os.path.dirname(location_ini), 'alembic.ini')
        else:
            alembic_ini = args.alembic_ini
        alembic_ini = os.path.abspath(alembic_ini)
        alembic_cfg = Config(alembic_ini)
        alembic_section = alembic_cfg.get_section('alembic')

        if _db_rw is None:
            db_rw = Database(alembic_section['sqlalchemy.url'])
        else:
            db_rw = _db_rw

        configure_raven(
            location_cfg.get('ichnaea', 'sentry_dsn'),
            _client=_raven_client)

        engine = db_rw.engine
        create_schema(engine, alembic_cfg, location_cfg)
    else:
        parser.print_help()
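# Hedged usage sketch (assumed, not from the original source): the
# initializer above only acts when --initdb is given; otherwise it
# prints its help text. A hypothetical first-time setup:
#
#   bin/location_initdb --initdb \
#       --location_ini=/path/to/ichnaea.ini \
#       --alembic_ini=/path/to/alembic.ini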
class TestSearch(TestCase):
    """
    Search tests should only be affected by mysql outages.
    All redis tests are in one test case
    """

    def setUp(self):
        TestCase.setUp(self)
        self.redis = VaurienRedis()
        self.mysql = VaurienMySQL()
        uri = os.environ.get('SQLURI',
                             'mysql+pymysql://root:mysql@localhost/location')
        self.db = Database(uri)
        self.install_apikey()
        self.install_fixtures()

    def install_fixtures(self):
        session = self.db.session()
        PARIS_LAT_DEG = from_degrees(PARIS_LAT)
        PARIS_LON_DEG = from_degrees(PARIS_LON)
        qry = session.query(Cell)
        if qry.count() > 0:
            session.query(Cell).delete()
        lat = from_degrees(PARIS_LAT)
        lon = from_degrees(PARIS_LON)
        key = dict(mcc=FRANCE_MCC, mnc=2, lac=3)
        data = [
            Cell(lat=lat, lon=lon, radio=2, cid=4, **key),
            Cell(lat=lat + 20000, lon=lon + 40000, radio=2, cid=5, **key),
        ]
        session.add_all(data)

        if session.query(Wifi).count() > 0:
            session.query(Wifi).delete()
        wifis = [
            Wifi(key="A1", lat=PARIS_LAT_DEG, lon=PARIS_LON_DEG),
            Wifi(key="B2", lat=PARIS_LAT_DEG, lon=PARIS_LON_DEG),
            Wifi(key="C3", lat=PARIS_LAT_DEG, lon=PARIS_LON_DEG),
            Wifi(key="D4", lat=None, lon=None),
        ]
        session.add_all(wifis)
        session.commit()

    def install_apikey(self):
        session = self.db.session()
        if session.query(ApiKey).filter(
                ApiKey.valid_key == 'test').count() > 0:
            session.query(ApiKey).delete()
        session.add(ApiKey(valid_key='test', maxreq=0))
        session.commit()

    def test_mysql_dummy(self):
        # this should pass, otherwise, vaurien has screwed up
        self.mysql.dummy()
        self.redis.dummy()
        status_code, content = do_search(use_ip=PARIS_IP)
        eq_(status_code, 200)
        expected = {u'status': u'ok',
                    u'lat': PARIS_LAT,
                    u'lon': PARIS_LON,
                    u'accuracy': 100}
        actual = json.loads(content)
        self.assertAlmostEquals(actual['status'], expected['status'])
        self.assertAlmostEquals(actual['lat'], expected['lat'])
        self.assertAlmostEquals(actual['lon'], expected['lon'])
        self.assertAlmostEquals(actual['accuracy'], expected['accuracy'])

    def test_mysql_delay(self):
        # this should pass, otherwise, vaurien has screwed up
        self.mysql.delay()
        self.redis.dummy()
        start = time.time()
        status_code, content = do_search()
        end = time.time()
        assert (end - start) > 1.0
        eq_(status_code, 200)
        eq_(json.loads(content), {"status": "ok",
                                  "lat": PARIS_LAT,
                                  "lon": PARIS_LON,
                                  "accuracy": 100})

    def test_mysql_blackout(self):
        # This test has been renamed so that it runs last
        self.mysql.blackout()
        self.redis.dummy()
        # MySQL blackouts will cause API key checking to be disabled
        status_code, content = do_search(apikey='invalid_key',
                                         use_ip=FREMONT_IP)
        # MySQL blackouts will force only geo-ip to work
        eq_(status_code, 200)
        actual = json.loads(content)
        expected = {"status": "ok",
                    "lat": FREMONT_LAT,
                    "lon": FREMONT_LON,
                    "accuracy": GEOIP_CITY_ACCURACY}
        # TODO: not sure why we need almost equal for geoip
        self.assertAlmostEquals(actual['status'], expected['status'])
        self.assertAlmostEquals(actual['lat'], expected['lat'])
        self.assertAlmostEquals(actual['lon'], expected['lon'])
        self.assertAlmostEquals(actual['accuracy'], expected['accuracy'])

    def test_redis_dummy(self):
        self.mysql.dummy()
        self.redis.dummy()
        status_code, content = do_search()
        eq_(status_code, 200)
        eq_(json.loads(content), {"status": "ok",
                                  "lat": PARIS_LAT,
                                  "lon": PARIS_LON,
                                  "accuracy": 100})

    def test_redis_delay(self):
        self.mysql.dummy()
        self.redis.delay()
        start = time.time()
        status_code, content = do_search()
        delta = time.time() - start
        # The delay in redis should not affect search
        self.assertTrue(delta < 1.0)
        eq_(status_code, 200)
        eq_(json.loads(content), {"status": "ok",
                                  "lat": PARIS_LAT,
                                  "lon": PARIS_LON,
                                  "accuracy": 100})

    def test_redis_blackout(self):
        self.mysql.dummy()
        self.redis.blackout()
        start = time.time()
        status_code, content = do_search()
        delta = time.time() - start
        # The redis blackout should not affect search at all
        self.assertTrue(delta < 1.0)
        eq_(status_code, 200)
        eq_(json.loads(content), {"status": "ok",
                                  "lat": PARIS_LAT,
                                  "lon": PARIS_LON,
                                  "accuracy": 100})
def _make_db(uri=SQLURI):
    return Database(uri)
def main(argv, _db_master=None, _heka_client=None, _stats_client=None):
    # run for example via:
    # bin/location_map --create --upload --datamaps=/path/to/datamaps/ \
    #     --output=ichnaea/content/static/tiles/

    parser = argparse.ArgumentParser(
        prog=argv[0],
        description='Generate and upload datamap tiles.')
    parser.add_argument('--create', action='store_true',
                        help='Create tiles.')
    parser.add_argument('--upload', action='store_true',
                        help='Upload tiles to S3.')
    parser.add_argument('--concurrency', default=2,
                        help='How many concurrent render processes to use?')
    parser.add_argument('--datamaps',
                        help='Directory of the datamaps tools.')
    parser.add_argument('--output',
                        help='Optional directory for local tile output.')
    args = parser.parse_args(argv[1:])

    if args.create:
        conf = read_config()
        if _db_master:
            db = _db_master
        else:  # pragma: no cover
            db = Database(conf.get('ichnaea', 'db_master'))
        bucketname = conf.get('ichnaea', 's3_assets_bucket').strip('/')
        heka_client = configure_heka(conf.filename, _heka_client=_heka_client)
        stats_client = configure_stats(
            conf.get('ichnaea', 'statsd_host'), _client=_stats_client)

        upload = False
        if args.upload:  # pragma: no cover
            upload = bool(args.upload)

        concurrency = 2
        if args.concurrency:
            concurrency = int(args.concurrency)

        datamaps = ''
        if args.datamaps:
            datamaps = os.path.abspath(args.datamaps)

        output = None
        if args.output:
            output = os.path.abspath(args.output)

        try:
            with stats_client.timer("datamaps.total_time"):
                generate(db, bucketname, heka_client, stats_client,
                         upload=upload,
                         concurrency=concurrency,
                         datamaps=datamaps,
                         output=output)
        except Exception:  # pragma: no cover
            heka_client.raven(RAVEN_ERROR)
            raise
    else:  # pragma: no cover
        parser.print_help()
def _make_db(create=True):
    return Database(SQLURI, socket=SQLSOCKET, create=create)
def main(global_config, heka_config=None, init=False,
         _db_master=None, _db_slave=None, _heka_client=None, _redis=None,
         _stats_client=None, **settings):
    config = Configurator(settings=settings)

    # add support for pt templates
    config.include('pyramid_chameleon')

    settings = config.registry.settings

    from ichnaea.content.views import configure_content
    from ichnaea.logging import configure_heka
    from ichnaea.logging import configure_stats
    from ichnaea.service import configure_service

    configure_content(config)
    configure_service(config)

    # configure databases incl. test override hooks
    if _db_master is None:
        config.registry.db_master = Database(settings['db_master'])
    else:
        config.registry.db_master = _db_master
    if _db_slave is None:
        config.registry.db_slave = Database(settings['db_slave'])
    else:
        config.registry.db_slave = _db_slave

    if _redis is None:
        config.registry.redis_client = None
        if 'redis_url' in settings:
            config.registry.redis_client = redis_client(settings['redis_url'])
    else:
        config.registry.redis_client = _redis

    if _heka_client is None:  # pragma: no cover
        config.registry.heka_client = heka_client = configure_heka(heka_config)
    else:
        config.registry.heka_client = heka_client = _heka_client

    config.registry.stats_client = configure_stats(
        settings.get('statsd_host'), _client=_stats_client)

    config.registry.geoip_db = configure_geoip(
        config.registry.settings, heka_client=heka_client)

    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_tween('ichnaea.logging.log_tween_factory', under=EXCVIEW)
    config.add_request_method(db_master_session, property=True)
    config.add_request_method(db_slave_session, property=True)

    # replace json renderer with custom json variant
    config.add_renderer('json', customjson.Renderer())

    # Should we try to initialize and establish the outbound connections?
    if init:  # pragma: no cover
        registry = config.registry
        registry.db_slave.ping()
        registry.redis_client.ping()
        registry.stats_client.ping()

    return config.make_wsgi_app()
def main(global_config, heka_config=None, init=False,
         _db_master=None, _db_slave=None, _heka_client=None, _redis=None,
         _stats_client=None, **settings):
    config = Configurator(settings=settings)

    # add support for pt templates
    config.include('pyramid_chameleon')

    settings = config.registry.settings

    from ichnaea.content.views import configure_content
    from ichnaea.logging import configure_heka
    from ichnaea.logging import configure_stats
    from ichnaea.service import configure_service

    configure_content(config)
    configure_service(config)

    # configure databases incl. test override hooks
    if _db_master is None:
        config.registry.db_master = Database(settings['db_master'])
    else:
        config.registry.db_master = _db_master
    if _db_slave is None:
        config.registry.db_slave = Database(settings['db_slave'])
    else:
        config.registry.db_slave = _db_slave

    if _redis is None:
        config.registry.redis_client = None
        if 'redis_url' in settings:
            config.registry.redis_client = redis_client(settings['redis_url'])
    else:
        config.registry.redis_client = _redis

    if _heka_client is None:  # pragma: no cover
        config.registry.heka_client = heka_client = configure_heka(heka_config)
    else:
        config.registry.heka_client = heka_client = _heka_client

    config.registry.stats_client = configure_stats(
        settings.get('statsd_host'), _client=_stats_client)

    config.registry.geoip_db = configure_geoip(
        config.registry.settings, heka_client=heka_client)

    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_tween('ichnaea.logging.log_tween_factory', under=EXCVIEW)
    config.add_request_method(db_master_session, property=True)
    config.add_request_method(db_slave_session, property=True)

    # replace json renderer with custom json variant
    config.add_renderer('json', customjson.Renderer())

    # Should we try to initialize and establish the outbound connections?
    if init:  # pragma: no cover
        # Test the slave DB connection
        with db_worker_session(config.registry.db_slave) as session:
            try:
                session.execute(select([func.now()])).first()
            except OperationalError:
                # Let the instance start, so it can recover / reconnect
                # to the DB later, but provide degraded service in the
                # meantime.
                pass

        # Test the redis connection
        try:
            config.registry.redis_client.ping()
        except ConnectionError:
            # Same as for the DB, continue with degraded service.
            pass

    return config.make_wsgi_app()
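# A hedged sketch (assumed, not from the original source) of the imports
# the init block above relies on. The SQLAlchemy and redis-py paths are
# the standard ones; the db_worker_session location is inferred from the
# ichnaea.db tween path used above.
from redis.exceptions import ConnectionError
from sqlalchemy import func, select
from sqlalchemy.exc import OperationalError

from ichnaea.db import db_worker_session  # assumed module layout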
def _make_db(uri=SQLURI, socket=SQLSOCKET, create=True):
    return Database(uri, socket=socket, create=create)
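# Hedged usage sketch (assumed, not from the original source): in test
# modules these factories are typically called once per suite, with
# create=False reusing an already-initialized schema:
#
#   db = _make_db(create=False)
#   session = db.session()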