def setUpClass(cls): super(LogTestCase, cls).setUpClass() # Use a debug configuration cls.raven_client = configure_raven( None, transport='sync', _client=DebugRavenClient()) cls.stats_client = configure_stats( None, _client=DebugStatsClient(tag_support=True))
def main(argv, _raven_client=None, _stats_client=None):
    """Command line entry point: generate and optionally upload datamap tiles.

    :param argv: Full argument vector; ``argv[0]`` is used as the program name.
    :param _raven_client: Test-only hook, a pre-configured Raven client.
    :param _stats_client: Test-only hook, a pre-configured stats client.
    """
    # run for example via:
    # bin/location_map --create --upload --datamaps=/path/to/datamaps/ \
    #   --output=ichnaea/content/static/tiles/
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Generate and upload datamap tiles.')
    parser.add_argument('--create', action='store_true',
                        help='Create tiles?')
    parser.add_argument('--upload', action='store_true',
                        help='Upload tiles to S3?')
    parser.add_argument('--concurrency', default=2,
                        help='How many concurrent processes to use?')
    parser.add_argument('--datamaps',
                        help='Directory of the datamaps tools.')
    parser.add_argument('--output',
                        help='Optional directory for output files.')
    args = parser.parse_args(argv[1:])

    if args.create:
        conf = read_config()
        db_url = conf.get('database', 'rw_url')
        raven_client = configure_raven(
            conf.get('sentry', 'dsn'),
            transport='sync', _client=_raven_client)
        stats_client = configure_stats(conf, _client=_stats_client)
        # S3 bucket names must not carry surrounding slashes.
        bucketname = conf.get('assets', 'bucket').strip('/')

        upload = False
        if args.upload:
            upload = bool(args.upload)

        # NOTE(review): --concurrency defaults to 2 (truthy), so the
        # cpu_count() fallback only takes effect for --concurrency=0.
        concurrency = billiard.cpu_count()
        if args.concurrency:
            concurrency = int(args.concurrency)

        datamaps = ''
        if args.datamaps:
            datamaps = os.path.abspath(args.datamaps)

        output = None
        if args.output:
            output = os.path.abspath(args.output)

        try:
            # Time the whole generation run under the 'datamaps' metric.
            with stats_client.timed('datamaps', tags=['func:main']):
                generate(db_url, bucketname, raven_client, stats_client,
                         upload=upload,
                         concurrency=concurrency,
                         datamaps=datamaps,
                         output=output)
        except Exception:  # pragma: no cover
            raven_client.captureException()
            raise
    else:  # pragma: no cover
        parser.print_help()
def main(argv, _db_rw=None, _raven_client=None):
    """Command line entry point: initialize the Ichnaea database schema.

    :param argv: Full argument vector; ``argv[0]`` is used as the program name.
    :param _db_rw: Test-only hook, a pre-configured read-write database.
    :param _raven_client: Test-only hook, a pre-configured Raven client.
    """
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Initialize Ichnaea database')
    parser.add_argument('--alembic_ini',
                        help='Path to the alembic migration config.')
    parser.add_argument('--location_ini',
                        help='Path to the ichnaea app config.')
    parser.add_argument('--initdb', action='store_true',
                        help='Initialize database')
    args = parser.parse_args(argv[1:])

    if args.initdb:
        # Either use explicit config file location or fallback
        # on environment variable or finally file in current directory
        if not args.location_ini:
            location_ini = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
        else:
            location_ini = args.location_ini
        location_ini = os.path.abspath(location_ini)
        location_cfg = read_config(filename=location_ini)

        # Either use explicit config file location or fallback
        # to a file in the same directory as the ichnaea.ini
        if not args.alembic_ini:
            alembic_ini = os.path.join(
                os.path.dirname(location_ini), 'alembic.ini')
        else:
            alembic_ini = args.alembic_ini
        alembic_ini = os.path.abspath(alembic_ini)
        alembic_cfg = Config(alembic_ini)
        # The database URL is taken from alembic's own config section.
        alembic_section = alembic_cfg.get_section('alembic')

        if _db_rw is None:
            db_rw = Database(alembic_section['sqlalchemy.url'])
        else:
            db_rw = _db_rw

        # Sentry is configured purely for its side effect of installing
        # a client for error reporting during schema creation.
        configure_raven(
            location_cfg.get('ichnaea', 'sentry_dsn'),
            transport='sync', _client=_raven_client)

        engine = db_rw.engine
        create_schema(engine, alembic_cfg, location_cfg)
    else:
        parser.print_help()
def main(argv, _db_rw=None, _raven_client=None):
    """Command line entry point: initialize the Ichnaea database schema.

    :param argv: Full argument vector; ``argv[0]`` is used as the program name.
    :param _db_rw: Test-only hook, a pre-configured read-write database.
    :param _raven_client: Test-only hook, a pre-configured Raven client.
    """
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Initialize Ichnaea database')
    parser.add_argument('--alembic_ini',
                        help='Path to the alembic migration config.')
    parser.add_argument('--location_ini',
                        help='Path to the ichnaea app config.')
    parser.add_argument('--initdb', action='store_true',
                        help='Initialize database')
    args = parser.parse_args(argv[1:])

    if args.initdb:
        # Either use explicit config file location or fallback
        # on environment variable or finally file in current directory
        if not args.location_ini:
            location_ini = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
        else:
            location_ini = args.location_ini
        location_ini = os.path.abspath(location_ini)
        location_cfg = read_config(filename=location_ini)

        # Either use explicit config file location or fallback
        # to a file in the same directory as the ichnaea.ini
        if not args.alembic_ini:
            alembic_ini = os.path.join(
                os.path.dirname(location_ini), 'alembic.ini')
        else:
            alembic_ini = args.alembic_ini
        alembic_ini = os.path.abspath(alembic_ini)
        alembic_cfg = Config(alembic_ini)
        # The database URL is taken from alembic's own config section.
        alembic_section = alembic_cfg.get_section('alembic')

        if _db_rw is None:
            db_rw = Database(alembic_section['sqlalchemy.url'])
        else:
            db_rw = _db_rw

        # NOTE(review): no transport= is passed here, so the Raven client
        # uses its default transport — confirm this is intentional.
        configure_raven(location_cfg.get('ichnaea', 'sentry_dsn'),
                        _client=_raven_client)

        engine = db_rw.engine
        create_schema(engine, alembic_cfg, location_cfg)
    else:
        parser.print_help()
def setUpClass(cls):
    """Install in-memory debug Raven/stats clients for log-capturing tests."""
    super(LogTestCase, cls).setUpClass()
    # The debug clients buffer events locally instead of talking to
    # external Sentry / statsd services, so tests can inspect them.
    debug_raven = DebugRavenClient()
    debug_stats = DebugStatsClient(tag_support=True)
    cls.raven_client = configure_raven(
        None, transport='sync', _client=debug_raven)
    cls.stats_client = configure_stats(None, _client=debug_stats)
def main(argv, _raven_client=None, _bucketname=None):
    """Command line entry point: generate and optionally upload datamap tiles.

    :param argv: Full argument vector; ``argv[0]`` is used as the program name.
    :param _raven_client: Test-only hook, a pre-configured Raven client.
    :param _bucketname: Test-only hook, overrides the configured asset bucket.
    """
    # run for example via:
    # bin/location_map --create --upload \
    #   --output=ichnaea/content/static/tiles/
    parser = argparse.ArgumentParser(
        prog=argv[0], description="Generate and upload datamap tiles.")
    parser.add_argument("--create", action="store_true",
                        help="Create tiles?")
    parser.add_argument("--upload", action="store_true",
                        help="Upload tiles to S3?")
    parser.add_argument("--concurrency", default=2,
                        help="How many concurrent processes to use?")
    parser.add_argument("--output",
                        help="Optional directory for output files.")
    args = parser.parse_args(argv[1:])

    if args.create:
        raven_client = configure_raven(
            transport="sync", tags={"app": "datamap"}, _client=_raven_client)
        # Configured for its side effect only; metrics go through METRICS.
        configure_stats()

        bucketname = _bucketname
        if not _bucketname:
            bucketname = settings("asset_bucket")
        if bucketname:
            # S3 bucket names must not carry surrounding slashes.
            bucketname = bucketname.strip("/")

        upload = False
        if args.upload:
            upload = bool(args.upload)

        # NOTE(review): --concurrency defaults to 2 (truthy), so the
        # cpu_count() fallback only takes effect for --concurrency=0.
        concurrency = billiard.cpu_count()
        if args.concurrency:
            concurrency = int(args.concurrency)

        output = None
        if args.output:
            output = os.path.abspath(args.output)

        try:
            with METRICS.timer("datamaps", tags=["func:main"]):
                generate(
                    bucketname,
                    raven_client,
                    upload=upload,
                    concurrency=concurrency,
                    output=output,
                )
        except Exception:
            raven_client.captureException()
            raise
    else:
        parser.print_help()
def init_worker( celery_app, app_config, _db_rw=None, _db_ro=None, _geoip_db=None, _raven_client=None, _redis_client=None, _stats_client=None, ): """ Configure the passed in celery app, usually stored in :data:`ichnaea.async.app.celery_app`. Does connection, settings and queue setup. Attaches some additional functionality to the :class:`celery.Celery` instance. This is executed inside each forked worker process. The parameters starting with an underscore are test-only hooks to provide pre-configured connection objects. :param _db_ro: Ignored, read-only database connection isn't used. """ # make config file settings available celery_app.settings = app_config.asdict() # configure outside connections celery_app.db_rw = configure_db(app_config.get("ichnaea", "db_master"), _db=_db_rw) celery_app.raven_client = raven_client = configure_raven( app_config.get("ichnaea", "sentry_dsn"), transport="threaded", _client=_raven_client ) celery_app.redis_client = redis_client = configure_redis( app_config.get("ichnaea", "redis_url"), _client=_redis_client ) celery_app.stats_client = configure_stats(app_config.get("ichnaea", "statsd_host"), _client=_stats_client) celery_app.geoip_db = configure_geoip( app_config.get("ichnaea", "geoip_db_path"), raven_client=raven_client, _client=_geoip_db ) # configure data / export queues celery_app.all_queues = all_queues = set([q.name for q in CELERY_QUEUES]) celery_app.data_queues = data_queues = configure_data(redis_client) for queue in data_queues.values(): if queue.monitor_name: all_queues.add(queue.monitor_name) celery_app.export_queues = configure_export(redis_client, app_config) for queue in celery_app.export_queues.values(): if queue.monitor_name: all_queues.add(queue.monitor_name)
def init_worker(celery_app, app_config, _db_rw=None, _db_ro=None, _geoip_db=None, _raven_client=None, _redis_client=None, _stats_client=None): """ Configure the passed in celery app, usually stored in :data:`ichnaea.async.app.celery_app`. Does connection, settings and queue setup. Attaches some additional functionality to the :class:`celery.Celery` instance. This is executed inside each forked worker process. The parameters starting with an underscore are test-only hooks to provide pre-configured connection objects. :param _db_ro: Ignored, read-only database connection isn't used. """ # make config file settings available celery_app.settings = app_config.asdict() # configure outside connections celery_app.db_rw = configure_db(app_config.get('database', 'rw_url'), _db=_db_rw) celery_app.raven_client = raven_client = configure_raven( app_config.get('sentry', 'dsn'), transport='threaded', _client=_raven_client) celery_app.redis_client = redis_client = configure_redis( app_config.get('cache', 'cache_url'), _client=_redis_client) celery_app.stats_client = configure_stats(app_config, _client=_stats_client) celery_app.geoip_db = configure_geoip(app_config.get('geoip', 'db_path'), raven_client=raven_client, _client=_geoip_db) # configure data / export queues celery_app.all_queues = all_queues = set([q.name for q in CELERY_QUEUES]) celery_app.data_queues = data_queues = configure_data(redis_client) for queue in data_queues.values(): if queue.monitor_name: all_queues.add(queue.monitor_name) celery_app.export_queues = configure_export(redis_client, app_config) for queue in celery_app.export_queues.values(): if queue.monitor_name: all_queues.add(queue.monitor_name)
def main(argv, _raven_client=None, _stats_client=None, _bucketname=None):
    """Generate and optionally upload datamap tiles (CLI entry point)."""
    # Example invocation:
    # bin/location_map --create --upload \
    #   --output=ichnaea/content/static/tiles/
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Generate and upload datamap tiles.')
    parser.add_argument('--create', action='store_true',
                        help='Create tiles?')
    parser.add_argument('--upload', action='store_true',
                        help='Upload tiles to S3?')
    parser.add_argument('--concurrency', default=2,
                        help='How many concurrent processes to use?')
    parser.add_argument('--output',
                        help='Optional directory for output files.')
    args = parser.parse_args(argv[1:])

    # Guard clause: without --create there is nothing to do.
    if not args.create:  # pragma: no cover
        parser.print_help()
        return

    raven = configure_raven(transport='sync', _client=_raven_client)
    stats = configure_stats(_client=_stats_client)

    bucketname = _bucketname
    if not _bucketname:  # pragma: no cover
        bucketname = ASSET_BUCKET
    if bucketname:
        bucketname = bucketname.strip('/')

    upload = bool(args.upload) if args.upload else False
    concurrency = (int(args.concurrency) if args.concurrency
                   else billiard.cpu_count())
    output = os.path.abspath(args.output) if args.output else None

    try:
        with stats.timed('datamaps', tags=['func:main']):
            generate(bucketname, raven, stats, upload=upload,
                     concurrency=concurrency, output=output)
    except Exception:  # pragma: no cover
        raven.captureException()
        raise
def init_worker(celery_app, app_config,
                _db_rw=None, _db_ro=None, _geoip_db=None,
                _raven_client=None, _redis_client=None, _stats_client=None):
    """Configure the celery app inside a freshly forked worker process.

    The parameters starting with an underscore are test-only hooks to
    provide pre-configured connection objects.
    """
    # currently neither a db_ro nor geoip_db are set up

    # make config file settings available
    celery_app.settings = app_config.asdict()

    # configure data / export queues
    celery_app.all_queues = all_queues = set([q.name for q in CELERY_QUEUES])

    celery_app.data_queues = data_queues = {
        'cell_area_update': 'update_cell_lac',
    }
    for value in data_queues.values():
        all_queues.add(value)

    # Each 'export:<name>' config section describes one export queue.
    celery_app.export_queues = export_queues = {}
    for section_name in app_config.sections():
        if section_name.startswith('export:'):
            section = app_config.get_map(section_name)
            name = section_name.split(':')[1]
            queue_name = EXPORT_QUEUE_PREFIX + name
            export_queues[name] = {
                'redis_key': queue_name,
            }
            all_queues.add(queue_name)
            for key, value in section.items():
                if key == 'batch':
                    # batch size is the only numeric setting
                    export_queues[name][key] = int(value)
                else:
                    export_queues[name][key] = value

    # configure outside connections
    celery_app.db_rw = configure_db(app_config.get('ichnaea', 'db_master'),
                                    _db=_db_rw)
    celery_app.raven_client = configure_raven(app_config.get(
        'ichnaea', 'sentry_dsn'), transport='threaded', _client=_raven_client)
    celery_app.redis_client = configure_redis(app_config.get(
        'ichnaea', 'redis_url'), _client=_redis_client)
    celery_app.stats_client = configure_stats(app_config.get(
        'ichnaea', 'statsd_host'), _client=_stats_client)
def main(argv, _raven_client=None, _stats_client=None, _bucketname=None):
    """Command line entry point: generate and optionally upload datamap tiles.

    :param argv: Full argument vector; ``argv[0]`` is used as the program name.
    :param _raven_client: Test-only hook, a pre-configured Raven client.
    :param _stats_client: Test-only hook, a pre-configured stats client.
    :param _bucketname: Test-only hook, overrides the configured asset bucket.
    """
    # run for example via:
    # bin/location_map --create --upload \
    #   --output=ichnaea/content/static/tiles/
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Generate and upload datamap tiles.')
    parser.add_argument('--create', action='store_true',
                        help='Create tiles?')
    parser.add_argument('--upload', action='store_true',
                        help='Upload tiles to S3?')
    parser.add_argument('--concurrency', default=2,
                        help='How many concurrent processes to use?')
    parser.add_argument('--output',
                        help='Optional directory for output files.')
    args = parser.parse_args(argv[1:])

    if args.create:
        raven_client = configure_raven(
            transport='sync', _client=_raven_client)
        stats_client = configure_stats(_client=_stats_client)

        bucketname = _bucketname
        if not _bucketname:  # pragma: no cover
            bucketname = ASSET_BUCKET
        if bucketname:
            # S3 bucket names must not carry surrounding slashes.
            bucketname = bucketname.strip('/')

        upload = False
        if args.upload:
            upload = bool(args.upload)

        # NOTE(review): --concurrency defaults to 2 (truthy), so the
        # cpu_count() fallback only takes effect for --concurrency=0.
        concurrency = billiard.cpu_count()
        if args.concurrency:
            concurrency = int(args.concurrency)

        output = None
        if args.output:
            output = os.path.abspath(args.output)

        try:
            with stats_client.timed('datamaps', tags=['func:main']):
                generate(bucketname, raven_client, stats_client,
                         upload=upload,
                         concurrency=concurrency,
                         output=output)
        except Exception:  # pragma: no cover
            raven_client.captureException()
            raise
    else:  # pragma: no cover
        parser.print_help()
def main(global_config, app_config, init=False, _db_rw=None, _db_ro=None, _geoip_db=None, _raven_client=None, _redis_client=None, _stats_client=None): configure_logging() # make config file settings available config = Configurator(settings=app_config.asdict()) # add support for pt templates config.include('pyramid_chameleon') configure_content(config) configure_service(config) # configure outside connections registry = config.registry registry.db_rw = configure_db( app_config.get('ichnaea', 'db_master'), _db=_db_rw) registry.db_ro = configure_db( app_config.get('ichnaea', 'db_slave'), _db=_db_ro) registry.raven_client = raven_client = configure_raven( app_config.get('ichnaea', 'sentry_dsn'), transport='gevent', _client=_raven_client) registry.redis_client = configure_redis( app_config.get('ichnaea', 'redis_url'), _client=_redis_client) registry.stats_client = configure_stats( app_config.get('ichnaea', 'statsd_host'), _client=_stats_client) registry.geoip_db = configure_geoip( app_config.get('ichnaea', 'geoip_db_path'), raven_client=raven_client, _client=_geoip_db) config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW) config.add_tween('ichnaea.log.log_tween_factory', under=EXCVIEW) config.add_request_method(db_rw_session, property=True) config.add_request_method(db_ro_session, property=True) # replace json renderer with custom json variant config.add_renderer('json', customjson.Renderer()) # Should we try to initialize and establish the outbound connections? if init: # pragma: no cover registry.db_ro.ping() registry.redis_client.ping() registry.stats_client.ping() return config.make_wsgi_app()
def cmd_clitest(ctx):
    """Run Sentry test through cli."""
    def _green(text):
        # Every status line from this command is rendered in green.
        click.echo(click.style(text, fg="green"))

    if not settings("sentry_dsn"):
        _green("SENTRY_DSN is not configured so this will use DebugRavenClient.")
    msg = "Testing Sentry configuration via cli (%s)" % str(
        datetime.datetime.now())
    _green("Using message: %s" % msg)
    _green("Building Raven client...")
    client = configure_raven(transport="sync", tags={"app": "sentry_test"})
    _green("Sending message...")
    client.captureMessage(msg)
def init_worker(celery_app, app_config,
                _db_rw=None, _db_ro=None, _geoip_db=None,
                _raven_client=None, _redis_client=None, _stats_client=None):
    """Configure the celery app inside a freshly forked worker process.

    The parameters starting with an underscore are test-only hooks to
    provide pre-configured connection objects.
    """
    # currently neither a db_ro nor geoip_db are set up

    # make config file settings available
    celery_app.settings = app_config.asdict()

    # configure data / export queues
    celery_app.all_queues = all_queues = set([q.name for q in CELERY_QUEUES])

    celery_app.data_queues = data_queues = {
        'cell_area_update': 'update_cell_lac',
    }
    for value in data_queues.values():
        all_queues.add(value)

    # Each 'export:<name>' config section describes one export queue.
    celery_app.export_queues = export_queues = {}
    for section_name in app_config.sections():
        if section_name.startswith('export:'):
            section = app_config.get_map(section_name)
            name = section_name.split(':')[1]
            queue_name = EXPORT_QUEUE_PREFIX + name
            export_queues[name] = {
                'redis_key': queue_name,
            }
            all_queues.add(queue_name)
            for key, value in section.items():
                if key == 'batch':
                    # batch size is the only numeric setting
                    export_queues[name][key] = int(value)
                else:
                    export_queues[name][key] = value

    # configure outside connections
    celery_app.db_rw = configure_db(
        app_config.get('ichnaea', 'db_master'), _db=_db_rw)
    # NOTE(review): no transport= passed — the Raven client uses its
    # default transport here; confirm this is intentional.
    celery_app.raven_client = configure_raven(
        app_config.get('ichnaea', 'sentry_dsn'), _client=_raven_client)
    celery_app.redis_client = configure_redis(
        app_config.get('ichnaea', 'redis_url'), _client=_redis_client)
    celery_app.stats_client = configure_stats(
        app_config.get('ichnaea', 'statsd_host'), _client=_stats_client)
def init_worker(celery_app, _db_rw=None, _db_ro=None, _geoip_db=None,
                _raven_client=None, _redis_client=None, _stats_client=None):
    """
    Configure the passed in celery app, usually stored in
    :data:`ichnaea.async.app.celery_app`.

    Does connection, settings and queue setup. Attaches some
    additional functionality to the :class:`celery.Celery` instance.

    This is executed inside each forked worker process.

    The parameters starting with an underscore are test-only hooks
    to provide pre-configured connection objects.

    :param _db_ro: Ignored, read-only database connection isn't used.
    """
    # configure outside connections
    celery_app.db_rw = configure_db(
        celery_app.app_config.get('database', 'rw_url'), _db=_db_rw)
    celery_app.raven_client = raven_client = configure_raven(
        celery_app.app_config.get('sentry', 'dsn'),
        transport='threaded', _client=_raven_client)
    celery_app.redis_client = redis_client = configure_redis(
        celery_app.app_config.get('cache', 'cache_url'),
        _client=_redis_client)
    celery_app.stats_client = configure_stats(
        celery_app.app_config, _client=_stats_client)
    celery_app.geoip_db = configure_geoip(
        celery_app.app_config.get('geoip', 'db_path'),
        raven_client=raven_client, _client=_geoip_db)

    # configure data queues
    celery_app.all_queues = all_queues = set([q.name for q in CELERY_QUEUES])
    celery_app.data_queues = data_queues = configure_data(redis_client)
    # BUG FIX: the previous ``all_queues = all_queues.union(...)`` rebound
    # only the local name, so the data-queue keys never reached
    # ``celery_app.all_queues``. Mutate the shared set in place instead.
    all_queues.update(
        queue.key for queue in data_queues.values() if queue.key)
def init_worker(celery_app, _db=None, _geoip_db=None,
                _raven_client=None, _redis_client=None, _stats_client=None):
    """
    Configure the passed in celery app, usually stored in
    :data:`ichnaea.async.app.celery_app`.

    Does connection, settings and queue setup. Attaches some
    additional functionality to the :class:`celery.Celery` instance.

    This is executed inside each forked worker process.

    The parameters starting with an underscore are test-only hooks
    to provide pre-configured connection objects.
    """
    # configure outside connections
    celery_app.db = configure_db('rw', _db=_db)
    celery_app.raven_client = raven_client = configure_raven(
        transport='threaded', _client=_raven_client)
    celery_app.redis_client = redis_client = configure_redis(
        _client=_redis_client)
    celery_app.stats_client = configure_stats(_client=_stats_client)
    celery_app.geoip_db = configure_geoip(raven_client=raven_client,
                                          _client=_geoip_db)

    # configure data queues
    celery_app.all_queues = all_queues = set([q.name for q in TASK_QUEUES])
    celery_app.data_queues = data_queues = configure_data(redis_client)
    # BUG FIX: the previous ``all_queues = all_queues.union(...)`` rebound
    # only the local name, so the data-queue keys never reached
    # ``celery_app.all_queues``. Mutate the shared set in place instead.
    all_queues.update(
        queue.key for queue in data_queues.values() if queue.key)
def init_worker(celery_app, app_config,
                _db_rw=None, _db_ro=None, _geoip_db=None,
                _raven_client=None, _redis_client=None, _stats_client=None):
    """Configure the celery app inside a freshly forked worker process.

    The parameters starting with an underscore are test-only hooks to
    provide pre-configured connection objects.
    """
    # currently db_ro is not set up

    # make config file settings available
    celery_app.settings = app_config.asdict()

    # configure outside connections
    celery_app.db_rw = configure_db(
        app_config.get('ichnaea', 'db_master'), _db=_db_rw)
    celery_app.raven_client = raven_client = configure_raven(
        app_config.get('ichnaea', 'sentry_dsn'),
        transport='threaded', _client=_raven_client)
    celery_app.redis_client = redis_client = configure_redis(
        app_config.get('ichnaea', 'redis_url'), _client=_redis_client)
    celery_app.stats_client = configure_stats(
        app_config.get('ichnaea', 'statsd_host'), _client=_stats_client)
    celery_app.geoip_db = configure_geoip(
        app_config.get('ichnaea', 'geoip_db_path'),
        raven_client=raven_client, _client=_geoip_db)

    # configure data / export queues
    celery_app.all_queues = all_queues = set([q.name for q in CELERY_QUEUES])

    celery_app.data_queues = data_queues = configure_data(redis_client)
    for queue in data_queues.values():
        if queue.monitor_name:
            # NOTE(review): monitor_name presumably feeds queue-size
            # monitoring — confirm against the monitoring tasks.
            all_queues.add(queue.monitor_name)

    celery_app.export_queues = configure_export(redis_client, app_config)
    for queue in celery_app.export_queues.values():
        if queue.monitor_name:
            all_queues.add(queue.monitor_name)
def setUpClass(cls):
    """Replace the real logging backends with in-memory debug clients."""
    super(LogTestCase, cls).setUpClass()
    # An empty DSN/config plus explicit debug clients keeps all log
    # traffic local to the test process.
    debug_raven = DebugRavenClient()
    debug_stats = DebugStatsClient()
    cls.raven_client = configure_raven('', _client=debug_raven)
    cls.stats_client = configure_stats('', _client=debug_stats)
def setup_logging(cls):
    """Attach debug Raven/stats clients to *cls* for testing."""
    # Debug stand-ins record events in memory instead of emitting them
    # to Sentry or statsd; an empty string disables the real config.
    cls.stats_client = configure_stats('', _client=DebugStatsClient())
    cls.raven_client = configure_raven('', _client=DebugRavenClient())
def main(argv, _db_rw=None, _raven_client=None, _stats_client=None):
    """Command line entry point: generate and optionally upload datamap tiles.

    :param argv: Full argument vector; ``argv[0]`` is used as the program name.
    :param _db_rw: Test-only hook, a pre-configured read-write database.
    :param _raven_client: Test-only hook, a pre-configured Raven client.
    :param _stats_client: Test-only hook, a pre-configured stats client.
    """
    # run for example via:
    # bin/location_map --create --upload --datamaps=/path/to/datamaps/ \
    #   --output=ichnaea/content/static/tiles/
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Generate and upload datamap tiles.')
    parser.add_argument('--create', action='store_true',
                        help='Create tiles.')
    parser.add_argument('--upload', action='store_true',
                        help='Upload tiles to S3.')
    parser.add_argument('--concurrency', default=2,
                        help='How many concurrent render processes to use?')
    parser.add_argument('--datamaps',
                        help='Directory of the datamaps tools.')
    parser.add_argument('--output',
                        help='Optional directory for local tile output.')
    args = parser.parse_args(argv[1:])

    if args.create:
        conf = read_config()
        if _db_rw:
            db = _db_rw
        else:  # pragma: no cover
            db = Database(conf.get('ichnaea', 'db_master'))
        # S3 bucket names must not carry surrounding slashes.
        bucketname = conf.get('ichnaea', 's3_assets_bucket').strip('/')
        raven_client = configure_raven(
            conf.get('ichnaea', 'sentry_dsn'),
            transport='sync', _client=_raven_client)
        stats_client = configure_stats(
            conf.get('ichnaea', 'statsd_host'), _client=_stats_client)

        upload = False
        if args.upload:  # pragma: no cover
            upload = bool(args.upload)

        concurrency = 2
        if args.concurrency:
            concurrency = int(args.concurrency)

        datamaps = ''
        if args.datamaps:
            datamaps = os.path.abspath(args.datamaps)

        output = None
        if args.output:
            output = os.path.abspath(args.output)

        try:
            # Time the whole generation run.
            with stats_client.timer('datamaps.total_time'):
                generate(db, bucketname, raven_client, stats_client,
                         upload=upload,
                         concurrency=concurrency,
                         datamaps=datamaps,
                         output=output)
        except Exception:  # pragma: no cover
            raven_client.captureException()
            raise
    else:  # pragma: no cover
        parser.print_help()
def raven_client():
    """Yield a Raven client that uses the synchronous transport."""
    # The sync transport makes captured events visible immediately,
    # which keeps test assertions deterministic.
    yield configure_raven(transport="sync")
def raven_client():
    """Fixture: provide a Raven client configured with the sync transport."""
    client = configure_raven(transport='sync')
    yield client
def main(global_config, app_config, init=False, _db_rw=None, _db_ro=None, _geoip_db=None, _raven_client=None, _redis_client=None, _stats_client=None): configure_logging() # make config file settings available config = Configurator(settings=app_config.asdict()) # add support for pt templates config.include('pyramid_chameleon') configure_content(config) configure_service(config) # configure outside connections registry = config.registry registry.db_rw = configure_db(app_config.get('ichnaea', 'db_master'), _db=_db_rw) registry.db_ro = configure_db(app_config.get('ichnaea', 'db_slave'), _db=_db_ro) registry.raven_client = raven_client = configure_raven( app_config.get('ichnaea', 'sentry_dsn'), transport='gevent', _client=_raven_client) registry.redis_client = configure_redis(app_config.get( 'ichnaea', 'redis_url'), _client=_redis_client) registry.stats_client = configure_stats(app_config.get( 'ichnaea', 'statsd_host'), _client=_stats_client) registry.geoip_db = configure_geoip(app_config.get('ichnaea', 'geoip_db_path'), raven_client=raven_client, _client=_geoip_db) config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW) config.add_tween('ichnaea.log.log_tween_factory', under=EXCVIEW) config.add_request_method(db_rw_session, property=True) config.add_request_method(db_ro_session, property=True) # replace json renderer with custom json variant config.add_renderer('json', customjson.Renderer()) # Should we try to initialize and establish the outbound connections? if init: # pragma: no cover registry.db_ro.ping() registry.redis_client.ping() registry.stats_client.ping() return config.make_wsgi_app()
def main(
    ping_connections=False,
    _db=None,
    _geoip_db=None,
    _http_session=None,
    _raven_client=None,
    _redis_client=None,
    _position_searcher=None,
    _region_searcher=None,
):
    """
    Configure the web app stored in :data:`ichnaea.webapp.app._APP`.

    Does connection, logging and view config setup. Attaches some
    additional functionality to the :class:`pyramid.registry.Registry`
    instance.

    At startup ping all outbound connections like the database
    once, to ensure they are actually up and responding.

    The parameters starting with an underscore are test-only hooks
    to provide pre-configured connection objects.

    :param ping_connections: If True, ping and test outside connections.
    :type ping_connections: bool

    :returns: A configured WSGI app, the result of calling
              :meth:`pyramid.config.Configurator.make_wsgi_app`.
    """
    configure_logging()

    config = Configurator()
    check_config()

    # add support for pt templates
    config.include("pyramid_chameleon")

    # add a config setting to skip logging for some views
    config.registry.skip_logging = set()

    configure_api(config)
    configure_content(config)
    configure_monitor(config)

    # configure outside connections
    registry = config.registry

    # the webapp only ever needs a read-only database connection
    registry.db = configure_db("ro", _db=_db)

    registry.raven_client = raven_client = configure_raven(
        transport="gevent", tags={"app": "webapp"}, _client=_raven_client
    )

    registry.redis_client = redis_client = configure_redis(_client=_redis_client)

    configure_stats()

    registry.http_session = configure_http_session(_session=_http_session)

    registry.geoip_db = geoip_db = configure_geoip(
        raven_client=raven_client, _client=_geoip_db
    )

    # Needs to be the exact same as the *_incoming entries in taskapp.config.
    registry.data_queues = data_queues = {
        "update_incoming": DataQueue(
            "update_incoming", redis_client, "report", batch=100, compress=True
        )
    }

    for name, func, default in (
        ("position_searcher", configure_position_searcher, _position_searcher),
        ("region_searcher", configure_region_searcher, _region_searcher),
    ):
        searcher = func(
            geoip_db=geoip_db,
            raven_client=raven_client,
            redis_client=redis_client,
            data_queues=data_queues,
            _searcher=default,
        )
        setattr(registry, name, searcher)

    config.add_tween("ichnaea.db.db_tween_factory", under=EXCVIEW)
    config.add_tween("ichnaea.log.log_tween_factory", under=EXCVIEW)
    config.add_request_method(db_session, property=True)

    # freeze skip logging set
    config.registry.skip_logging = frozenset(config.registry.skip_logging)

    # Should we try to initialize and establish the outbound connections?
    if ping_connections:
        with db_worker_session(registry.db, commit=False) as session:
            ping_session(session)
        registry.redis_client.ping()

    return config.make_wsgi_app()
def main(app_config, ping_connections=False,
         _db_rw=None, _db_ro=None, _geoip_db=None, _http_session=None,
         _raven_client=None, _redis_client=None, _stats_client=None,
         _country_searcher=None, _position_searcher=None):
    """
    Configure the web app stored in :data:`ichnaea.webapp.app._APP`.

    Does connection, logging and view config setup. Attaches some
    additional functionality to the :class:`pyramid.registry.Registry`
    instance.

    At startup ping all outbound connections like the database
    once, to ensure they are actually up and responding.

    The parameters starting with an underscore are test-only hooks
    to provide pre-configured connection objects.

    :param app_config: The parsed application ini.
    :type app_config: :class:`ichnaea.config.Config`

    :param ping_connections: If True, ping and test outside connections.
    :type ping_connections: bool

    :returns: A configured WSGI app, the result of calling
              :meth:`pyramid.config.Configurator.make_wsgi_app`.
    """
    configure_logging()

    # make config file settings available
    config = Configurator(settings=app_config.asdict())

    # add support for pt templates
    config.include('pyramid_chameleon')

    # add a config setting to skip logging for some views
    config.registry.skip_logging = set()

    configure_api(config)
    configure_content(config)
    configure_monitor(config)

    # configure outside connections
    registry = config.registry

    registry.db_rw = configure_db(
        app_config.get('database', 'rw_url'), _db=_db_rw)
    registry.db_ro = configure_db(
        app_config.get('database', 'ro_url'), _db=_db_ro)

    registry.raven_client = raven_client = configure_raven(
        app_config.get('sentry', 'dsn'),
        transport='gevent', _client=_raven_client)

    registry.redis_client = redis_client = configure_redis(
        app_config.get('cache', 'cache_url'), _client=_redis_client)

    registry.stats_client = stats_client = configure_stats(
        app_config, _client=_stats_client)

    registry.http_session = configure_http_session(_session=_http_session)

    registry.geoip_db = geoip_db = configure_geoip(
        app_config.get('geoip', 'db_path'),
        raven_client=raven_client, _client=_geoip_db)

    # Build the country and position searchers with shared connections.
    for name, func, default in (
            ('country_searcher', configure_country_searcher,
             _country_searcher),
            ('position_searcher', configure_position_searcher,
             _position_searcher)):
        searcher = func(app_config,
                        geoip_db=geoip_db, raven_client=raven_client,
                        redis_client=redis_client, stats_client=stats_client,
                        _searcher=default)
        setattr(registry, name, searcher)

    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_tween('ichnaea.log.log_tween_factory', under=EXCVIEW)
    config.add_request_method(db_rw_session, property=True)
    config.add_request_method(db_ro_session, property=True)

    # Add special JSON renderer with nicer float representation
    config.add_renderer('floatjson', floatjson.FloatJSONRenderer())

    # freeze skip logging set
    config.registry.skip_logging = frozenset(config.registry.skip_logging)

    # Should we try to initialize and establish the outbound connections?
    if ping_connections:  # pragma: no cover
        registry.db_ro.ping()
        registry.redis_client.ping()

    return config.make_wsgi_app()
def main(app_config, ping_connections=False, _db_rw=None, _db_ro=None,
         _geoip_db=None, _raven_client=None, _redis_client=None,
         _stats_client=None):
    """
    Build and return the configured WSGI application.

    Sets up logging, Pyramid view/renderer configuration and the outside
    connections (databases, Sentry, Redis, statsd, GeoIP), attaching each
    client to the :class:`pyramid.registry.Registry` instance.

    Keyword arguments starting with an underscore are test-only hooks for
    injecting pre-built connection objects.

    :param app_config: The parsed application ini
        (:class:`ichnaea.config.Config`).
    :param ping_connections: If True, ping each outbound connection once
        at startup to verify it is reachable.
    :returns: The WSGI app produced by
        :meth:`pyramid.config.Configurator.make_wsgi_app`.
    """
    configure_logging()

    # Expose the ini file settings to Pyramid and enable chameleon (pt)
    # template support.
    config = Configurator(settings=app_config.asdict())
    config.include('pyramid_chameleon')

    configure_api(config)
    configure_content(config)
    configure_monitor(config)

    def setting(name):
        # All connection settings live in the single [ichnaea] section.
        return app_config.get('ichnaea', name)

    registry = config.registry
    registry.db_rw = configure_db(setting('db_master'), _db=_db_rw)
    registry.db_ro = configure_db(setting('db_slave'), _db=_db_ro)
    raven = configure_raven(
        setting('sentry_dsn'), transport='gevent', _client=_raven_client)
    registry.raven_client = raven
    registry.redis_client = configure_redis(
        setting('redis_url'), _client=_redis_client)
    registry.stats_client = configure_stats(
        setting('statsd_host'), _client=_stats_client)
    registry.geoip_db = configure_geoip(
        setting('geoip_db_path'), raven_client=raven, _client=_geoip_db)

    # DB session and logging tweens both run under the exception view so
    # failures are still captured.
    for tween in ('ichnaea.db.db_tween_factory',
                  'ichnaea.log.log_tween_factory'):
        config.add_tween(tween, under=EXCVIEW)
    config.add_request_method(db_rw_session, property=True)
    config.add_request_method(db_ro_session, property=True)

    # replace json renderer with custom json variant
    config.add_renderer('json', customjson.Renderer())

    # Optionally verify the outbound connections are actually responding.
    if ping_connections:  # pragma: no cover
        registry.db_ro.ping()
        registry.redis_client.ping()
        registry.stats_client.ping()

    return config.make_wsgi_app()
def main(app_config, ping_connections=False,
         _db_rw=None, _db_ro=None, _geoip_db=None, _http_session=None,
         _raven_client=None, _redis_client=None, _stats_client=None,
         _position_searcher=None, _region_searcher=None):
    """
    Configure the web app stored in :data:`ichnaea.webapp.app._APP`.

    Does connection, logging and view config setup. Attaches some
    additional functionality to the
    :class:`pyramid.registry.Registry` instance.

    At startup ping all outbound connections like the database once,
    to ensure they are actually up and responding.

    The parameters starting with an underscore are test-only hooks to
    provide pre-configured connection objects.

    :param app_config: The parsed application ini.
    :type app_config: :class:`ichnaea.config.Config`

    :param ping_connections: If True, ping and test outside connections.
    :type ping_connections: bool

    :returns: A configured WSGI app, the result of calling
              :meth:`pyramid.config.Configurator.make_wsgi_app`.
    """
    configure_logging()

    # make config file settings available
    config = Configurator(settings=app_config.asdict())

    # add support for pt templates
    config.include('pyramid_chameleon')

    # add a config setting to skip logging for some views; views register
    # themselves into this set, which is frozen further below
    config.registry.skip_logging = set()

    configure_api(config)
    configure_content(config)
    configure_monitor(config)

    # configure outside connections; each client is kept on the registry
    # so views and tweens can reach them via request.registry
    registry = config.registry

    registry.db_rw = configure_db(
        app_config.get('database', 'rw_url'), _db=_db_rw)
    registry.db_ro = configure_db(
        app_config.get('database', 'ro_url'), _db=_db_ro)

    registry.raven_client = raven_client = configure_raven(
        app_config.get('sentry', 'dsn'),
        transport='gevent', _client=_raven_client)

    registry.redis_client = redis_client = configure_redis(
        app_config.get('cache', 'cache_url'), _client=_redis_client)

    registry.stats_client = stats_client = configure_stats(
        app_config, _client=_stats_client)

    registry.http_session = configure_http_session(_session=_http_session)

    registry.geoip_db = geoip_db = configure_geoip(
        app_config.get('geoip', 'db_path'),
        raven_client=raven_client, _client=_geoip_db)

    # Build the two query searchers, wiring in the shared connections,
    # and attach each under its name on the registry.
    for name, func, default in (
            ('position_searcher', configure_position_searcher,
             _position_searcher),
            ('region_searcher', configure_region_searcher,
             _region_searcher)):
        searcher = func(app_config, geoip_db=geoip_db,
                        raven_client=raven_client,
                        redis_client=redis_client,
                        stats_client=stats_client,
                        _searcher=default)
        setattr(registry, name, searcher)

    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_tween('ichnaea.log.log_tween_factory', under=EXCVIEW)
    # NOTE(review): only the read-only session is exposed as a request
    # property here, while db_rw is still attached to the registry —
    # presumably writers go through the registry directly; confirm.
    config.add_request_method(db_ro_session, property=True)

    # Add special JSON renderer with nicer float representation
    config.add_renderer('floatjson', floatjson.FloatJSONRenderer())

    # Add text-as-JS renderer.
    config.add_renderer('js', renderers.JSRenderer())

    # freeze skip logging set
    config.registry.skip_logging = frozenset(config.registry.skip_logging)

    # Should we try to initialize and establish the outbound connections?
    if ping_connections:  # pragma: no cover
        registry.db_ro.ping()
        registry.redis_client.ping()

    return config.make_wsgi_app()
def main(ping_connections=False,
         _db=None, _geoip_db=None, _http_session=None,
         _raven_client=None, _redis_client=None, _stats_client=None,
         _position_searcher=None, _region_searcher=None):
    """
    Configure the web app stored in :data:`ichnaea.webapp.app._APP`.

    Does connection, logging and view config setup. Attaches some
    additional functionality to the
    :class:`pyramid.registry.Registry` instance.

    At startup ping all outbound connections like the database once,
    to ensure they are actually up and responding.

    The parameters starting with an underscore are test-only hooks to
    provide pre-configured connection objects.

    :param ping_connections: If True, ping and test outside connections.
    :type ping_connections: bool

    :returns: A configured WSGI app, the result of calling
        :meth:`pyramid.config.Configurator.make_wsgi_app`.
    """
    configure_logging()

    config = Configurator()

    # add support for pt templates
    config.include('pyramid_chameleon')

    # add a config setting to skip logging for some views; views register
    # themselves into this set, which is frozen further below
    config.registry.skip_logging = set()

    configure_api(config)
    configure_content(config)
    configure_monitor(config)

    # configure outside connections; each client is kept on the registry
    # so views and tweens can reach them via request.registry
    registry = config.registry

    # Single read-only database connection for the web tier.
    registry.db = configure_db('ro', _db=_db)

    registry.raven_client = raven_client = configure_raven(
        transport='gevent', _client=_raven_client)

    registry.redis_client = redis_client = configure_redis(
        _client=_redis_client)

    registry.stats_client = stats_client = configure_stats(
        _client=_stats_client)

    registry.http_session = configure_http_session(_session=_http_session)

    registry.geoip_db = geoip_db = configure_geoip(
        raven_client=raven_client, _client=_geoip_db)

    # Needs to be the exact same as the *_incoming entries in async.config.
    registry.data_queues = data_queues = {
        'update_incoming': DataQueue('update_incoming', redis_client,
                                     batch=100, compress=True),
    }

    # Build the two query searchers, wiring in the shared connections,
    # and attach each under its name on the registry.
    for name, func, default in (
            ('position_searcher', configure_position_searcher,
             _position_searcher),
            ('region_searcher', configure_region_searcher,
             _region_searcher)):
        searcher = func(geoip_db=geoip_db, raven_client=raven_client,
                        redis_client=redis_client,
                        stats_client=stats_client,
                        data_queues=data_queues, _searcher=default)
        setattr(registry, name, searcher)

    # Per-request DB session management and logging run under the
    # exception view tween so errors are still captured/logged.
    config.add_tween('ichnaea.db.db_tween_factory', under=EXCVIEW)
    config.add_tween('ichnaea.log.log_tween_factory', under=EXCVIEW)
    config.add_request_method(db_session, property=True)

    # freeze skip logging set
    config.registry.skip_logging = frozenset(config.registry.skip_logging)

    # Should we try to initialize and establish the outbound connections?
    if ping_connections:  # pragma: no cover
        registry.db.ping()
        registry.redis_client.ping()

    return config.make_wsgi_app()
def main(_argv=None, _raven_client=None, _bucket_name=None):
    """
    Command-line entry point for generating and uploading datamap tiles.

    :param _argv: Simulated sys.argv[1:] arguments for testing
    :param _raven_client: override Raven client for testing
    :param _bucket_name: override S3 bucket name for testing
    :return: A system exit code, 0 on success and 1 on a usage error,
        an unusable bucket, or an interrupted run
    :rtype: int
    """
    # Parse the command line
    parser = get_parser()
    args = parser.parse_args(_argv)
    create = args.create
    upload = args.upload
    concurrency = args.concurrency
    verbose = args.verbose

    # Setup basic services
    if verbose:
        configure_logging(local_dev_env=True, logging_level="DEBUG")
    else:
        configure_logging()
    raven_client = configure_raven(
        transport="sync", tags={"app": "datamap"}, _client=_raven_client
    )

    # Check consistent output_dir, create, upload
    exit_early = 0
    output_dir = None
    if args.output:
        output_dir = os.path.abspath(args.output)
        tiles_dir = os.path.join(output_dir, "tiles")
        if not create and not os.path.isdir(tiles_dir):
            # Uploading from a missing tiles dir would sync an empty tree
            # and delete every object in the bucket.
            LOG.error(
                "The tiles subfolder of the --output directory should already"
                " exist when calling --upload without --create, to avoid"
                " deleting files from the S3 bucket.",
                tiles_dir=tiles_dir,
            )
            exit_early = 1
    else:
        if create and not upload:
            LOG.error(
                "The --output argument is required with --create but without"
                " --upload, since the temporary folder is removed at exit."
            )
            exit_early = 1
        if upload and not create:
            LOG.error(
                "The --output argument is required with --upload but without"
                " --create, to avoid deleting all tiles in the S3 bucket."
            )
            exit_early = 1

    # Exit early with help message if error or nothing to do
    if exit_early or not (create or upload):
        parser.print_help()
        return exit_early

    # Determine the S3 bucket name; the test override wins over settings
    bucket_name = _bucket_name
    if not _bucket_name:
        bucket_name = settings("asset_bucket")
    if bucket_name:
        bucket_name = bucket_name.strip("/")

    # Check that the implied credentials are authorized to use the bucket
    if upload:
        if not bucket_name:
            LOG.error("Unable to determine upload bucket_name.")
            return 1
        else:
            works, fail_msg = check_bucket(bucket_name)
            if not works:
                LOG.error(
                    f"Bucket {bucket_name} can not be used for uploads: {fail_msg}"
                )
                return 1

    # Generate and upload the tiles
    success = True
    interrupted = False
    result = {}
    try:
        with Timer() as timer:
            if output_dir:
                result = generate(
                    output_dir,
                    bucket_name,
                    raven_client,
                    create=create,
                    upload=upload,
                    concurrency=concurrency,
                )
            else:
                # No --output given: render into a self-deleting temp dir
                with util.selfdestruct_tempdir() as temp_dir:
                    result = generate(
                        temp_dir,
                        bucket_name,
                        raven_client,
                        create=create,
                        upload=upload,
                        concurrency=concurrency,
                    )
    except KeyboardInterrupt:
        interrupted = True
        success = False
    except Exception:
        raven_client.captureException()
        success = False
        raise
    finally:
        # Emit one canonical log line summarizing the run, even on failure
        if create and upload:
            task = "generation and upload"
        elif create:
            task = "generation"
        else:
            task = "upload"
        if interrupted:
            complete = "interrupted"
        elif success:
            complete = "complete"
        else:
            complete = "failed"
        final_log = structlog.get_logger("canonical-log-line")
        final_log.info(
            f"Datamap tile {task} {complete} in {timer.duration_s:0.1f} seconds.",
            success=success,
            duration_s=timer.duration_s,
            script_name="ichnaea.scripts.datamap",
            create=create,
            upload=upload,
            concurrency=concurrency,
            bucket_name=bucket_name,
            **result,
        )
    # Bug fix: previously this returned 0 even after a KeyboardInterrupt
    # marked the run as failed; report the failure to the shell instead.
    # (The generic Exception path re-raises and never reaches here.)
    return 0 if success else 1
def raven_client():
    """Yield a Raven client wired to an in-memory debug transport."""
    debug_backend = DebugRavenClient()
    client = configure_raven(None, transport='sync', _client=debug_backend)
    yield client
def main(argv, _db_rw=None, _raven_client=None, _stats_client=None):
    """
    Command line entry point that renders datamap tiles and optionally
    uploads them to S3. Run for example via:

        bin/location_map --create --upload --datamaps=/path/to/datamaps/ \\
            --output=ichnaea/content/static/tiles/

    The underscore-prefixed keyword arguments are test-only hooks for
    injecting pre-built database, Raven and stats clients.
    """
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Generate and upload datamap tiles.')
    parser.add_argument('--create', action='store_true',
                        help='Create tiles.')
    parser.add_argument('--upload', action='store_true',
                        help='Upload tiles to S3.')
    parser.add_argument('--concurrency', default=2,
                        help='How many concurrent render processes to use?')
    parser.add_argument('--datamaps',
                        help='Directory of the datamaps tools.')
    parser.add_argument('--output',
                        help='Optional directory for local tile output.')
    args = parser.parse_args(argv[1:])

    # Nothing to do without --create; show usage instead.
    if not args.create:  # pragma: no cover
        parser.print_help()
        return

    conf = read_config()
    if _db_rw:
        db = _db_rw
    else:  # pragma: no cover
        db = Database(conf.get('ichnaea', 'db_master'))
    bucketname = conf.get('ichnaea', 's3_assets_bucket').strip('/')
    raven_client = configure_raven(conf.get('ichnaea', 'sentry_dsn'),
                                   _client=_raven_client)
    stats_client = configure_stats(conf.get('ichnaea', 'statsd_host'),
                                   _client=_stats_client)

    # Normalize the remaining optional arguments.
    upload = bool(args.upload)
    concurrency = int(args.concurrency) if args.concurrency else 2
    datamaps = os.path.abspath(args.datamaps) if args.datamaps else ''
    output = os.path.abspath(args.output) if args.output else None

    try:
        with stats_client.timer("datamaps.total_time"):
            generate(db, bucketname, raven_client, stats_client,
                     upload=upload, concurrency=concurrency,
                     datamaps=datamaps, output=output)
    except Exception:  # pragma: no cover
        # Report the failure to Sentry, then let the process die with it.
        raven_client.captureException()
        raise