Example #1
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Location Importer')

    parser.add_argument('source', help="The source file.")
    parser.add_argument('--userid', default=None,
                        help='Internal userid for attribution.')

    args = parser.parse_args(argv[1:])
    userid = None
    if args.userid is not None:
        userid = int(args.userid)

    settings = config().get_map('ichnaea')

    # configure databases incl. test override hooks
    if _db_master is None:
        db = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
            create=False,
        )
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source, userid=userid)
    print('Added a total of %s records.' % added)
    session.commit()
    return added
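The `_db_master` keyword is a test hook: passing a stand-in object skips the real `Database()` construction. A minimal sketch of that usage, where `FakeSession` and `FakeDatabase` are illustrative stubs, not part of the original code:

class FakeSession(object):
    def commit(self):
        pass

class FakeDatabase(object):
    def session(self):
        return FakeSession()

# load_file() still runs, but against the injected session, so the
# call is left commented here; it needs a readable source file:
# main(['importer', 'locations.csv', '--userid', '42'],
#      _db_master=FakeDatabase())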
Example #2
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(prog=argv[0],
                                     description='Initialize Ichnaea database')

    parser.add_argument('--initdb',
                        action='store_true',
                        help='Initialize database')

    args = parser.parse_args(argv[1:])

    if args.initdb:
        from ichnaea import config
        conf = config()
        db_master = Database(conf.get('ichnaea', 'db_master'))
        engine = db_master.engine
        with engine.connect() as conn:
            trans = conn.begin()
            # create_all issues DDL through the engine, outside `trans`;
            # MySQL autocommits DDL statements in any case
            _Model.metadata.create_all(engine)
            trans.commit()

            # Now stamp the latest alembic version
            from alembic.config import Config
            from alembic import command
            import os
            ini = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
            alembic_ini = os.path.join(os.path.split(ini)[0], 'alembic.ini')
            alembic_cfg = Config(alembic_ini)
            command.stamp(alembic_cfg, "head")
            command.current(alembic_cfg)
    else:
        parser.print_help()
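`command.stamp(alembic_cfg, "head")` records the newest migration revision without running any migrations, so a freshly created schema is treated as already up to date. A sketch of invoking this entry point; the config path and script name are illustrative assumptions:

import os

# alembic.ini is expected to sit next to the ichnaea config file
# (see os.path.split(ini)[0] above)
os.environ['ICHNAEA_CFG'] = '/etc/ichnaea/ichnaea.ini'
main(['ichnaea_initdb', '--initdb'])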
Example #3
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(
        prog=argv[0], description='Initialize Ichnaea database')

    parser.add_argument('--initdb', action='store_true',
                        help='Initialize database')

    args = parser.parse_args(argv[1:])

    if args.initdb:
        from ichnaea import config
        # make sure content models are imported
        from ichnaea.content import models  # NOQA

        conf = config()
        db_master = Database(conf.get('ichnaea', 'db_master'))
        engine = db_master.engine
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            trans.commit()

            # Now stamp the latest alembic version
            from alembic.config import Config
            from alembic import command
            import os
            ini = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
            alembic_ini = os.path.join(os.path.split(ini)[0], 'alembic.ini')
            alembic_cfg = Config(alembic_ini)
            command.stamp(alembic_cfg, "head")
            command.current(alembic_cfg)
    else:
        parser.print_help()
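The only difference from Example #2 is the `from ichnaea.content import models` import, kept purely for its side effect: with SQLAlchemy's declarative base, `metadata.create_all` only emits tables for model classes that have actually been imported. A self-contained sketch of that behavior (the names here are illustrative, not from ichnaea):

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Thing(Base):  # defining (importing) the class registers its table
    __tablename__ = 'thing'
    id = Column(Integer, primary_key=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)  # creates 'thing' because Thing was imported
print(list(Base.metadata.tables))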
Example #4
def attach_database(app, _db_master=None):
    # called manually during tests
    settings = config().get_map('ichnaea')
    if _db_master is None:  # pragma: no cover
        db_master = Database(settings['db_master'])
    else:
        db_master = _db_master
    app.db_master = db_master
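A sketch of the test-time usage the `_db_master` hook enables, assuming a readable ichnaea configuration (since `config()` is called unconditionally); the fake objects are illustrative assumptions:

class FakeApp(object):
    pass

class FakeDatabase(object):
    pass

app = FakeApp()
attach_database(app, _db_master=FakeDatabase())  # skips the real Database()
assert isinstance(app.db_master, FakeDatabase)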
Example #5
def init_worker_process(signal, sender, **kw):  # pragma: no cover
    # called automatically when `celery worker` is started
    # get the app in the current worker process
    app = app_or_default()
    settings = config().get_map('ichnaea')
    attach_database(app, settings=settings)
    configure_s3_backup(app, settings=settings)
    configure_heka()
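The signature matches a Celery signal handler. Note that this revision passes `settings=` to `attach_database`, which the version shown in Example #4 does not accept; the excerpts come from different revisions of the codebase. A sketch of how the handler would typically be registered; `worker_init` and `worker_process_init` are standard Celery signals, but the registration site is an assumption:

from celery.signals import worker_init

# register the handler so Celery calls it when the worker boots;
# use worker_process_init instead for per-child-process setup
worker_init.connect(init_worker_process)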
Example #6
def configure_heka(registry_settings={}):
    # If a test client is defined just use that instead of whatever is
    # defined in the configuration
    if '_heka_client' in registry_settings:
        return registry_settings['_heka_client']

    # deal with konfig's include/extends syntax and construct a merged
    # file-like object from all the files
    merged_stream = StringIO()
    konfig = config()
    konfig.write(merged_stream)
    merged_stream.seek(0)

    client = get_heka_client()
    client = client_from_text_config(merged_stream.read(), 'heka', client)

    return client
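A sketch of the test override path; the fake client is an illustrative assumption. The `registry_settings={}` mutable default is safe here only because the dict is never mutated:

class FakeHekaClient(object):
    pass

# bypasses the file-based heka configuration entirely
client = configure_heka({'_heka_client': FakeHekaClient()})
assert isinstance(client, FakeHekaClient)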
Example #7
def main(argv, _db_master=None):
    parser = argparse.ArgumentParser(prog=argv[0],
                                     description='Location Importer')

    parser.add_argument('source', help="source file")
    args = parser.parse_args(argv[1:])
    settings = config().get_map('ichnaea')

    # configure databases incl. test override hooks
    if _db_master is None:
        db = Database(
            settings['db_master'],
            socket=settings.get('db_master_socket'),
            create=False,
        )
    else:
        db = _db_master
    session = db.session()
    added = load_file(session, args.source)
    print('Added %s records.' % added)
    session.commit()
    return added
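Because `main` takes `argv` explicitly, exposing it as a console script needs a small wrapper. A sketch; the entry-point name and module path are illustrative assumptions:

import sys

def console_entry():  # pragma: no cover
    return main(sys.argv)

# setup.py wiring (illustrative):
# entry_points={'console_scripts': [
#     'location_importer = ichnaea.importer:console_entry']}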
Example #8
def configure(celery=celery):
    conf = config()
    if conf.has_section('celery'):
        section = conf.get_map('celery')
    else:  # pragma: no cover
        # happens while building docs locally and on rtfd.org
        return

    database_options = {
        "pool_recycle": 3600,
        "pool_size": 10,
        "pool_timeout": 10,
        "isolation_level": "READ COMMITTED",
    }

    # testing overrides
    sqluri = os.environ.get('SQLURI', '')
    sqlsocket = os.environ.get('SQLSOCKET', '')

    if sqluri:
        broker_url = sqluri
        result_url = sqluri
    else:  # pragma: no cover
        broker_url = section['broker_url']
        result_url = section['result_url']

    broker_url = 'sqla+' + broker_url

    if sqlsocket:
        broker_socket = sqlsocket
        result_socket = sqlsocket  # use the socket override for results as well
    else:  # pragma: no cover
        broker_socket = section.get('broker_socket')
        result_socket = section.get('result_socket')

    broker_connect_args = {"charset": "utf8"}
    if broker_socket:
        broker_connect_args['unix_socket'] = broker_socket
    broker_options = database_options.copy()
    broker_options['connect_args'] = broker_connect_args

    result_connect_args = {"charset": "utf8"}
    if result_socket:
        result_connect_args['unix_socket'] = result_socket
    result_options = database_options.copy()
    result_options['connect_args'] = result_connect_args

    # testing setting
    always_eager = bool(os.environ.get('CELERY_ALWAYS_EAGER', False))

    celery.conf.update(
        # testing
        CELERY_ALWAYS_EAGER=always_eager,
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=always_eager,
        # broker
        BROKER_URL=broker_url,
        BROKER_TRANSPORT_OPTIONS=broker_options,
        # results
        CELERY_RESULT_BACKEND='database',
        CELERY_RESULT_DBURI=result_url,
        CELERY_RESULT_ENGINE_OPTIONS=result_options,
        # tasks
        CELERY_IMPORTS=CELERY_IMPORTS,
        # default to idempotent tasks
        CELERY_ACKS_LATE=True,
        # forward compatibility
        CELERYD_FORCE_EXECV=True,
        # optimization
        CELERY_DISABLE_RATE_LIMITS=True,
        # security
        CELERY_ACCEPT_CONTENT=['json'],
        CELERY_RESULT_SERIALIZER='json',
        CELERY_TASK_SERIALIZER='json',
        # schedule
        CELERYBEAT_LOG_LEVEL="INFO",
        CELERYBEAT_SCHEDULE=CELERYBEAT_SCHEDULE,
    )
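The SQLURI and SQLSOCKET environment variables let tests point both the broker and the result backend at a throwaway MySQL instance. A minimal sketch, with an illustrative URI and socket path:

import os

os.environ['SQLURI'] = 'mysql+pymysql://user:pass@localhost/test_ichnaea'
os.environ['SQLSOCKET'] = '/var/run/mysqld/mysqld.sock'
configure()
# kombu's SQLAlchemy transport is selected via the 'sqla+' prefix
assert celery.conf['BROKER_URL'].startswith('sqla+mysql+pymysql://')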
Example #9
def configure(celery=celery):
    conf = config()
    if conf.has_section('celery'):
        section = conf.get_map('celery')
    else:  # pragma: no cover
        # happens while building docs locally and on rtfd.org
        return

    database_options = {
        "pool_recycle": 3600,
        "pool_size": 10,
        "pool_timeout": 10,
        "isolation_level": "READ COMMITTED",
    }

    # testing overrides
    sqluri = os.environ.get('SQLURI', '')

    if sqluri:
        broker_url = sqluri
        result_url = sqluri
    else:  # pragma: no cover
        broker_url = section['broker_url']
        result_url = section['result_url']

    if 'pymysql' in broker_url:
        broker_url = 'sqla+' + broker_url

    broker_options = {}  # default, so broker_options is always bound below
    if 'pymysql' in broker_url:
        broker_connect_args = {"charset": "utf8"}
        broker_options = database_options.copy()
        broker_options['connect_args'] = broker_connect_args
    elif 'redis' in broker_url:
        broker_options['fanout_prefix'] = True
        broker_options['visibility_timeout'] = 3600

    if 'pymysql' in result_url:
        result_connect_args = {"charset": "utf8"}
        result_options = database_options.copy()
        result_options['connect_args'] = result_connect_args
        celery.conf.update(
            CELERY_RESULT_BACKEND='database',
            CELERY_RESULT_DBURI=result_url,
            CELERY_RESULT_ENGINE_OPTIONS=result_options,
        )
    elif 'redis' in result_url:
        celery.conf.update(
            CELERY_RESULT_BACKEND=result_url,
        )

    # testing setting
    always_eager = bool(os.environ.get('CELERY_ALWAYS_EAGER', False))

    celery.conf.update(
        # testing
        CELERY_ALWAYS_EAGER=always_eager,
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=always_eager,
        # broker
        BROKER_URL=broker_url,
        BROKER_TRANSPORT_OPTIONS=broker_options,
        # tasks
        CELERY_IMPORTS=CELERY_IMPORTS,
        # forward compatibility
        CELERYD_FORCE_EXECV=True,
        # optimization
        CELERYD_PREFETCH_MULTIPLIER=8,
        CELERY_DISABLE_RATE_LIMITS=True,
        CELERY_MESSAGE_COMPRESSION='gzip',
        # security
        CELERY_ACCEPT_CONTENT=['json'],
        CELERY_RESULT_SERIALIZER='json',
        CELERY_TASK_SERIALIZER='json',
        # schedule
        CELERYBEAT_LOG_LEVEL="WARNING",
        CELERYBEAT_SCHEDULE=CELERYBEAT_SCHEDULE,
    )
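This later revision branches on the URL scheme so Redis can serve as broker or result backend alongside MySQL. The eager-mode switch is the other test hook worth knowing; a sketch of using it:

import os

# run tasks inline and propagate their exceptions -- useful in unit tests
os.environ['CELERY_ALWAYS_EAGER'] = '1'
configure()
assert celery.conf['CELERY_ALWAYS_EAGER'] is True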