Example #1
def configure_db(app):
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

    if app.config['TESTING']:
        # tests do not actually use sqlite but run a postgres instance and
        # reconfigure flask-sqlalchemy to use that database.  by setting
        # a dummy uri explicitly instead of letting flask-sqlalchemy do
        # the exact same thing we avoid a warning when running tests.
        app.config.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite:///:memory:')
    else:
        if config.SQLALCHEMY_DATABASE_URI is None:
            raise Exception(
                "No proper SQLAlchemy store has been configured. Please edit your indico.conf"
            )

        app.config['SQLALCHEMY_DATABASE_URI'] = config.SQLALCHEMY_DATABASE_URI
        app.config['SQLALCHEMY_RECORD_QUERIES'] = False
        app.config['SQLALCHEMY_POOL_SIZE'] = config.SQLALCHEMY_POOL_SIZE
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = config.SQLALCHEMY_POOL_TIMEOUT
        app.config['SQLALCHEMY_POOL_RECYCLE'] = config.SQLALCHEMY_POOL_RECYCLE
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = config.SQLALCHEMY_MAX_OVERFLOW

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app)

    plugins_loaded.connect(lambda sender: configure_mappers(), app, weak=False)
    models_committed.connect(on_models_committed, app)
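
Note the setdefault() in the TESTING branch above: because an existing value wins, a test suite can point Flask-SQLAlchemy at a real PostgreSQL database simply by setting the URI before calling configure_db(), and the 'sqlite:///:memory:' placeholder never takes effect. A minimal sketch of that behaviour, assuming the Indico imports used by configure_db() are available (the test database name is illustrative):

from flask import Flask

app = Flask('indico-tests')
app.config['TESTING'] = True
# Set the real test database first; setdefault() in configure_db() will not
# overwrite an existing value, so the dummy SQLite URI is never used here.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///indico_test'
configure_db(app)
assert app.config['SQLALCHEMY_DATABASE_URI'] == 'postgresql:///indico_test'
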
Example #2
def configure_db(app):
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

    if not app.config['TESTING']:
        cfg = Config.getInstance()
        db_uri = cfg.getSQLAlchemyDatabaseURI()

        if db_uri is None:
            raise Exception(
                "No proper SQLAlchemy store has been configured. Please edit your indico.conf"
            )

        app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
        app.config['SQLALCHEMY_RECORD_QUERIES'] = False
        app.config['SQLALCHEMY_POOL_SIZE'] = cfg.getSQLAlchemyPoolSize()
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = cfg.getSQLAlchemyPoolTimeout()
        app.config['SQLALCHEMY_POOL_RECYCLE'] = cfg.getSQLAlchemyPoolRecycle()
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = cfg.getSQLAlchemyMaxOverflow()

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app)

    plugins_loaded.connect(lambda sender: configure_mappers(), app, weak=False)
    models_committed.connect(on_models_committed, app)
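
The pool settings above are plain Flask-SQLAlchemy configuration keys. A standalone sketch outside Indico, using the Flask-SQLAlchemy 2.x-style keys (in 3.x the individual pool keys are folded into SQLALCHEMY_ENGINE_OPTIONS); the URI and the numbers are illustrative:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///indico'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_POOL_SIZE'] = 5        # connections kept open in the pool
app.config['SQLALCHEMY_POOL_TIMEOUT'] = 10    # seconds to wait for a free connection
app.config['SQLALCHEMY_POOL_RECYCLE'] = 120   # recycle connections older than this (seconds)
app.config['SQLALCHEMY_MAX_OVERFLOW'] = 3     # extra connections allowed beyond the pool size

db = SQLAlchemy()
db.init_app(app)
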
Example #3
def configure_db(app):
    if not app.config['TESTING']:
        cfg = Config.getInstance()
        db_uri = cfg.getSQLAlchemyDatabaseURI()

        if db_uri is None:
            raise Exception("No proper SQLAlchemy store has been configured. Please edit your indico.conf")

        app.config['SQLALCHEMY_DATABASE_URI'] = db_uri

        # DB options
        app.config['SQLALCHEMY_ECHO'] = cfg.getSQLAlchemyEcho()
        app.config['SQLALCHEMY_RECORD_QUERIES'] = cfg.getSQLAlchemyRecordQueries()
        app.config['SQLALCHEMY_POOL_SIZE'] = cfg.getSQLAlchemyPoolSize()
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = cfg.getSQLAlchemyPoolTimeout()
        app.config['SQLALCHEMY_POOL_RECYCLE'] = cfg.getSQLAlchemyPoolRecycle()
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = cfg.getSQLAlchemyMaxOverflow()

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app)

    plugins_loaded.connect(lambda sender: configure_mappers(), app, weak=False)
    models_committed.connect(on_models_committed, app)
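
SQLALCHEMY_RECORD_QUERIES, toggled above via cfg.getSQLAlchemyRecordQueries(), makes Flask-SQLAlchemy keep a per-request record of executed queries. A small illustrative helper (not Indico code) that reads those records with get_debug_queries() from Flask-SQLAlchemy 2.x:

from flask_sqlalchemy import get_debug_queries


def log_slow_queries(threshold=0.5):
    # Each record exposes statement, parameters, start_time, end_time and duration.
    for query in get_debug_queries():
        if query.duration >= threshold:
            print('slow query ({:.2f}s): {}'.format(query.duration, query.statement))
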
Example #4
def configure_db(app):
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

    if app.config['TESTING']:
        # tests do not actually use sqlite but run a postgres instance and
        # reconfigure flask-sqlalchemy to use that database.  by setting
        # a dummy uri explicitly instead of letting flask-sqlalchemy do
        # the exact same thing we avoid a warning when running tests.
        app.config.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite:///:memory:')
    else:
        if config.SQLALCHEMY_DATABASE_URI is None:
            raise Exception("No proper SQLAlchemy store has been configured. Please edit your indico.conf")

        app.config['SQLALCHEMY_DATABASE_URI'] = config.SQLALCHEMY_DATABASE_URI
        app.config['SQLALCHEMY_RECORD_QUERIES'] = False
        app.config['SQLALCHEMY_POOL_SIZE'] = config.SQLALCHEMY_POOL_SIZE
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = config.SQLALCHEMY_POOL_TIMEOUT
        app.config['SQLALCHEMY_POOL_RECYCLE'] = config.SQLALCHEMY_POOL_RECYCLE
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = config.SQLALCHEMY_MAX_OVERFLOW

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app)

    plugins_loaded.connect(lambda sender: configure_mappers(), app, weak=False)
    models_committed.connect(on_models_committed, app)
Example #5
def configure_db(app):
    if not app.config['TESTING']:
        cfg = Config.getInstance()
        db_uri = cfg.getSQLAlchemyDatabaseURI()

        if db_uri is None:
            raise Exception(
                "No proper SQLAlchemy store has been configured. Please edit your indico.conf"
            )

        app.config['SQLALCHEMY_DATABASE_URI'] = db_uri

        # DB options
        app.config['SQLALCHEMY_ECHO'] = cfg.getSQLAlchemyEcho()
        app.config['SQLALCHEMY_RECORD_QUERIES'] = cfg.getSQLAlchemyRecordQueries()
        app.config['SQLALCHEMY_POOL_SIZE'] = cfg.getSQLAlchemyPoolSize()
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = cfg.getSQLAlchemyPoolTimeout()
        app.config['SQLALCHEMY_POOL_RECYCLE'] = cfg.getSQLAlchemyPoolRecycle()
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = cfg.getSQLAlchemyMaxOverflow()

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app.debug)
    configure_mappers()  # Make sure all backrefs are set
    models_committed.connect(on_models_committed, app)
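
The explicit configure_mappers() call ("Make sure all backrefs are set") forces SQLAlchemy to finish mapper configuration at startup instead of on the first query, so broken relationships and missing backrefs fail immediately. A standalone SQLAlchemy sketch with illustrative model names (SQLAlchemy 1.4+ import paths):

from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import configure_mappers, declarative_base, relationship

Base = declarative_base()


class Event(Base):
    __tablename__ = 'events'
    id = Column(Integer, primary_key=True)


class Contribution(Base):
    __tablename__ = 'contributions'
    id = Column(Integer, primary_key=True)
    event_id = Column(Integer, ForeignKey('events.id'))
    event = relationship('Event', backref='contributions')


configure_mappers()
# The backref declared on Contribution.event is now installed on Event,
# before any query has been issued.
assert hasattr(Event, 'contributions')
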
Example #6
    def setup(self):
        update_session_options(db)  # get rid of the zope transaction extension

        self.app = app = IndicoFlask('indico_zodbimport')
        app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
        app.config['PLUGINENGINE_PLUGINS'] = self.plugins
        app.config['SQLALCHEMY_DATABASE_URI'] = self.sqlalchemy_uri
        plugin_engine.init_app(app)
        if not plugin_engine.load_plugins(app):
            print cformat(
                '%{red!}Could not load some plugins: {}%{reset}').format(
                    ', '.join(plugin_engine.get_failed_plugins(app)))
            sys.exit(1)
        db.init_app(app)
        import_all_models()
        alembic_migrate.init_app(
            app, db, os.path.join(app.root_path, '..', 'migrations'))

        self.connect_zodb()

        try:
            self.tz = pytz.timezone(
                getattr(self.zodb_root['MaKaCInfo']['main'], '_timezone',
                        'UTC'))
        except KeyError:
            self.tz = pytz.utc

        with app.app_context():
            if not self.pre_check():
                sys.exit(1)

            if self.destructive:
                print cformat('%{yellow!}*** DANGER')
                print cformat(
                    '%{yellow!}***%{reset} '
                    '%{red!}ALL DATA%{reset} in your database %{yellow!}{!r}%{reset} will be '
                    '%{red!}PERMANENTLY ERASED%{reset}!').format(db.engine.url)
                if raw_input(
                        cformat(
                            '%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: '
                        )) != 'YES':
                    print 'Aborting'
                    sys.exit(1)
                delete_all_tables(db)
                stamp()
                db.create_all()
            if self.has_data():
                # Usually there's no good reason to migrate with data in the DB. However, during development one might
                # comment out some migration tasks and run the migration anyway.
                print cformat('%{yellow!}*** WARNING')
                print cformat(
                    '%{yellow!}***%{reset} Your database is not empty, migration will most likely fail!'
                )
                if raw_input(
                        cformat(
                            '%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: '
                        )) != 'YES':
                    print 'Aborting'
                    sys.exit(1)
Example #7
File: cli.py  Project: florv/indico
    def setup(self):
        update_session_options(db)  # get rid of the zope transaction extension

        self.app = app = IndicoFlask("indico_zodbimport")
        app.config["PLUGINENGINE_NAMESPACE"] = "indico.plugins"
        app.config["PLUGINENGINE_PLUGINS"] = self.plugins
        app.config["SQLALCHEMY_DATABASE_URI"] = self.sqlalchemy_uri
        app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = True
        plugin_engine.init_app(app)
        if not plugin_engine.load_plugins(app):
            print(
                cformat("%{red!}Could not load some plugins: {}%{reset}").format(
                    ", ".join(plugin_engine.get_failed_plugins(app))
                )
            )
            sys.exit(1)
        db.init_app(app)
        import_all_models()
        alembic_migrate.init_app(app, db, os.path.join(app.root_path, "..", "migrations"))

        self.connect_zodb()

        try:
            self.tz = pytz.timezone(getattr(self.zodb_root["MaKaCInfo"]["main"], "_timezone", "UTC"))
        except KeyError:
            self.tz = pytz.utc

        with app.app_context():
            if not self.pre_check():
                sys.exit(1)

            if self.destructive:
                print(cformat("%{yellow!}*** DANGER"))
                print(
                    cformat(
                        "%{yellow!}***%{reset} "
                        "%{red!}ALL DATA%{reset} in your database %{yellow!}{!r}%{reset} will be "
                        "%{red!}PERMANENTLY ERASED%{reset}!"
                    ).format(db.engine.url)
                )
                if raw_input(cformat("%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: ")) != "YES":
                    print("Aborting")
                    sys.exit(1)
                delete_all_tables(db)
                stamp()
                db.create_all()
            if self.has_data():
                # Usually there's no good reason to migrate with data in the DB. However, during development one might
                # comment out some migration tasks and run the migration anyway.
                print(cformat("%{yellow!}*** WARNING"))
                print(
                    cformat(
                        "%{yellow!}***%{reset} Your database is not empty, migration may fail or add duplicate " "data!"
                    )
                )
                if raw_input(cformat("%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: ")) != "YES":
                    print("Aborting")
                    sys.exit(1)
Example #8
def setup(logger, zodb_root, sqlalchemy_uri, dblog=False, restore=False):
    app = IndicoFlask('indico_migrate')
    app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
    app.config['SQLALCHEMY_DATABASE_URI'] = sqlalchemy_uri
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    _monkeypatch_config()

    plugin_engine.init_app(app)
    if not plugin_engine.load_plugins(app):
        print(
            cformat('%[red!]Could not load some plugins: {}%[reset]').format(
                ', '.join(plugin_engine.get_failed_plugins(app))))
        sys.exit(1)
    db.init_app(app)
    if dblog:
        app.debug = True
        apply_db_loggers(app, force=True)
        db_logger = Logger.get('_db')
        db_logger.level = logging.DEBUG
        db_logger.propagate = False
        db_logger.addHandler(SocketHandler('127.0.0.1', 9020))

    # avoid "no handlers registered" warnings
    logging.root.addHandler(logging.NullHandler())

    import_all_models()
    configure_mappers()
    alembic_migrate.init_app(app, db, os.path.join(app.root_path,
                                                   'migrations'))

    try:
        tz = pytz.timezone(
            getattr(zodb_root['MaKaCInfo']['main'], '_timezone', 'UTC'))
    except KeyError:
        tz = pytz.utc

    with app.app_context():
        if not restore:
            all_tables = sum(get_all_tables(db).values(), [])
            if all_tables:
                if db_has_data():
                    logger.fatal_error(
                        'Your database is not empty!\n'
                        'If you want to reset it, please drop and recreate it first.'
                    )
            else:
                # the DB is empty, prepare DB tables
                # prevent alembic from messing with the logging config
                tmp = logging.config.fileConfig
                logging.config.fileConfig = lambda fn: None
                prepare_db(empty=True,
                           root_path=get_root_path('indico'),
                           verbose=False)
                logging.config.fileConfig = tmp
                _create_oauth_apps()
    return app, tz
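
The fileConfig swap above works around Alembic's default env.py, which calls logging.config.fileConfig() on the project's ini file and would replace the handlers the migration tool has already installed. The same trick written as a context manager (an illustrative helper, not part of Indico):

import logging.config
from contextlib import contextmanager


@contextmanager
def quiet_alembic_logging():
    original = logging.config.fileConfig
    logging.config.fileConfig = lambda *args, **kwargs: None  # swallow Alembic's call
    try:
        yield
    finally:
        logging.config.fileConfig = original  # always restore the real function

Wrapping the prepare_db() call in this context manager would be equivalent to the inline swap-and-restore shown above.
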
Example #9
def setup(main_zodb_uri, rb_zodb_uri, sqlalchemy_uri):
    app = Flask('migration')
    fix_root_path(app)
    app.config['SQLALCHEMY_DATABASE_URI'] = sqlalchemy_uri
    db.init_app(app)
    alembic_migrate.init_app(app, db, os.path.join(app.root_path, '..', 'migrations'))

    main_root = UnbreakingDB(get_storage(main_zodb_uri)).open().root()
    rb_root = UnbreakingDB(get_storage(rb_zodb_uri)).open().root()

    return main_root, rb_root, app
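
These setup() functions treat alembic_migrate like a flask_migrate.Migrate instance: init_app() binds the app, the db object and the directory holding the Alembic revision scripts, which is what stamp() and the upgrade commands later operate on. A minimal standalone sketch under that assumption (URI and paths are illustrative):

import os

from flask import Flask
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///indico'
db = SQLAlchemy(app)

alembic_migrate = Migrate()
alembic_migrate.init_app(app, db, os.path.join(app.root_path, '..', 'migrations'))
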
Example #10
    def setup(self):
        self.app = app = IndicoFlask('indico_zodbimport')
        app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
        app.config['PLUGINENGINE_PLUGINS'] = self.plugins
        app.config['SQLALCHEMY_DATABASE_URI'] = self.sqlalchemy_uri
        app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
        plugin_engine.init_app(app)
        if not plugin_engine.load_plugins(app):
            print(
                cformat(
                    '%{red!}Could not load some plugins: {}%{reset}').format(
                        ', '.join(plugin_engine.get_failed_plugins(app))))
            sys.exit(1)
        db.init_app(app)
        setup_request_stats(app)
        if self.dblog:
            app.debug = True
            apply_db_loggers(app)
        import_all_models()
        alembic_migrate.init_app(app, db,
                                 os.path.join(app.root_path, 'migrations'))

        self.connect_zodb()

        try:
            self.tz = pytz.timezone(
                getattr(self.zodb_root['MaKaCInfo']['main'], '_timezone',
                        'UTC'))
        except KeyError:
            self.tz = pytz.utc

        with app.app_context():
            request_stats_request_started()

            if not self.pre_check():
                sys.exit(1)

            if self.has_data():
                # Usually there's no good reason to migrate with data in the DB. However, during development one might
                # comment out some migration tasks and run the migration anyway.
                print(cformat('%{yellow!}*** WARNING'))
                print(
                    cformat(
                        '%{yellow!}***%{reset} Your database is not empty, migration may fail or add duplicate '
                        'data!'))
                if raw_input(
                        cformat(
                            '%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: '
                        )) != 'YES':
                    print('Aborting')
                    sys.exit(1)
Example #11
    def setup(self):
        update_session_options(db)  # get rid of the zope transaction extension

        self.app = app = IndicoFlask('indico_zodbimport')
        app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
        app.config['PLUGINENGINE_PLUGINS'] = self.plugins
        app.config['SQLALCHEMY_DATABASE_URI'] = self.sqlalchemy_uri
        plugin_engine.init_app(app)
        if not plugin_engine.load_plugins(app):
            print(cformat('%{red!}Could not load some plugins: {}%{reset}').format(
                ', '.join(plugin_engine.get_failed_plugins(app))))
            sys.exit(1)
        db.init_app(app)
        import_all_models()
        alembic_migrate.init_app(app, db, os.path.join(app.root_path, '..', 'migrations'))

        self.connect_zodb()

        try:
            self.tz = pytz.timezone(getattr(self.zodb_root['MaKaCInfo']['main'], '_timezone', 'UTC'))
        except KeyError:
            self.tz = pytz.utc

        with app.app_context():
            if not self.pre_check():
                sys.exit(1)

            if self.destructive:
                print(cformat('%{yellow!}*** DANGER'))
                print(cformat('%{yellow!}***%{reset} '
                              '%{red!}ALL DATA%{reset} in your database %{yellow!}{!r}%{reset} will be '
                              '%{red!}PERMANENTLY ERASED%{reset}!').format(db.engine.url))
                if raw_input(cformat('%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: ')) != 'YES':
                    print('Aborting')
                    sys.exit(1)
                delete_all_tables(db)
                stamp()
                db.create_all()
            if self.has_data():
                # Usually there's no good reason to migrate with data in the DB. However, during development one might
                # comment out some migration tasks and run the migration anyway.
                print(cformat('%{yellow!}*** WARNING'))
                print(cformat('%{yellow!}***%{reset} Your database is not empty, migration may fail or add duplicate '
                              'data!'))
                if raw_input(cformat('%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: ')) != 'YES':
                    print('Aborting')
                    sys.exit(1)
Example #12
def setup(logger, zodb_root, sqlalchemy_uri, dblog=False, restore=False):
    app = IndicoFlask('indico_migrate')
    app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
    app.config['SQLALCHEMY_DATABASE_URI'] = sqlalchemy_uri
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    _monkeypatch_config()

    plugin_engine.init_app(app)
    if not plugin_engine.load_plugins(app):
        print(
            cformat('%[red!]Could not load some plugins: {}%[reset]').format(
                ', '.join(plugin_engine.get_failed_plugins(app))))
        sys.exit(1)
    db.init_app(app)
    if dblog:
        app.debug = True
        apply_db_loggers(app)

    import_all_models()
    configure_mappers()
    alembic_migrate.init_app(app, db, os.path.join(app.root_path,
                                                   'migrations'))

    try:
        tz = pytz.timezone(
            getattr(zodb_root['MaKaCInfo']['main'], '_timezone', 'UTC'))
    except KeyError:
        tz = pytz.utc

    with app.app_context():
        if not restore:
            all_tables = sum(get_all_tables(db).values(), [])
            if all_tables:
                if db_has_data():
                    logger.fatal_error(
                        'Your database is not empty!\n'
                        'If you want to reset it, please drop and recreate it first.'
                    )
            else:
                # the DB is empty, prepare DB tables
                prepare_db(empty=True,
                           root_path=get_root_path('indico'),
                           verbose=False)
    return app, tz
Example #13
def configure_db(app):
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

    if not app.config['TESTING']:
        if config.SQLALCHEMY_DATABASE_URI is None:
            raise Exception(
                "No proper SQLAlchemy store has been configured. Please edit your indico.conf"
            )

        app.config['SQLALCHEMY_DATABASE_URI'] = config.SQLALCHEMY_DATABASE_URI
        app.config['SQLALCHEMY_RECORD_QUERIES'] = False
        app.config['SQLALCHEMY_POOL_SIZE'] = config.SQLALCHEMY_POOL_SIZE
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = config.SQLALCHEMY_POOL_TIMEOUT
        app.config['SQLALCHEMY_POOL_RECYCLE'] = config.SQLALCHEMY_POOL_RECYCLE
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = config.SQLALCHEMY_MAX_OVERFLOW

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app)

    plugins_loaded.connect(lambda sender: configure_mappers(), app, weak=False)
    models_committed.connect(on_models_committed, app)
Example #14
def configure_db(app):
    cfg = Config.getInstance()
    db_uri = cfg.getSQLAlchemyDatabaseURI()

    if db_uri is None:
        raise Exception("No proper SQLAlchemy store has been configured."
                        " Please edit your indico.conf")

    app.config['SQLALCHEMY_DATABASE_URI'] = db_uri

    # DB options
    app.config['SQLALCHEMY_ECHO'] = cfg.getSQLAlchemyEcho()
    app.config['SQLALCHEMY_RECORD_QUERIES'] = cfg.getSQLAlchemyRecordQueries()
    app.config['SQLALCHEMY_POOL_SIZE'] = cfg.getSQLAlchemyPoolSize()
    app.config['SQLALCHEMY_POOL_TIMEOUT'] = cfg.getSQLAlchemyPoolTimeout()
    app.config['SQLALCHEMY_POOL_RECYCLE'] = cfg.getSQLAlchemyPoolRecycle()
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = cfg.getSQLAlchemyMaxOverflow()
    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = False

    db.init_app(app)
    apply_db_loggers(app.debug)
    configure_mappers()  # Make sure all backrefs are set
    models_committed.connect(on_models_committed, app)
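
Every configure_db() variant ends by connecting on_models_committed to Flask-SQLAlchemy's models_committed signal; the signal relies on modification tracking, which is why several of the variants above set SQLALCHEMY_TRACK_MODIFICATIONS to True explicitly. A minimal receiver sketch (illustrative, not Indico's actual on_models_committed; Flask-SQLAlchemy 2.x import path):

from flask_sqlalchemy import models_committed


def on_models_committed(sender, changes):
    # `changes` is a list of (instance, operation) pairs, where operation is
    # one of 'insert', 'update' or 'delete'.
    for obj, operation in changes:
        print('{}: {!r}'.format(operation, obj))


# Connected per application, exactly as in the examples above:
# models_committed.connect(on_models_committed, app)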