Example #1
def setupPackage():
    os.environ['MONGO_URI'] = 'mongodb://localhost'
    os.environ['MONGO_DB_NAME'] = 'royal_example'
    os.environ['MONGO_DB_PREFIX'] = ''

    # sqla extension setup.
    global engine

    alembic_config = Config()
    alembic_config.set_main_option('script_location',
                                   'example/ext/sqla/db')
    alembic_config.set_main_option('sqlalchemy.url', mysql_uri)

    engine = create_engine(mysql_uri)

    try:
        command.downgrade(alembic_config, 'base')
    except:
        log.exception("Migration downgrade failed, clearing all tables")
        metadata = MetaData(engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()

    command.upgrade(alembic_config, 'head')
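
Most examples on this page share one pattern: build a Config in code, point it at a script directory and a database URL, then invoke an alembic command. A minimal, self-contained sketch of that pattern (the script location and SQLite URL are placeholders, not values from any project above):

from alembic import command
from alembic.config import Config


def run_migrations(db_url, script_location="migrations"):
    # No alembic.ini is needed when both options are set programmatically.
    cfg = Config()
    cfg.set_main_option("script_location", script_location)
    cfg.set_main_option("sqlalchemy.url", db_url)
    command.upgrade(cfg, "head")


run_migrations("sqlite:///example.db")  # placeholder URL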
Example #2
File: manage.py Project: willkg/ernest
def db_create():
    """Create the tables and do alembic stuff"""
    print 'db at: {0}'.format(
        app.config['SQLALCHEMY_DATABASE_URI'])
    try:
        db.engine.execute('select * from project')
        print 'Database already exists with tables.'
        return

    except (OperationalError, ProgrammingError):
        # An operational error here means that the "project" table
        # doesn't exist so we should create things!
        pass

    print 'Creating {0}....'.format(
        app.config['SQLALCHEMY_DATABASE_URI'])

    db.create_all()

    from alembic.config import Config
    from alembic import command
    alembic_cfg = Config('alembic.ini')
    alembic_cfg.set_main_option("sqlalchemy.url",
        app.config["SQLALCHEMY_DATABASE_URI"])

    command.stamp(alembic_cfg, 'head')
    print 'Done.'
Example #3
def db_migration(request):
    print("Doing db setup")
    app_env_var_mock = mock.patch.dict('gds_metrics.os.environ', {'PROMETHEUS_METRICS_PATH': '/_metrics'})
    app_env_var_mock.start()
    app = create_app('test')
    Migrate(app, db)
    Manager(db, MigrateCommand)
    ALEMBIC_CONFIG = os.path.join(os.path.dirname(__file__), '../migrations/alembic.ini')
    config = Config(ALEMBIC_CONFIG)
    config.set_main_option("script_location", "migrations")

    with app.app_context():
        upgrade(config, 'head')

    print("Done db setup")

    def teardown():
        app = create_app('test')
        with app.app_context():
            db.session.remove()
            db.engine.execute("drop sequence suppliers_supplier_id_seq cascade")
            db.drop_all()
            db.engine.execute("drop table alembic_version")
            insp = inspect(db.engine)
            for enum in insp.get_enums():
                db.Enum(name=enum['name']).drop(db.engine)
            db.get_engine(app).dispose()
            app_env_var_mock.stop()
    request.addfinalizer(teardown)
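
On pytest 3+, the same fixture can be written in yield style so setup and teardown sit together. A sketch, assuming the same create_app, db, Config, upgrade and ALEMBIC_CONFIG names the example above imports:

import pytest


@pytest.fixture
def db_migration_yield():
    # Assumes create_app, db, Config, upgrade and ALEMBIC_CONFIG are
    # importable exactly as in the fixture above.
    config = Config(ALEMBIC_CONFIG)
    config.set_main_option("script_location", "migrations")
    app = create_app('test')
    with app.app_context():
        upgrade(config, 'head')
    yield
    with app.app_context():
        db.session.remove()
        db.drop_all()
        db.engine.execute("drop table alembic_version")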
Example #4
def alembic_revision(log_url):
    """local task to merge changes from glottologcurator available to the production
    site via an alembic migration script.

    pulls the changelog from glottologcurator and create a new alembic revision with it.
    """
    user = raw_input('HTTP Basic auth user for glottologcurator: ')
    password = getpass('HTTP Basic auth password for glottologcurator: ')
    kw = {}
    if user and password:
        kw['auth'] = (user, password)
    changes = requests.get(log_url, **kw).json()

    config = Config()
    config.set_main_option("script_location", path('.').joinpath('migrations'))
    scriptdir = ScriptDirectory.from_config(config)
    script = scriptdir.generate_revision(
        rev_id(), "Glottolog Curator", refresh=True,
        upgrades="""\
# from glottologcurator
    conn = op.get_bind()
    for sql, params in [
%s
    ]:
        conn.execute(sql, params)
""" % '\n'.join(u'    ("""{0}""", {1}),'.format(*event) for event in changes['events']))

    print('new alembic migration script created:')
    print(script.path)
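
ScriptDirectory.generate_revision is internal API; the public equivalent for creating a revision file from code is alembic.command.revision, though it cannot inject a pre-rendered upgrades body the way generate_revision does above. A sketch reusing the config built above:

from alembic import command

# command.revision returns the new Script object (no autogenerate here,
# so no database connection is needed).
script = command.revision(config, message="Glottolog Curator")
print(script.path)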
Example #5
def main(args):  # pragma: no cover
    """local task to make changes from glottologcurator available to the production
    site via an alembic migration script.

    pulls the changelog from glottologcurator and creates a new alembic revision with it.
    """
    kw = {}
    if args.http_user and args.http_password:
        kw['auth'] = (args.http_user, args.http_password)
    changes = requests.get(args.log_url, **kw).json()

    config = Config()
    config.set_main_option("script_location", args.migrations_dir)
    scriptdir = ScriptDirectory.from_config(config)
    script = scriptdir.generate_revision(
        rev_id(), "Glottolog Curator", refresh=True,
        upgrades="""\
# from glottologcurator
    conn = op.get_bind()
    for sql, params in [
%s
    ]:
        conn.execute(sql, params)
""" % '\n'.join(u'    ("""{0}""", {1}),'.format(
        event[0], parse_json_with_datetime(event[1])) for event in changes['events']))

    args.log.info('new alembic migration script created:')
    args.log.info(script.path)
    args.log.info('run "alembic upgrade head" to merge changes')
Example #6
File: migrations.py Project: CCI-MOC/hil
def _expected_heads():
    cfg_path = join(dirname(__file__), 'migrations',  'alembic.ini')
    cfg = Config(cfg_path)
    _configure_alembic(cfg)
    cfg.set_main_option('script_location', dirname(cfg_path))
    script_dir = ScriptDirectory.from_config(cfg)
    return set(script_dir.get_heads())
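
The returned heads can then be compared against what the database is actually stamped with. A sketch (not part of the HIL project) using MigrationContext:

from sqlalchemy import create_engine
from alembic.runtime.migration import MigrationContext


def db_matches_heads(db_url):
    # True when the database's stamped revisions equal the script heads.
    engine = create_engine(db_url)
    with engine.connect() as conn:
        current = set(MigrationContext.configure(conn).get_current_heads())
    return current == _expected_heads()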
Example #7
File: upgrade.py Project: GaretJax/irco
def main():
    log = get_logger()

    argparser = argparse.ArgumentParser('irco-import')
    argparser.add_argument('-v', '--verbose', action='store_true')
    argparser.add_argument('database')

    args = argparser.parse_args()

    sentry.context.merge({
        'tags': {
            'command': 'irco-upgrade',
        },
        'extra': {
            'parsed_arguments': args.__dict__,
        }
    })

    log.info('arguments_parsed', args=args)

    config = Config()
    config.set_main_option('script_location', 'irco:migrations')
    config.set_main_option('sqlalchemy.url', args.database)

    command.upgrade(config, 'head', sql=False, tag=None)
Example #8
def init_config(basedir):
    alembic_dir = os.path.join(basedir, 'alembic')
    
    alembic_cfg = Config()
    alembic_cfg.set_main_option('sqlalchemy.url', get_db_url())
    alembic_cfg.set_main_option('script_location', alembic_dir)
    command.upgrade(alembic_cfg, "head")
Example #9
def db_session2(request):
    sql_str = os.environ.get("DB_STRING2", "sqlite://")
    engine = create_engine(sql_str)
    engine.echo = True
    # pyramid way
    maker = sessionmaker(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.drop_all(engine)
    engine.execute("DROP TABLE IF EXISTS alembic_ziggurat_foundations_version")
    if sql_str.startswith("sqlite"):
        # sqlite will not work with alembic
        Base.metadata.create_all(engine)
    else:
        alembic_cfg = Config()
        alembic_cfg.set_main_option(
            "script_location", "ziggurat_foundations:migrations"
        )
        alembic_cfg.set_main_option("sqlalchemy.url", sql_str)
        command.upgrade(alembic_cfg, "head")

    session = maker()

    def teardown():
        session.rollback()
        session.close()

    request.addfinalizer(teardown)

    return session
Example #10
def build_alembic_config(global_config, cmd_options, session):
    """
    Build up a config that the alembic tooling can use based on our
    configuration.  Initialize the database session appropriately
    as well.
    """
    root_dir = os.path.abspath(os.path.dirname(os.path.dirname(
        os.path.dirname(__file__))))
    alembic_cfg_path = os.path.join(root_dir, 'alembic.ini')
    cfg = Config(alembic_cfg_path,
                 cmd_opts=cmd_options)
    cfg.attributes["session"] = session

    version_locations = [
        pkg_resources.resource_filename(
            "mediagoblin.db", os.path.join("migrations", "versions")),
    ]

    cfg.set_main_option("sqlalchemy.url", str(session.get_bind().url))

    for plugin in global_config.get("plugins", []):
        plugin_migrations = pkg_resources.resource_filename(
            plugin, "migrations")
        is_migrations_dir = (os.path.exists(plugin_migrations) and
                             os.path.isdir(plugin_migrations))
        if is_migrations_dir:
            version_locations.append(plugin_migrations)

    cfg.set_main_option(
        "version_locations",
        " ".join(version_locations))

    return cfg
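
version_locations is a space-separated list, which is why the plugin paths above are joined with " ". A hand-written sketch with hypothetical paths:

from alembic.config import Config

cfg = Config()
cfg.set_main_option("script_location", "migrations")
# Hypothetical paths; alembic splits this option on whitespace.
cfg.set_main_option(
    "version_locations",
    "migrations/versions plugins/foo/versions plugins/bar/versions")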
Example #11
    def run_alembic_migration(self):
        """ Migrate to latest Alembic revision if not up-to-date. """

        def migrate_if_required(rev, context):
            # nonlocal is required: without it the assignment below would
            # create a local variable and the enclosing flag would never
            # become True
            nonlocal migration_required
            rev = script.get_revision(rev)
            if not (rev and rev.is_head):
                migration_required = True

            return []

        migration_required = False
        config = Config(os.path.join(os.path.dirname(__file__), 'alembic.ini'))
        config.set_section_option('alembic', 'script_location',
                                  os.path.join(os.path.dirname(__file__), 'migrations'))
        config.set_main_option('sqlalchemy.url',
                               'sqlite:///' + self.filepath)
        script = ScriptDirectory.from_config(config)

        with EnvironmentContext(config, script, fn=migrate_if_required):
            script.run_env()

        if migration_required:
            logging.info('Migrating SQLite database to latest revision')
            alembic.command.upgrade(config, 'head')
        else:
            logging.info('SQLite database is on the latest revision')
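
Running env.py just to learn whether an upgrade is needed works, but a lighter sketch compares the stamped revision with the script directory's head directly. Note this bypasses env.py, so a custom version_table configured there would not be honored:

from sqlalchemy import create_engine
from alembic.script import ScriptDirectory
from alembic.runtime.migration import MigrationContext


def needs_migration(config, db_url):
    # Compares the revision stamped in the database with the newest script.
    script = ScriptDirectory.from_config(config)
    engine = create_engine(db_url)
    with engine.connect() as conn:
        current = MigrationContext.configure(conn).get_current_revision()
    return current != script.get_current_head()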
Example #12
def perform_migrations(config_name):
    ''' If this fails, we should revert to the previous version of SLUG running on Heroku
        link: http://stackoverflow.com/questions/24622170/using-alembic-api-from-inside-application-code
    '''
    db_url = configuration[config_name].SQLALCHEMY_DATABASE_URI
    alembic_config = AlembicConfig('.\\AlertWeb\\alembic.ini')

    alembic_config.set_main_option('sqlalchemy.url', db_url)
    alembic_config.set_main_option('script_location', '.\\AlertWeb\\migrations')

    script_dir = ScriptDirectory.from_config(alembic_config)
    head_revision = script_dir.get_current_head()

    current_revision = get_current_revision(db_url)
    
    def upgrade(rev, context):
        print(rev)
        return script_dir._upgrade_revs(head_revision, rev)

    # Facade for migration context.
    with EnvironmentContext(alembic_config, 
                            script_dir,
                            as_sql=False,
                            fn=upgrade,
                            starting_rev=current_revision,
                            destination_rev=head_revision,
                            tag=None
    ):
        script_dir.run_env()
Example #13
File: configure.py Project: grze/gstack
def _create_database():
    directory = os.path.join(os.path.dirname(__file__), '../migrations')
    database_config = AlembicConfig(os.path.join(
        directory,
        'alembic.ini'
    ))
    database_config.set_main_option('script_location', directory)
    command.upgrade(database_config, 'head', sql=False, tag=None)
Example #14
File: utils.py Project: nkhuyu/airflow
def upgradedb():
    logging.info("Creating tables")
    package_dir = os.path.abspath(os.path.dirname(__file__))
    directory = os.path.join(package_dir, "migrations")
    config = Config(os.path.join(package_dir, "alembic.ini"))
    config.set_main_option("script_location", directory)
    config.set_main_option("sqlalchemy.url", conf.get("core", "SQL_ALCHEMY_CONN"))
    command.upgrade(config, "head")
Example #15
    def actionUpgradeDB(self, version="head"):
        from alembic.config import Config
        from alembic import command

        alembic_cfg = Config()
        alembic_cfg.set_main_option("script_location", "alembic")
        alembic_cfg.set_main_option("url", config.DB_URL)
        command.upgrade(alembic_cfg, version)
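
Note that alembic's own commands read only "sqlalchemy.url"; a main option named "url", as set here and in several other examples on this page, takes effect only if the project's env.py fetches it explicitly. A sketch of the env.py side:

from alembic import context

# Only meaningful inside env.py: read the custom "url" option back.
db_url = context.config.get_main_option("url")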
Example #16
File: cmd.py Project: ritksm/polaris
def upgrade():
    from alembic.config import Config
    alembic_cfg = Config()
    scripts_path = os.path.join(os.path.dirname(__file__), "migrations")
    alembic_cfg.set_main_option("script_location", scripts_path)

    from alembic.command import upgrade
    return upgrade(alembic_cfg, revision="head")
Example #17
def _get_config():
    """
    Return an alembic config.

    :return: config
    """
    config = Config('alembic.ini')
    config.set_main_option('script_location', 'migrations')
    return config
Example #18
def migration_downgrade(commit):
    """Migrate database structure"""
    from alembic.config import Config
    from alembic.command import downgrade

    config = Config(os.path.normpath(os.path.abspath(__file__) + "/../alembic.ini"))
    config.set_main_option("script_location", "alembic")

    downgrade(config, commit)
Example #19
def upgradedb():
    logging.info("Creating tables")
    package_dir = os.path.abspath(os.path.dirname(__file__))
    directory = os.path.join(package_dir, 'migrations')
    config = Config(os.path.join(package_dir, 'alembic.ini'))
    config.set_main_option('script_location', directory)
    config.set_main_option('sqlalchemy.url',
                           configuration.get('core', 'SQL_ALCHEMY_CONN'))
    command.upgrade(config, 'heads')
Example #20
    def actionDowngradeDB(self, version=None):
        from alembic.config import Config
        from alembic import command

        if not version:
            self.parser.error("Downgrade DB requires version for migration")
        alembic_cfg = Config()
        alembic_cfg.set_main_option("script_location", "alembic")
        alembic_cfg.set_main_option("url", config.DB_URL)
        command.downgrade(alembic_cfg, version)
Example #21
    def actionCreateMigration(self, message=None, auto=True):
        from alembic.config import Config
        from alembic import command

        if not message:
            self.parser.error("Please add comment")
        alembic_cfg = Config()
        alembic_cfg.set_main_option("script_location", "alembic")
        alembic_cfg.set_main_option("url", config.DB_URL)
        command.revision(alembic_cfg, message=message, autogenerate=auto)
Example #22
def create_alembic_config(**kwargs):
    """Returns an `alembic.config.Config` object configured for uber.
    """
    kwargs['file_'] = alembic_ini_path
    alembic_config = AlembicConfig(**kwargs)
    # Override settings from "alembic.ini"
    alembic_config.set_main_option('script_location', script_location)
    alembic_config.set_main_option(
        'version_locations', version_locations_option)
    return alembic_config
Example #23
    def actionCreateDB(self):
        from models import metadata, engine
        from alembic.config import Config
        from alembic import command

        metadata.create_all(engine)
        alembic_cfg = Config()
        alembic_cfg.set_main_option("script_location", "alembic")
        alembic_cfg.set_main_option("url", config.DB_URL)
        command.stamp(alembic_cfg, "head")
Example #24
def db(app):
    """ Manage application database. """

    directory = app.instance.config.get('MIGRATIONS_ROOT')
    db_uri = app.instance.config.get('SQLALCHEMY_DSN')

    config = AlembicConfig(os.path.join(directory, 'alembic.ini'))
    config.set_main_option('script_location', directory)
    config.set_main_option('sqlalchemy.url', db_uri)

    app.migrations_config = config
Example #25
def make_extension_config(module):
  config = Config(get_base_config_file())
  config.set_main_option(
      'script_location',
      get_extension_migrations_dir(module),
      )
  config.set_main_option(
      'sqlalchemy.url',
      settings.SQLALCHEMY_DATABASE_URI,
      )
  return config
Example #26
File: migration.py Project: kates/mould
    def __init__(self, app):
        config = None

        if app.config.get('TESTING'):
            config = Config()
            config.set_main_option('script_location', 'alembic')
        else:
            config = Config(
                    os.path.realpath(os.path.dirname(__name__)) + "/alembic.ini")
        
        config.set_main_option('sqlalchemy.url', app.config.get('SQLALCHEMY_DATABASE_URI'))
        self.alembic_config = config
Example #27
def get_config_from_dict(module, **options):
    config = Config()

    script_location = options.get("script_location", None)
    if script_location is not None:
        config.set_main_option("script_location", script_location)

    use_target_metadata = options.get("use_metadata", True)

    setattr(config, "salqlchemymodule", module)
    setattr(config, "use_target_metadata", use_target_metadata)

    return config
Example #28
def upgradedb():
    # alembic adds significant import time, so we import it lazily
    from alembic import command
    from alembic.config import Config

    logging.info("Creating tables")
    current_dir = os.path.dirname(os.path.abspath(__file__))
    package_dir = os.path.normpath(os.path.join(current_dir, '..'))
    directory = os.path.join(package_dir, 'migrations')
    config = Config(os.path.join(package_dir, 'alembic.ini'))
    config.set_main_option('script_location', directory)
    config.set_main_option('sqlalchemy.url', settings.SQL_ALCHEMY_CONN)
    command.upgrade(config, 'heads')
Example #29
def update_db(update=True):
    db_connection = DatabaseConnection(database=options.sql_database,
                                   hostname=options.sql_host,
                                   port=options.sql_port,
                                   username=options.sql_user,
                                   password=options.sql_password,
                                   dialect=options.sql_dialect)
    alembic_cfg = Config('alembic/alembic.ini')
    alembic_cfg.attributes['configure_logger'] = False
    alembic_cfg.set_main_option('sqlalchemy.url', unquote_plus(str(db_connection)))
    if update:
        command.upgrade(alembic_cfg, "head")
    else:
        command.stamp(alembic_cfg, "head")
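
Config.attributes is a free-form dict that reaches env.py untouched, so configure_logger above only does something because the project's env.py checks for it. A sketch of the env.py side of that handshake:

from alembic import context

config = context.config
# Skip logging setup when the caller passed configure_logger=False via
# Config.attributes, as update_db() does above.
if config.attributes.get("configure_logger", True):
    from logging.config import fileConfig
    fileConfig(config.config_file_name)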
Example #30
def alembic_head(request, ini_filepath: str):
    alembic_config = Config(ini_filepath)

    # Setup database URL for Codeship
    if PG_USER:
        connection_url = f'postgresql+psycopg2://{PG_USER}:{PG_PASSWORD}@127.0.0.1:5432/test'
        alembic_config.set_main_option('sqlalchemy.url', connection_url)

    command.upgrade(alembic_config, 'head')

    def alembic_base():
        command.downgrade(alembic_config, 'base')

    request.addfinalizer(alembic_base)
Example #31
File: migrator.py Project: pztrn/regius
    def __migrate_core(self):
        """
        Execute framework database migrations.
        """
        script_path = self.config.get_temp_value("SCRIPT_PATH")
        if os.path.exists(os.path.join(
                script_path, "alembic.ini")) and os.path.exists(
                    os.path.join(script_path, "migrations")):
            self.log(0, "Executing core database migrations...")
            self.log(2, "Loading alembic configuration...")
            core_config = Config(os.path.join(script_path, "alembic.ini"))
            core_config.set_main_option(
                "script_location", os.path.join(script_path, "migrations"))
            core_config.set_main_option("version_table", "db_version_for_core")
            core_config.set_main_option(
                "sqlalchemy.url",
                self.config.get_temp_value("database/db_string"))

            self.log(2, "Running migrations...")
            conn = self.__database.get_database_connection()
            with conn.begin() as connection:
                command.upgrade(core_config, "head")
Example #32
    def setup_migration_version_control(self):
        self.reset_alembic_output()
        alembic_config = AlembicConfig()
        alembic_config.set_main_option("script_location",
                                       self.migrate_repository)
        alembic_config.set_main_option("sqlalchemy.url",
                                       str(self.metadata.bind.url))
        try:
            sqlalchemy_migrate_version = self.metadata.bind.execute(
                u'select version from migrate_version').scalar()
        except ProgrammingError:
            sqlalchemy_migrate_version = 0

        # this value is used for graceful upgrade from
        # sqlalchemy-migrate to alembic
        alembic_config.set_main_option("sqlalchemy_migrate_version",
                                       str(sqlalchemy_migrate_version))
        # This is an interceptor for alembic output. Otherwise,
        # everything will be printed to stdout
        alembic_config.print_stdout = self.add_alembic_output

        self.alembic_config = alembic_config
Example #33
    def init_environment_context(cls, conf):
        file_template = conf.get('alembic.file_template',
                                 '%%(day).3d-%%(rev)s-%%(slug)s')
        script_location = conf.get('alembic.script_location',
                                   'mediacore:migrations')
        version_table = conf.get('alembic.version_table', 'alembic_migrations')

        alembic_cfg = Config(ini_section='main')
        alembic_cfg.set_main_option('script_location', script_location)
        alembic_cfg.set_main_option('sqlalchemy.url', conf['sqlalchemy.url'])
        # TODO: add other sqlalchemy options
        alembic_cfg.set_main_option('file_template', file_template)

        script = ScriptDirectory.from_config(alembic_cfg)

        def upgrade(current_db_revision, context):
            return script._upgrade_revs('head', current_db_revision)

        table_name = prefix_table_name(conf, table_name=version_table)
        return EnvironmentContext(alembic_cfg,
                                  script,
                                  fn=upgrade,
                                  version_table=table_name)
Example #34
def test_offline(capsys):
    """Test offline generation with alembic-offline helpers used."""
    config = Config()
    config.set_main_option("script_location", "tests:migrations")
    config.set_main_option("sqlalchemy.url", "sqlite:///")
    config.set_main_option("phases", "before-deploy after-deploy final")

    upgrade(config, revision='1', sql=True)
    resout, reserr = capsys.readouterr()
    assert resout == """
CREATE TABLE alembic_version (
    version_num VARCHAR(32) NOT NULL
);

-- Running upgrade  -> 1

-- PHASE::before-deploy::;

CREATE TABLE account (
    id INTEGER NOT NULL,{space}
    name VARCHAR(50) NOT NULL,{space}
    description NVARCHAR(200),{space}
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,{space}
    PRIMARY KEY (id)
);

-- PHASE::after-deploy::;

update account set name='some';

-- PHASE::final::;

-- SCRIPT::scripts/script.py::;

INSERT INTO alembic_version (version_num) VALUES ('1');

""".format(space=' ').lstrip()
Example #35
import sys, os
from sqlalchemy_utils import database_exists, create_database, drop_database
from alembic.config import Config
from alembic import command
from server.database import engine, reset_db

if __name__ == "__main__":
    # a simple flag to skip DB creation
    skip_db_creation = len(sys.argv) > 1 and sys.argv[1] == "--skip-db-creation"

    print(f"database: {engine.url}")

    if skip_db_creation:
        print("skipping DB drop/create ...")
    else:
        if database_exists(engine.url):
            print("dropping database…")
            drop_database(engine.url)

        print("creating database…")
        create_database(engine.url)

    print("resetting tables…")
    reset_db()

    print("stamping latest migration revision...")
    # Following recipe: https://alembic.sqlalchemy.org/en/latest/cookbook.html#building-an-up-to-date-database-from-scratch
    alembic_cfg = Config(os.path.join(os.path.dirname(__file__), "../alembic.ini"))
    alembic_cfg.set_main_option("sqlalchemy.url", str(engine.url))
    command.stamp(alembic_cfg, "head")
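
A quick way to verify the stamp took effect, reusing the engine imported above (sketch):

from alembic.runtime.migration import MigrationContext

with engine.connect() as conn:
    # Prints the revision recorded in the alembic_version table.
    print("stamped at:", MigrationContext.configure(conn).get_current_revision())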
Example #36
from alembic.config import Config

from flask_app.src.settings import BASE_DIR, DatabaseSettings

alembic_cfg = Config()
alembic_cfg.set_main_option("script_location",
                            f"{BASE_DIR}/src/database/migration")
alembic_cfg.set_main_option("sqlalchemy.url", DatabaseSettings.DATABASE_URI)
Example #37
def initialize(settings, options):
    engine = engine_from_config(settings, 'sqlalchemy.')

    config = Configurator(settings=settings)
    pyramid_dogpile_cache.includeme(config)

    from gengine.metadata import (
        init_session,
        init_declarative_base,
        init_db
    )
    init_caches()
    init_session()
    init_declarative_base()
    init_db(engine)

    from gengine.metadata import (
        Base,
        DBSession
    )

    if options.get("reset_db", False):
        Base.metadata.drop_all(engine)
        engine.execute("DROP SCHEMA IF EXISTS public CASCADE")

    engine.execute("CREATE SCHEMA IF NOT EXISTS public")

    from alembic.config import Config
    from alembic import command
    from alembic.runtime.migration import MigrationContext

    alembic_cfg = Config(attributes={
        'engine': engine,
        'schema': 'public'
    })
    script_location = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
        'app/alembic'
    )
    alembic_cfg.set_main_option("script_location", script_location)

    context = MigrationContext.configure(engine.connect())
    current_rev = context.get_current_revision()

    if not current_rev:
        # init
        from gengine.app import model

        tables = [t for name, t in model.__dict__.items() if isinstance(t, Table)]
        Base.metadata.create_all(engine, tables=tables)

        command.stamp(alembic_cfg, "head")

        if options.get("populate_demo", False):
            populate_demo(DBSession)

        admin_user = options.get("admin_user", False)
        admin_password = options.get("admin_password", False)

        if admin_user and admin_password:
            create_user(DBSession=DBSession, user=admin_user, password=admin_password)
    else:
        # upgrade
        command.upgrade(alembic_cfg, 'head')

    engine.dispose()
Example #38
from whyattend import model
from whyattend import config

model.init_db()

from alembic.config import Config
from alembic import command
alembic_cfg = Config("alembic.ini")
alembic_cfg.set_main_option("sqlalchemy.url", config.DATABASE_URI)
command.stamp(alembic_cfg, "head")
Example #39
import contextlib
from os import path
from glob import glob

from alembic.config import Config
from alembic import command
from alembic.script import ScriptDirectory

alembic_cfg = Config()
scripts_folder = path.dirname((path.abspath(__file__)))
alembic_cfg.set_main_option("script_location", scripts_folder)
script = ScriptDirectory.from_config(alembic_cfg)


def alembic_table_exists():
    from connectors.db_connection import get_db_session_scope

    with get_db_session_scope() as session:
        return session.execute(
            "SELECT to_regclass('alembic_version');").first()[0]


def get_current_revision_in_db():
    from connectors.db_connection import get_db_session_scope

    if not alembic_table_exists():
        return None
    with get_db_session_scope() as session:
        res = session.execute("""select version_num from alembic_version""")
        res = res.first()
        if res:
            return res[0]
    return None
Example #40
def with_migrations(request, base_app):
    settings = base_app.registry.settings
    alembic_cfg = Config()
    alembic_cfg.set_main_option("script_location",
                                "ziggurat_foundations:migrations")
    alembic_cfg.set_main_option("sqlalchemy.url", settings["sqlalchemy.url"])
    command.upgrade(alembic_cfg, "head")
    alembic_cfg = Config()
    alembic_cfg.set_main_option("script_location", "appenlight:migrations")
    alembic_cfg.set_main_option("sqlalchemy.url", settings["sqlalchemy.url"])
    command.upgrade(alembic_cfg, "head")

    for plugin_name, config in base_app.registry.appenlight_plugins.items():
        if config['sqlalchemy_migrations']:
            alembic_cfg = Config()
            alembic_cfg.set_main_option("script_location",
                                        config['sqlalchemy_migrations'])
            alembic_cfg.set_main_option(
                "sqlalchemy.url",
                base_app.registry.settings["sqlalchemy.url"])
            command.upgrade(alembic_cfg, "head")
Example #41
    def main(self):

        database_name = self.config.database_name
        if not database_name:
            self.config.logger.error(
                '"database_name" cannot be an empty string'
            )
            return 1

        # superuser credentials for overall database
        superuser_pg_url = self.create_connection_url(
            'postgres',
            self.config.database_superusername,
            self.config.database_superuserpassword
        )

        # superuser credentials for working database
        superuser_normaldb_pg_url = self.create_connection_url(
            database_name,
            self.config.database_superusername,
            self.config.database_superuserpassword
        )

        # normal user credentials
        normal_user_pg_url = self.create_connection_url(
            database_name,
            self.config.database_username,
            self.config.database_password
        )

        # ensure that if on Heroku the normal_user_pg_url and the
        # superuser_pg_url are the same
        if self.config.on_heroku and (normal_user_pg_url != superuser_pg_url):
            self.config.logger.error(
                'there is no superuser (%s) when using Heroku',
                self.config.database_superusername
            )
            return 1

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # table logging section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if self.config.unlogged:
            @compiles(CreateTable)
            def create_table(element, compiler, **kw):
                text = compiler.visit_create_table(element, **kw)
                text = re.sub("^\sCREATE(.*TABLE)",
                              lambda m: "CREATE UNLOGGED %s" %
                              m.group(1), text)
                return text

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Postgres version check section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self.config.logger.info(
            'Postgres version check section with %s',
            superuser_pg_url
        )
        with PostgreSQLAlchemyManager(
            superuser_pg_url,
            self.config.logger,
            autocommit=False
        ) as db:
            if not db.min_ver_check(90200):
                self.config.logger.error(
                    'unrecognized PostgreSQL version: %s',
                    db.version_string()
                )
                self.config.logger.error('Only 9.2+ is supported at this time')
                return 1

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # drop database section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # We can only do the following if the DB is not Heroku
        # XXX Might add the heroku commands for resetting a DB here
        if self.config.dropdb and not self.config.on_heroku:
            self.config.logger.info(
                'drop database section with %s',
                superuser_pg_url
            )
            with PostgreSQLAlchemyManager(
                superuser_pg_url,
                self.config.logger,
                autocommit=False
            ) as db:
                if 'test' not in database_name and not self.config.force:
                    confirm = raw_input(
                        'drop database %s [y/N]: ' % database_name)
                    if not confirm == "y":
                        self.config.logger.warn('NOT dropping table')
                        return 2
                db.drop_database(database_name)
                db.commit()

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # create database section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if self.config.createdb:
            self.config.logger.info(
                'create database section with %s',
                superuser_pg_url
            )
            with PostgreSQLAlchemyManager(
                superuser_pg_url,
                self.config.logger,
                autocommit=False
            ) as db:
                db.create_database(database_name)
                if self.config.no_roles:
                    self.config.logger.info("Skipping role creation")
                else:
                    db.create_roles(self.config)
                db.commit()

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # database extensions section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self.config.logger.info(
            'database extensions section with %s',
            superuser_normaldb_pg_url
        )
        with PostgreSQLAlchemyManager(
            superuser_normaldb_pg_url,
            self.config.logger,
            autocommit=False,
            on_heroku=self.config.on_heroku
        ) as db:
            db.setup_extensions()
            db.grant_public_schema_ownership(self.config.database_username)
            db.commit()

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # database schema section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if self.config.no_schema:
            self.config.logger.info("not adding a schema")
            return 0

        alembic_cfg = Config(self.config.alembic_config)
        alembic_cfg.set_main_option('sqlalchemy.url', normal_user_pg_url)

        self.config.logger.info(
            'database schema section with %s',
            normal_user_pg_url
        )
        with PostgreSQLAlchemyManager(
            normal_user_pg_url,
            self.config.logger,
            autocommit=False,
            on_heroku=self.config.on_heroku
        ) as db:
            # Order matters below
            db.turn_function_body_checks_off()
            db.load_raw_sql('types')
            db.load_raw_sql('procs')
            # We need to commit to make a type visible for table creation
            db.commit()

            db.create_tables()
            db.load_raw_sql('views')
            db.commit()

            if not self.config.get('no_staticdata'):
                self.import_staticdata(db)
            if self.config['fakedata']:
                self.generate_fakedata(db, self.config['fakedata_days'])
            db.commit()
            command.stamp(alembic_cfg, "heads")
            db.session.close()

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # database owner section
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self.config.logger.info(
            'database extensions section with %s',
            superuser_normaldb_pg_url
        )
        with PostgreSQLAlchemyManager(
            superuser_normaldb_pg_url,
            self.config.logger,
            autocommit=False,
            on_heroku=self.config.on_heroku
        ) as db:
            db.set_table_owner(self.config.database_username)
            db.set_default_owner(database_name, self.config.database_username)
            db.set_grants(self.config)  # config has user lists

        return 0
Example #42
from alembic.config import Config

from weather_forecast.settings import BASE_DIR, DATABASE_URI

alembic_cfg = Config()
alembic_cfg.set_main_option("script_location",
                            f"{BASE_DIR}/weather_forecast/database/migration")
alembic_cfg.set_main_option("sqlalchemy.url", DATABASE_URI)
Example #43
    def handle_createdb(self, superuser_normaldb_pg_url, superuser_pg_url,
                        normal_user_pg_url, database_name):
        """Handles creating the database and populating it with critical stuff

        :arg string superuser_normaldb_pg_url: super creds for working db
        :arg string superuser_pg_url: superuser creds for overall db
        :arg string normal_user_pg_url: normal user creds
        :arg string database_name: the name of the database to create

        :returns: True if everything worked

        """
        self.config.logger.info('create database section with %s',
                                superuser_pg_url)
        with PostgreSQLAlchemyManager(superuser_pg_url,
                                      self.config.logger,
                                      autocommit=False) as db:
            db.create_database(database_name)
            if self.config.no_roles:
                self.config.logger.info("Skipping role creation")
            else:
                db.create_roles(self.config)
            db.commit()

        # database extensions section
        self.config.logger.info('database extensions section with %s',
                                superuser_normaldb_pg_url)
        with PostgreSQLAlchemyManager(superuser_normaldb_pg_url,
                                      self.config.logger,
                                      autocommit=False) as db:
            db.setup_extensions()
            db.grant_public_schema_ownership(self.config.database_username)
            db.commit()

        # database schema section
        if self.config.no_schema:
            self.config.logger.info("not adding a schema")
            return True

        alembic_cfg = Config(self.config.alembic_config)
        alembic_cfg.set_main_option('sqlalchemy.url', normal_user_pg_url)

        self.config.logger.info('database schema section with %s',
                                normal_user_pg_url)
        with PostgreSQLAlchemyManager(normal_user_pg_url,
                                      self.config.logger,
                                      autocommit=False) as db:
            # Order matters below
            db.turn_function_body_checks_off()
            db.load_raw_sql('types')
            db.load_raw_sql('procs')
            # We need to commit to make a type visible for table creation
            db.commit()

            db.create_tables()
            db.commit()

            if not self.config.get('no_staticdata'):
                self.import_staticdata(db)
            db.commit()
            command.stamp(alembic_cfg, "heads")
            db.session.close()

        # database owner section
        self.config.logger.info('database extensions section with %s',
                                superuser_normaldb_pg_url)
        with PostgreSQLAlchemyManager(superuser_normaldb_pg_url,
                                      self.config.logger,
                                      autocommit=False) as db:
            db.set_table_owner(self.config.database_username)
            db.set_default_owner(database_name, self.config.database_username)
            db.set_grants(self.config)  # config has user lists
        return True
Example #44
from alembic import command
from alembic.config import Config
import pytest

from patients.dbmodels.database import db_engine
from patients.dbmodels.models import Base

alembic_cfg = Config()
alembic_cfg.set_main_option('script_location', 'alembic')


@pytest.fixture(autouse=True)
def flush_db():
    db_engine.execute('drop schema if exists public cascade')
    db_engine.execute('create schema public')
    yield
    db_engine.execute('drop schema public cascade')


def test_upgrade():
    command.upgrade(alembic_cfg, 'head')


def test_upgrade_downgrade_upgrade():
    command.upgrade(alembic_cfg, 'head')
    command.downgrade(alembic_cfg, '-1')
    command.upgrade(alembic_cfg, 'head')


def test_create():
    Base.metadata.create_all(db_engine)
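
A stricter variant walks the history one revision at a time in both directions. A sketch that assumes a linear history (merge revisions, whose down_revision is a tuple, are not handled):

from alembic.script import ScriptDirectory


def test_stepwise_history():
    script = ScriptDirectory.from_config(alembic_cfg)
    # walk_revisions yields newest-first; reverse for oldest-first upgrades.
    revisions = list(script.walk_revisions("base", "heads"))
    for rev in reversed(revisions):
        command.upgrade(alembic_cfg, rev.revision)
    for rev in revisions:
        command.downgrade(alembic_cfg, rev.down_revision or "base")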
Example #45
def _get_config():
    config = Config('alembic.ini')
    config.set_main_option('script_location', 'migrations')
    return config
Example #46
def _get_config(directory=None):
    directory = directory or os.path.dirname(__file__)
    config = Config(os.path.join(directory, 'alembic.ini'))
    config.set_main_option('script_location', directory)
    return config
Example #47
class Alembic(object):
    def __init__(self, core_lib_path: str, core_lib_config: DictConfig):
        self.config = core_lib_config.core_lib.alembic
        self.alembic_cfg = Config()

        self.config['sqlalchemy.url'] = build_url(
            **core_lib_config.core_lib.data.sqlalchemy.url)

        self.__engine = create_engine(
            self.config['sqlalchemy.url'],
            echo=core_lib_config.core_lib.data.sqlalchemy.log_queries)

        self.script_location = None
        if self.config.script_location:
            # os.path.join discards core_lib_path when script_location is
            # already absolute, so this covers both relative and absolute
            # configured paths
            self.script_location = os.path.normpath(
                os.path.join(core_lib_path, self.config.script_location))

        if not self.script_location or not os.path.isdir(self.script_location):
            raise ValueError(
                "config.alembic.script_location does not exist `{}`".format(
                    self.script_location))

        if not self.config.version_file_name:
            raise ValueError("config.alembic.version_file_name cannot be None")

        for key, value in self.config.items():
            if isinstance(value, str):
                self.alembic_cfg.set_main_option(key, value)
        self.alembic_cfg.set_main_option("script_location",
                                         self.script_location)

    def __migrate_to_revision(self, update_rev: str, migrate_up: bool = True):
        def downgrade(rev, context):
            return context.script._downgrade_revs(update_rev, rev)

        def upgrade(rev, context):
            return context.script._upgrade_revs(update_rev, rev)

        fn = upgrade
        if not migrate_up:
            fn = downgrade

        self.__run_migration_callback(fn)

    def __run_migration_callback(self, callback):
        with self.__engine.begin() as connection:
            script = ScriptDirectory.from_config(self.alembic_cfg)

            from alembic.runtime.environment import EnvironmentContext
            with EnvironmentContext(self.alembic_cfg, script,
                                    fn=callback) as context:
                context.configure(version_table=self.config.version_table,
                                  connection=connection)
                with context.begin_transaction():
                    context.run_migrations()

    def upgrade(self, revision: str = "head"):
        self.__migrate_to_revision(revision, True)

    def downgrade(self, revision: str = "base"):
        self.__migrate_to_revision(revision, False)

    def create_migration(self, migration_name):
        if not migration_name:
            raise ValueError("Migration name must be set")

        version = self._read_version()
        new_version = version + 1
        command.revision(self.alembic_cfg,
                         message=migration_name,
                         rev_id=str(new_version))
        self._write_version(new_version)

    def _read_version(self):
        script = ScriptDirectory.from_config(self.alembic_cfg)
        count = 0
        for _ in script.walk_revisions():
            count = count + 1
        return count

    def _write_version(self, version):
        with open(
                os.path.join(self.script_location,
                             self.config.version_file_name), 'w') as file:
            file.write(str(version))
Example #48
    def setup_alembic_config(self):
        alembic_cfg = AlembicCfg(self.ALEMBIC_INI)
        alembic_cfg.set_main_option("sqlalchemy.url", self.DATABASE_URI)
        return alembic_cfg
Example #49
def _make_config(location: str) -> Config:
    cfg = Config()
    cfg.set_main_option("script_location", location)
    cfg.set_main_option("sqlalchemy.url", get_settings()["sqlalchemy.url"])
    return cfg
Example #50
def get_alembic_config(alembic_dir):
    config = Config(os.path.join(alembic_dir, '../../../../alembic.ini'))
    config.set_main_option('script_location', alembic_dir)
    config.set_main_option('pyramid_config_file', config_file_path)
    return config
Example #51
def get_db_migration_config():
    ini_path = os.path.join(PACKAGE_DIR, 'alembic.ini')
    config = Config(ini_path)
    config.set_main_option("script_location",
                           os.path.join(PACKAGE_DIR, 'migration'))
    return config
Example #52
class Settings(BaseSettings):
    """Settings class."""
    def __init__(self):
        """Initialize Settings instance."""
        self._settings = lya.AttrDict.from_yaml(settings_default.as_posix())
        if settings_local.exists():
            self._settings.update_yaml(settings_local.as_posix())


#: step 1 - create settings object
settings = Settings()

#: step 2 - configure alembic
alembic_cfg = Config()
alembic_cfg.set_main_option('script_location',
                            Path(PROJECT_DIR, 'db', 'migrations').as_posix())
alembic_cfg.set_main_option('url', settings.db.master.dsn)


def configure_logging():
    """Configure logging."""
    if 'logging' not in settings:
        warnings.warn(
            'Logging is not configured. Add logging setting to fix it.')
        return

    if 'logging_dir' in settings and settings.logging_dir:
        logging_dir = settings.logging_dir
        for _, handler in settings.logging.handlers.items():
            if 'filename' not in handler:
                continue
Example #53
class initialize(object):
    def __init__(self):
        logging.debug("Executing database.__init__()")

        self.mysql_conn = None
        self.mysql_cursor = None
        self.debugLogLevel = False

        if logging.root.level == 10:  # DEBUG
            self.debugLogLevel = True

        try:
            DBImport_Home = os.environ['DBIMPORT_HOME']
        except KeyError:
            print(
                "Error: System Environment Variable DBIMPORT_HOME is not set")
            #			self.remove_temporary_files()
            sys.exit(1)

        # Fetch configuration about MySQL database and how to connect to it
        self.configHostname = configuration.get("Database", "mysql_hostname")
        self.configPort = configuration.get("Database", "mysql_port")
        self.configDatabase = configuration.get("Database", "mysql_database")
        self.configUsername = configuration.get("Database", "mysql_username")
        self.configPassword = configuration.get("Database", "mysql_password")

        # Establish a SQLAlchemy connection to the DBImport database
        #		try:
        self.connectStr = "mysql+pymysql://%s:%s@%s:%s/%s" % (
            self.configUsername, self.configPassword, self.configHostname,
            self.configPort, self.configDatabase)

        try:
            self.configDB = sa.create_engine(self.connectStr,
                                             echo=self.debugLogLevel)
            self.configDB.connect()
            self.configDBSession = sessionmaker(bind=self.configDB)

        except sa.exc.OperationalError as err:
            logging.error("%s" % err)
            sys.exit(1)
        except:
            print("Unexpected error: ")
            print(sys.exc_info())
            sys.exit(1)

        # Setup configuration for Alembic
        self.alembicSchemaDir = DBImport_Home + '/bin/SchemaUpgrade'
        self.alembicConfig = Config()
        self.alembicConfig.set_main_option('script_location',
                                           self.alembicSchemaDir)
        self.alembicConfig.set_main_option('sqlalchemy.url', self.connectStr)

        # Establish a connection to the DBImport database in MySQL
        try:
            self.mysql_conn = mysql.connector.connect(
                host=self.configHostname,
                port=self.configPort,
                database=self.configDatabase,
                user=self.configUsername,
                password=self.configPassword)
        except mysql.connector.errors.ProgrammingError as err:
            logging.error("%s" % err)
            #			self.remove_temporary_files()
            sys.exit(1)
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                logging.error(
                    "Something is wrong with your user name or password")
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                logging.error("Database does not exist")
            else:
                logging.error("%s" % err)
            logging.error(
                "Error: There was a problem connecting to the MySQL database. Please check configuration and serverstatus and try again"
            )
            #			self.remove_temporary_files()
            sys.exit(1)
        else:
            self.mysql_cursor = self.mysql_conn.cursor(buffered=False)

        logging.debug("Executing database.__init__() - Finished")

    def generateSchemaRst(self):

        query = "select max(length(c.column_name)), max(length(c.column_comment)) "
        query += "from information_schema.columns c "
        query += "left join information_schema.tables t "
        query += "   on c.table_schema = t.table_schema and c.table_name = t.table_name "
        query += "where c.table_schema = 'DBImport' "

        logging.debug("SQL Statement executed: %s" % (query))
        self.mysql_cursor.execute(query)

        row = self.mysql_cursor.fetchone()
        maxColumnLength = row[0]
        maxDescLength = row[1]

        lineSingle = "+"
        lineDouble = "+"
        lineHeaderColumn = "Column"
        lineHeaderDesc = "Documentation"

        for i in range(maxColumnLength + 2):
            lineSingle += "-"
            lineDouble += "="
        lineSingle += "+"
        lineDouble += "+"
        for i in range(maxDescLength + 2):
            lineSingle += "-"
            lineDouble += "="
        lineSingle += "+"
        lineDouble += "+"

        lineHeader = "| %s" % (lineHeaderColumn)
        for i in range(maxColumnLength + 1 - len(lineHeaderColumn)):
            lineHeader += " "
        lineHeader += "| %s" % (lineHeaderDesc)
        for i in range(maxDescLength + 1 - len(lineHeaderDesc)):
            lineHeader += " "
        lineHeader += "|"

        print("Database Tables")
        print("===============")
        print("")

        print(
            "As this version of DBImport doesn't have the web admin tool available, the documentation is against each column in the configuration database tables. The admin tool will later use the same fields, so what's said here will also apply to the admin tool."
        )
        print("")

        query = "select "
        query += "   c.table_name, "
        query += "   t.table_comment, "
        query += "   c.column_name, "
        query += "   c.column_comment "
        query += "from information_schema.columns c "
        query += "left join information_schema.tables t "
        query += "   on c.table_schema = t.table_schema and c.table_name = t.table_name "
        query += "where c.table_schema = 'DBImport' "
        #		query += "   and c.table_name not like 'airflow%' "
        #		query += "   and c.table_name != 'etl_jobs' "
        query += "   and c.table_name != 'auto_discovered_tables' "
        query += "   and c.table_name != 'airflow_dag_triggers' "
        query += "   and c.table_name != 'airflow_import_dag_execution' "
        query += "   and c.table_name != 'airflow_import_task_execution' "
        query += "   and c.table_name != 'airflow_execution_type' "
        query += "   and c.table_name != 'airflow_dag_sensors' "
        query += "   and c.table_name != 'alembic_version' "
        query += "order by c.table_name, c.ordinal_position "

        logging.debug("SQL Statement executed: %s" % (query))
        self.mysql_cursor.execute(query)

        previousTable = ""
        for row in self.mysql_cursor.fetchall():
            tableName = row[0]
            tableComment = row[1]
            columnName = row[2]
            columnComment = row[3]
            if previousTable != tableName:
                previousTable = tableName
                print(" ")
                print(" ")
                print(" ")
                print("Table - %s" % (tableName))
                line = ""
                for i in range(len(tableName) + 8):
                    line += "-"
                print(line)
                print("")
                if tableComment != None:
                    print(tableComment)
                print("")
                print(lineSingle)
                print(lineHeader)
                print(lineDouble)

            line = "| %s" % (columnName)
            for i in range(maxColumnLength + 1 - len(columnName)):
                line += " "
            line += "| %s" % (columnComment)
            for i in range(maxDescLength + 1 - len(columnComment)):
                line += " "
            line += "|"
            print(line)
            print(lineSingle)

    def getListOfConfigTables(self):
        logging.debug("Executing common_config.getListOfConfigTables()")

        query = "select "
        query += "   table_name "
        query += "from information_schema.tables  "
        query += "where table_schema = %s "
        query += "order by table_name "

        logging.debug("SQL Statement executed: %s" % (query))
        self.mysql_cursor.execute(query, (self.configDatabase, ))

        tablesDF = pd.DataFrame(self.mysql_cursor.fetchall())

        if len(tablesDF) > 0:
            tablesDF.columns = ['table']

        logging.debug(
            "Executing common_config.getListOfConfigTables() - Finished")
        return tablesDF

    def createDB(self, createOnlyTable=None):

        inspector = sa.inspect(self.configDB)
        allTables = inspector.get_table_names()
        if "alembic_version" in allTables:
            print(
                "DBImport configuration database is already created. If you are deploying a new version, please run --upgradeDB"
            )
            return

        alembicCommand.upgrade(self.alembicConfig, 'head')
        self.createDefaultEnvironmentTypes()
        self.updateConfigurationValues()

        print("DBImport configuration database is created successfully")

    def upgradeDB(self):

        print("Upgrading database to latest level")
        alembicCommand.upgrade(self.alembicConfig, 'head')
        self.createDefaultEnvironmentTypes()
        self.updateConfigurationValues()

    def createDefaultEnvironmentTypes(self):
        session = self.configDBSession()

        numberOfEnvironmentTypes = session.query(
            configSchema.jdbcConnectionsEnvironments.__table__).count()
        if numberOfEnvironmentTypes == 0:
            objectsToAdd = [
                configSchema.jdbcConnectionsEnvironments(
                    environment='Production'),
                configSchema.jdbcConnectionsEnvironments(
                    environment='Integration Test'),
                configSchema.jdbcConnectionsEnvironments(
                    environment='System Test'),
                configSchema.jdbcConnectionsEnvironments(
                    environment='Development'),
                configSchema.jdbcConnectionsEnvironments(environment='Sandbox')
            ]
            session.add_all(objectsToAdd)
            session.commit()

    def updateConfigurationValues(self):

        query = sa.select([configSchema.jdbcConnectionsDrivers.database_type])
        result_df = pd.DataFrame(self.configDB.execute(query).fetchall())

        if result_df.empty or (result_df[0] == 'DB2 AS400').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='DB2 AS400',
                version='default',
                driver='com.ibm.as400.access.AS400JDBCDriver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'DB2 UDB').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='DB2 UDB',
                version='default',
                driver='com.ibm.db2.jcc.DB2Driver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'MySQL').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='MySQL',
                version='default',
                driver='com.mysql.jdbc.Driver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'Oracle').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='Oracle',
                version='default',
                driver='oracle.jdbc.driver.OracleDriver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'PostgreSQL').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='PostgreSQL',
                version='default',
                driver='org.postgresql.Driver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'Progress DB').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='Progress DB',
                version='default',
                driver='com.ddtek.jdbc.openedge.OpenEdgeDriver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'SQL Server').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='SQL Server',
                version='default',
                driver='com.microsoft.sqlserver.jdbc.SQLServerDriver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='SQL Server',
                version='jTDS',
                driver='net.sourceforge.jtds.jdbc.Driver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'CacheDB').any() == False:
            query = sa.insert(configSchema.jdbcConnectionsDrivers).values(
                database_type='CacheDB',
                version='default',
                driver='com.intersys.jdbc.CacheDriver',
                classpath='add path to JAR file')
            self.configDB.execute(query)

        query = sa.select([configSchema.configuration.configKey])
        result_df = pd.DataFrame(self.configDB.execute(query).fetchall())

        if result_df.empty or (result_df[0]
                               == 'airflow_disable').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_disable',
                valueInt='0',
                description=
                'Disable all executions from Airflow. This is what the "start" task is looking at'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'export_stage_disable').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='export_stage_disable',
                valueInt='0',
                description=
                'With 1, you prevent new Export tasks from starting, and running tasks will stop after the current stage is completed.'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'export_staging_database').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='export_staging_database',
                valueStr='etl_export_staging',
                description='Name of staging database to use during Exports')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'export_start_disable').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='export_start_disable',
                valueInt='0',
                description=
                'With 1, you prevent new Export tasks from starting. Running tasks will be completed'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'import_stage_disable').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='import_stage_disable',
                valueInt='0',
                description=
                'With 1, you prevent new Import tasks from starting, and running tasks will stop after the current stage is completed.'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'import_staging_database').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='import_staging_database',
                valueStr='etl_import_staging',
                description='Name of staging database to use during Imports')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'import_start_disable').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='import_start_disable',
                valueInt='0',
                description=
                'With 1, you prevent new Import tasks from starting. Running tasks will be completed'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'hive_remove_locks_by_force').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_remove_locks_by_force',
                valueInt='0',
                description=
                'With 1, DBImport will forcibly remove Hive locks before import'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'hive_major_compact_after_merge'
                               ).any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_major_compact_after_merge',
                valueInt='0',
                description=
                'With 1, DBImport will run a major compaction after the merge operation is completed'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'hive_validate_before_execution'
                               ).any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_validate_before_execution',
                valueInt='1',
                description=
                'With 1, DBImport will run a group by query against the validate table and verify the result against reference values hardcoded in DBImport'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'hive_validate_table').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_validate_table',
                valueStr='dbimport.validate_table',
                description='The table to run the validate query against')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'hive_print_messages').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_print_messages',
                valueInt='0',
                description=
                'With 1, Hive will print additional messages during SQL operations'
            )
            self.configDB.execute(query)

        if result_df.empty or (
                result_df[0] == 'airflow_dbimport_commandpath').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_dbimport_commandpath',
                valueStr='sudo -iu ${SUDO_USER} /usr/local/dbimport/',
                description=
                'This is the path to DBImport. If sudo is required, this can be added here as well. Use the variable ${SUDO_USER} instead of hardcoding the sudo username. Must end with a /'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'airflow_sudo_user').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_sudo_user',
                valueStr='dbimport',
                description=
                'The user Airflow will sudo to when executing DBImport. This value will replace the ${SUDO_USER} variable in the airflow_dbimport_commandpath setting'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'airflow_major_version').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_major_version',
                valueInt='2',
                description=
                'The major version of Airflow. Valid options are 1 and 2. Controls how the DAG files are generated'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'airflow_dag_directory').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_dag_directory',
                valueStr='/usr/local/airflow/dags',
                description='Airflow path to DAG directory')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'airflow_dag_staging_directory'
                               ).any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_dag_staging_directory',
                valueStr='/usr/local/airflow/dags_generated_from_dbimport',
                description='Airflow path to staging DAG directory')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'airflow_dummy_task_queue').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_dummy_task_queue',
                valueStr='default',
                description=
                'Queue to use for dummy tasks (stop, stage_one_complete and more)'
            )
            self.configDB.execute(query)

        if result_df.empty or (
                result_df[0] == 'airflow_dag_file_permission').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_dag_file_permission',
                valueStr='660',
                description='File permission of created DAG file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'airflow_dag_file_group').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='airflow_dag_file_group',
                valueStr='airflow',
                description='Group owner of created DAG file')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'timezone').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='timezone',
                valueStr='UTC',
                description=
                'The timezone that the configured times refer to (for example Europe/Stockholm)'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'cluster_name').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='cluster_name',
                valueStr='hadoopcluster',
                description='Name of Hadoop cluster')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'hdfs_address').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hdfs_address',
                valueStr='hdfs://hadoopcluster:8020',
                description='Address to HDFS')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'hdfs_blocksize').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hdfs_blocksize',
                valueStr='134217728',
                description=
                'The HDFS blocksize in bytes. Can usually be found in /etc/hadoop/conf/hdfs-site.xml (search for dfs.blocksize)'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'hdfs_basedir').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hdfs_basedir',
                valueStr='/apps/dbimport',
                description=
                'The base directory to write data to, for example /apps/dbimport')
            self.configDB.execute(query)

        if result_df.empty or (
                result_df[0] == 'sqoop_import_default_mappers').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='sqoop_import_default_mappers',
                valueInt='12',
                description=
                'How many mappers should be used for tables that have never been imported before'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'sqoop_import_max_mappers').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='sqoop_import_max_mappers',
                valueInt='32',
                description=
                'The maximum number of mappers to use during imports')
            self.configDB.execute(query)

        if result_df.empty or (
                result_df[0] == 'sqoop_export_default_mappers').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='sqoop_export_default_mappers',
                valueInt='2',
                description=
                'How many mappers should be used for tables that have never been exported before'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'sqoop_export_max_mappers').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='sqoop_export_max_mappers',
                valueInt='32',
                description=
                'The maximum number of mappers to use during exports')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'spark_import_default_executors'
                               ).any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='spark_import_default_executors',
                valueInt='12',
                description=
                'How many executors should be used for tables that have never been imported before'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'spark_import_max_executors').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='spark_import_max_executors',
                valueInt='32',
                description=
                'The maximum number of executors to use during imports')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'spark_export_default_executors'
                               ).any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='spark_export_default_executors',
                valueInt='2',
                description=
                'How many executors should be used for tables that have never been exported before'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'spark_export_max_executors').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='spark_export_max_executors',
                valueInt='32',
                description=
                'The maximum number of executors to use during exports')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'atlas_discovery_interval').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='atlas_discovery_interval',
                valueInt='24',
                description=
                'How many hours should pass between each Atlas discovery of a JDBC connection'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'import_process_empty').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='import_process_empty',
                valueInt='0',
                description=
                'If 1, then imports will be fully processed even if the source table contains no data.'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'hive_insert_only_tables').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_insert_only_tables',
                valueInt='1',
                description=
                'If 1, then the non-merge tables in Hive will be ACID insert-only'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'hive_acid_with_clusteredby').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='hive_acid_with_clusteredby',
                valueInt='0',
                description=
                'If 1, then ACID tables will be created with a clustered by option based on the PK. Not required with Hive3 and later'
            )
            self.configDB.execute(query)

        if result_df.empty or (
                result_df[0] == 'post_airflow_dag_operations').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='post_airflow_dag_operations',
                valueInt='0',
                description=
                'Post start and stop activities for Airflow DAGs to Kafka and/or REST, depending on what is enabled'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'post_data_to_rest').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='post_data_to_rest',
                valueInt='0',
                description=
                'Enable the REST endpoint to be able to receive information regarding completed imports and exports'
            )
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'post_data_to_rest_extended').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='post_data_to_rest_extended',
                valueInt='0',
                description='Enable extended statistics in the REST data')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'post_data_to_kafka').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='post_data_to_kafka',
                valueInt='0',
                description=
                'Enable the Kafka endpoint to be able to receive information regarding completed imports and exports'
            )
            self.configDB.execute(query)

        if result_df.empty or (
                result_df[0] == 'post_data_to_kafka_extended').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='post_data_to_kafka_extended',
                valueInt='0',
                description='Enable extended statistics in Kafka data')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'kafka_brokers').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='kafka_brokers',
                valueStr='localhost:9092',
                description='Comma-separated list of Kafka brokers')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'kafka_trustcafile').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='kafka_trustcafile',
                valueStr='/etc/pki/tls/certs/ca-bundle.crt',
                description='Kafka CA Trust file for SSL')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'kafka_securityprotocol').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='kafka_securityprotocol',
                valueStr='SASL_SSL',
                description='Kafka Security Protocol')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'kafka_saslmechanism').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='kafka_saslmechanism',
                valueStr='GSSAPI',
                description='Kafka SASL mechanism')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'kafka_topic').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='kafka_topic',
                valueStr='dbimport_topic',
                description='Kafka topic to send the data to')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'rest_url').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='rest_url',
                valueStr='https://localhost:8443/dbimport',
                description='Rest server URL')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0] == 'rest_timeout').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='rest_timeout',
                valueInt='5',
                description='Timeout for the REST call')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'rest_verifyssl').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='rest_verifyssl',
                valueInt='1',
                description='Verify SSL certificate during REST call')
            self.configDB.execute(query)

        if result_df.empty or (result_df[0]
                               == 'rest_trustcafile').any() == False:
            query = sa.insert(configSchema.configuration).values(
                configKey='rest_trustcafile',
                valueStr='/etc/pki/tls/certs/ca-bundle.crt',
                description='REST CA Trust file for SSL')
            self.configDB.execute(query)
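
The insert-if-missing pattern above repeats once per configuration key. As a minimal, hypothetical sketch (not part of DBImport; the seed_configuration helper and the defaults list are illustrative), the same idempotent seeding could be expressed as a data-driven loop:

def seed_configuration(configDB, defaults):
    # Fetch the keys that already exist so each default is inserted only once.
    existing = {row[0] for row in configDB.execute(
        sa.select([configSchema.configuration.configKey])).fetchall()}
    for values in defaults:
        if values['configKey'] not in existing:
            configDB.execute(
                sa.insert(configSchema.configuration).values(**values))

defaults = [
    {'configKey': 'airflow_disable', 'valueInt': '0',
     'description': 'Disable all executions from Airflow'},
    {'configKey': 'timezone', 'valueStr': 'UTC',
     'description': 'The timezone that the configured times refer to'},
]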
Example #54
class DBUpdater(object):
    """
    Database update system
    """
    def __init__(self):
        """
        Constructor
        """
        self._config = Config()
        self._config.set_main_option("script_location", "alembic")

        self._script = ScriptDirectory.from_config(self._config)
        self._engine = create_engine(
            current_app.config.get('SQLALCHEMY_DATABASE_URI'))

    @property
    def needs_update(self):
        """Returns whether or not the component needs an update"""
        return self.current_revision != self.newest_revision

    @property
    def current_revision(self):
        """Returns the current database revision"""
        return self._current_revision

    @property
    def newest_revision(self):
        """Returns the newest revision available"""
        return self._newest_revision

    @property
    def status(self):
        """Returns the component status"""
        return ''

    def refresh(self):
        """
        Refreshes the component status
        """
        self._open()

        self._current_revision = self._context.get_current_revision()
        self._newest_revision = self._script.get_current_head()

        self._close()

        return True

    def update(self):
        """
        Performs the update

        :returns: The update results
        """

        if self._current_revision != self._newest_revision:
            _log('DBUpdater: starting..')

            try:
                script_directory = ScriptDirectory.from_config(self._config)

                revision_list = []
                for script in script_directory.walk_revisions(
                        self._current_revision, self._newest_revision):
                    if script.revision != self._current_revision:
                        revision_list.append(script.revision)

                for rev in reversed(revision_list):
                    try:
                        _log('Applying database revision: {0}'.format(rev))
                        command.upgrade(self._config, rev)
                    except sqlalchemy.exc.OperationalError as err:
                        if 'already exists' in str(err):
                            _log(
                                'Table already exists.. stamping to revision.')
                            self._stamp_database(rev)

            except sqlalchemy.exc.OperationalError as err:
                _log('DBUpdater: failure - {0}'.format(err),
                     logLevel=logging.ERROR)

                return False

            _log('DBUpdater: success')

        return True
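
Note that ScriptDirectory.walk_revisions() yields revisions newest-first, which is why the example above collects them into a list and applies them with reversed(). A standalone sketch of that traversal, assuming a conventional 'alembic' script directory:

from alembic.config import Config
from alembic.script import ScriptDirectory

config = Config()
config.set_main_option('script_location', 'alembic')
script = ScriptDirectory.from_config(config)

# walk_revisions() yields Script objects from head down to base, so the
# list is reversed to apply migrations oldest-first.
pending = [s.revision for s in script.walk_revisions('base', 'head')]
for rev in reversed(pending):
    print(rev)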
Example #55
def config(self):
    from alembic.config import Config
    config = Config()
    config.set_main_option('script_location', str(self['path']))
    return config
Example #56
from alembic.config import Config as AlembicConfig
from alembic.command import upgrade as alembic_upgrade

def make_migrations():
    alembic_config = AlembicConfig('alembic.ini')
    alembic_config.set_main_option('script_location', 'alembic')
    alembic_upgrade(alembic_config, 'head')
Example #57
File: sqlite.py Project: xunyou/dagobah
def _get_alembic_config(self):
    config = Config(os.path.join(os.path.dirname(__file__), 'alembic.ini'))
    config.set_section_option('alembic', 'script_location',
                              os.path.join(os.path.dirname(__file__), 'migrations'))
    config.set_main_option('sqlalchemy.url', self.connect_string)
    return config
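
A config built this way plugs straight into the alembic command API. As a hypothetical companion method (not from dagobah itself), pending migrations could be applied with it:

from alembic import command

def run_migrations(self):
    # Apply all pending migrations against self.connect_string.
    command.upgrade(self._get_alembic_config(), 'head')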
Example #58
import click
import datetime as dt

from alembic.config import Config
from alembic import command
from pathlib import Path

from .sources import Source


source = Source.from_name("MAIN")

alembic_cfg = Config(Path() / "alembic.ini")
alembic_cfg.set_main_option("sqlalchemy.url", source.connection_url)
alembic_cfg.attributes["connection"] = source.connection


@click.command()
def upgrade():
    command.upgrade(alembic_cfg, "head")

    command.revision(
        alembic_cfg, message=f"AUTO@{dt.datetime.now().isoformat()}", autogenerate=True
    )
    command.upgrade(alembic_cfg, "head")


# The following should be found at the end of the file
commands = click.Group(name="database")
for obj in list(locals().values()):
    # click.Group is itself a click.Command, so exclude the group
    # to avoid registering it as a subcommand of itself.
    if isinstance(obj, click.Command) and obj is not commands:
        commands.add_command(obj)
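
The loop above registers every click.Command defined in the module on the "database" group. A hedged sketch of mounting that group on a parent CLI (the cli name is illustrative):

import click

cli = click.Group()
cli.add_command(commands)  # the "database" group built above

if __name__ == '__main__':
    cli()  # e.g. `python app.py database upgrade`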
Example #59
def main():
    from docassemble.webapp.database import dbprefix
    if dbprefix.startswith('postgresql') and not daconfig.get(
            'force text to varchar upgrade', False):
        do_varchar_upgrade = False
    else:
        do_varchar_upgrade = True
    with app.app_context():
        if daconfig.get('use alembic', True):
            if do_varchar_upgrade:
                changed = False
                if db.engine.has_table(dbtableprefix + 'userdict'):
                    db.session.query(UserDict).filter(
                        db.func.length(UserDict.filename) > 255).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'userdictkeys'):
                    db.session.query(UserDictKeys).filter(
                        db.func.length(UserDictKeys.filename) > 255).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'chatlog'):
                    db.session.query(ChatLog).filter(
                        db.func.length(ChatLog.filename) > 255).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'uploads'):
                    db.session.query(Uploads).filter(
                        db.func.length(Uploads.filename) > 255).delete(
                            synchronize_session=False)
                    db.session.query(Uploads).filter(
                        db.func.length(Uploads.yamlfile) > 255).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'objectstorage'):
                    db.session.query(ObjectStorage).filter(
                        db.func.length(ObjectStorage.key) > 1024).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'speaklist'):
                    db.session.query(SpeakList).filter(
                        db.func.length(SpeakList.filename) > 255).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'shortener'):
                    db.session.query(Shortener).filter(
                        db.func.length(Shortener.filename) > 255).delete(
                            synchronize_session=False)
                    db.session.query(Shortener).filter(
                        db.func.length(Shortener.key) > 255).delete(
                            synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'machinelearning'):
                    db.session.query(MachineLearning).filter(
                        db.func.length(MachineLearning.key) > 1024).delete(
                            synchronize_session=False)
                    db.session.query(MachineLearning).filter(
                        db.func.length(MachineLearning.group_id) > 1024
                    ).delete(synchronize_session=False)
                    changed = True
                if db.engine.has_table(dbtableprefix + 'globalobjectstorage'):
                    db.session.query(GlobalObjectStorage).filter(
                        db.func.length(GlobalObjectStorage.key) > 1024).delete(
                            synchronize_session=False)
                    changed = True
                if changed:
                    db.session.commit()
            packagedir = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse('docassemble.webapp'),
                'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url",
                                        alchemy_connection_string())
            alembic_cfg.set_main_option("script_location",
                                        os.path.join(packagedir, 'alembic'))
            if not db.engine.has_table(dbtableprefix + 'alembic_version'):
                start_time = time.time()
                sys.stderr.write("Creating alembic stamp\n")
                command.stamp(alembic_cfg, "head")
                sys.stderr.write("Done creating alembic stamp after " +
                                 str(time.time() - start_time) + " seconds\n")
            if db.engine.has_table(dbtableprefix + 'user'):
                start_time = time.time()
                sys.stderr.write("Creating alembic stamp\n")
                sys.stderr.write("Running alembic upgrade\n")
                command.upgrade(alembic_cfg, "head")
                sys.stderr.write("Done running alembic upgrade after " +
                                 str(time.time() - start_time) + " seconds\n")
        #db.drop_all()
        start_time = time.time()
        try:
            sys.stderr.write("Trying to create tables\n")
            db.create_all()
        except Exception:
            sys.stderr.write(
                "Error trying to create tables; trying a second time.\n")
            try:
                db.create_all()
            except Exception:
                sys.stderr.write(
                    "Error trying to create tables; trying a third time.\n")
                db.create_all()
        sys.stderr.write("Finished creating tables after " +
                         str(time.time() - start_time) + " seconds.\n")
        populate_tables()
        db.engine.dispose()
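
The stamp/upgrade split above is the common way to bring a pre-alembic schema under migration control: command.stamp records the head revision without running any DDL, while command.upgrade applies pending migrations. A minimal sketch of that decision, with illustrative names, assuming the existing tables already match the head revision:

import sqlalchemy as sa
from alembic import command
from alembic.config import Config

def bring_under_alembic(engine, alembic_cfg):
    inspector = sa.inspect(engine)
    if 'alembic_version' not in inspector.get_table_names():
        # Schema predates alembic: record head without running migrations.
        command.stamp(alembic_cfg, 'head')
    else:
        # Already tracked: apply anything pending.
        command.upgrade(alembic_cfg, 'head')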
Example #60
class MigrateCommand(Command):
    """Create and apply SQLAlchemy migrations
    Migrations will be managed inside the 'migration/versions' directory

    Usage: gearbox migrate COMMAND ...
    Use 'gearbox help migrate' to get a list of commands and their usage
    """
    def get_description(self):
        return '''Create and apply SQLAlchemy migrations.

Migrations will be managed inside the 'migration/versions' directory
and applied to the database defined by sqlalchemy.url inside the
configuration file.

Create a new migration::

    $ gearbox migrate create 'Add New Things'

Apply migrations::

    $ gearbox migrate upgrade

Get current database version::

    $ gearbox migrate db_version

Downgrade version::

    $ gearbox migrate downgrade
'''

    def get_parser(self, prog_name):
        parser = super(MigrateCommand, self).get_parser(prog_name)
        parser.formatter_class = argparse.RawDescriptionHelpFormatter

        parser.add_argument(
            "-c",
            "--config",
            help='application config file to read (default: development.ini)',
            dest='config',
            default="development.ini")

        subparser = parser.add_subparsers(dest='command')

        create_parser = subparser.add_parser('create', add_help=False)
        create_parser.add_argument('name')

        autogenerate_parser = subparser.add_parser('autogenerate', add_help=False)
        autogenerate_parser.add_argument('name')

        subparser.add_parser('db_version', add_help=False)

        upgrade_parser = subparser.add_parser('upgrade', add_help=False)
        upgrade_parser.add_argument('version', nargs='?', default='head')

        downgrade_parser = subparser.add_parser('downgrade', add_help=False)
        downgrade_parser.add_argument('version', nargs='?', default='-1')

        subparser.add_parser('test', add_help=False)

        return parser

    def take_action(self, opts):
        from alembic.config import Config
        from alembic import command as alembic_commands

        self.alembic_commands = alembic_commands
        self.alembic_cfg = Config(opts.config, ini_section='app:main')
        self.alembic_cfg.set_main_option('script_location', 'migration')

        command = getattr(self, 'command_%s' % opts.command)
        command(opts)

    def command_create(self, opts):
        self.alembic_commands.revision(self.alembic_cfg, opts.name)

    def command_autogenerate(self, opts):
        config_name = 'config:%s' % opts.config

        here_dir = os.getcwd()
        sys.path.insert(0, here_dir)

        # Load the wsgi app first so that everything is initialized right
        loadapp(config_name, relative_to=here_dir)

        self.alembic_commands.revision(self.alembic_cfg,
                                       opts.name,
                                       autogenerate=True)

        log.warning('!!! REMEMBER TO EDIT THE AUTOGENERATED MIGRATION, '
                    'it will usually drop any support table which is not '
                    'registered into your application metadata.')

    def command_db_version(self, opts):
        self.alembic_commands.current(self.alembic_cfg)

    def command_upgrade(self, opts):
        self.alembic_commands.upgrade(self.alembic_cfg, opts.version)

    def command_downgrade(self, opts):
        self.alembic_commands.downgrade(self.alembic_cfg, opts.version)

    def command_test(self, opts):
        self.alembic_commands.upgrade(self.alembic_cfg, '+1')
        self.alembic_commands.downgrade(self.alembic_cfg, '-1')