def upgrade():
    conn = op.get_bind()
    conn.execute(text("set @@lock_wait_timeout = 20;"))
    conn.execute(text("set @@foreign_key_checks = 0;"))

    # Add new columns + ForeignKey constraints.
    shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id"))
    if shard_id == 0:
        conn.execute(
            text("ALTER TABLE genericaccount "
                 "ADD COLUMN imap_username CHAR(255) DEFAULT NULL, "
                 "ADD COLUMN smtp_username CHAR(255) DEFAULT NULL, "
                 "ADD COLUMN imap_password_id INT(11), "
                 "ADD COLUMN smtp_password_id INT(11), "
                 "ADD CONSTRAINT imap_password_id_ifbk FOREIGN KEY "
                 "(`imap_password_id`) REFERENCES `secret` (`id`), "
                 "ADD CONSTRAINT smtp_password_id_ifbk FOREIGN KEY "
                 "(`smtp_password_id`) REFERENCES `secret` (`id`);"))
    else:
        conn.execute(
            text("ALTER TABLE genericaccount "
                 "ADD COLUMN imap_username CHAR(255) DEFAULT NULL, "
                 "ADD COLUMN smtp_username CHAR(255) DEFAULT NULL, "
                 "ADD COLUMN imap_password_id BIGINT(20), "
                 "ADD COLUMN smtp_password_id BIGINT(20), "
                 "ADD CONSTRAINT imap_password_id_ifbk FOREIGN KEY "
                 "(`imap_password_id`) REFERENCES `secret` (`id`), "
                 "ADD CONSTRAINT smtp_password_id_ifbk FOREIGN KEY "
                 "(`smtp_password_id`) REFERENCES `secret` (`id`);"))
Example #2
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    connectable = engine_from_config(
        {
            "sqlalchemy.url":
            ALEMBIC_CONFIG.url.__to_string__(hide_password=False)
        },
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            include_object=include_object,
        )

        with context.begin_transaction() as transaction:
            context.run_migrations()
            if "dry-run" in context.get_x_argument():
                print("Dry-run succeeded; now rolling back transaction")
                transaction.rollback()
Example #3
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # target DB type from command line args
    try:
        db_type = context.get_x_argument(as_dictionary=True)['db-type']
    except KeyError:
        raise Exception(
            'Argument "-x db-type ..." is required for offline migrations!'
        )

    context.configure(
        dialect_name=db_type,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
Example #4
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    ini_section = config.get_section(config.config_ini_section)

    # if a database path was provided, override the one in alembic.ini
    db_path = context.get_x_argument(as_dictionary=True).get('url')
    if not db_path:
        db_path = os.environ.get('BLOCK_DATABASE_URI', None)
        if db_path is None:
            if not ini_section.get('sqlalchemy.url', None):
                raise ValueError(
                    'You need to provide a database url: '
                    '-x url=postgresql://postgres:postgres@localhost/qabel-block'
                )
    if db_path:
        ini_section['sqlalchemy.url'] = db_path

    connectable = engine_from_config(ini_section,
                                     prefix='sqlalchemy.',
                                     poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
Example #5
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    env_file = context.get_x_argument(as_dictionary=True).get('env')
    load_dotenv(env_file)
    
    
    alembic_config = config.get_section(config.config_ini_section)
    alembic_config['sqlalchemy.url'] = 'postgres://{}:{}@{}:5432/{}'.format(
        environ['DbUser'],
        base64.b64decode(environ['DbPassword'].encode('ascii')).decode('ascii'),
        environ['DbUrl'],
        environ['DbName']
    )
    connectable = engine_from_config(
        alembic_config,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
Example #6
def upgrade():
    try:
        local_srid = context.get_x_argument(as_dictionary=True)['local-srid']
    except KeyError:
        raise Exception("Missing local srid, please use -x local-srid=...")
    with TemporaryDirectory() as temp_dir:
        with open_remote_file(base_url, archive_name,
                              open_fct=ZipFile) as archive:
            archive.extract(file_name, path=temp_dir)
        path = os.path.join(temp_dir, file_name)
        cmd = f"raster2pgsql -s {local_srid} -c -C -I -M -d -t 5x5 {path} ref_geo.dem | psql"
        db_uri = urlsplit(config['SQLALCHEMY_DATABASE_URI'])
        env = {
            'PGHOST': db_uri.hostname,
            'PGPORT': str(db_uri.port),
            'PGUSER': db_uri.username,
            'PGPASSWORD': db_uri.password,
            'PGDATABASE': db_uri.path.lstrip('/'),
        }
        subprocess.run(cmd,
                       stdout=subprocess.DEVNULL,
                       shell=True,
                       check=True,
                       env=env)
    logger.info("Refresh DEM spatial index…")
    op.execute("REINDEX INDEX ref_geo.dem_st_convexhull_idx")
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():

            context.run_migrations()

        if context.get_x_argument(
                as_dictionary=True).get("apply_hasura_metadata"):
            apply_hasura_metadata(context)
Example #8
def set_sqlalchemy_url():
    """Sets sqlalchemy.url from command-line arguments.

    Substitutions are made for the following parameters:

    - db_type: The database type.
    - db_user: The database username.
    - db_password: The database password.
    - db_host: The database host.
    - db_port: The database port.
    - db_name: The database name.
    """
    x_args = context.get_x_argument(as_dictionary=True)
    db_type = x_args.get('db_type', 'sqlite')
    db_user = x_args.get('db_user', 'uwsolar')
    db_password = x_args.get('db_password', '')
    db_host = x_args.get('db_host', ':memory:')
    # -x arguments arrive as strings; coerce the port so the %d format below works.
    db_port = int(x_args.get('db_port', 3306))
    db_name = x_args.get('db_name', 'uwsolar')

    if db_type == 'sqlite':
        url = '%s:///%s' % (db_type, db_host)
    else:
        url = '%s://%s:%s@%s:%d/%s' % (db_type, db_user, db_password, db_host,
                                       db_port, db_name)

    config.set_main_option('sqlalchemy.url', url)
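The excerpt ends once the URL has been written back into the Alembic config. A hypothetical caller (not shown in the original) would run set_sqlalchemy_url() before building the engine, following the same shape as the other online runners on this page; target_metadata is assumed to be defined elsewhere in env.py.
def run_migrations_online():
    # Hypothetical wiring: resolve the URL from -x arguments, then run online.
    set_sqlalchemy_url()
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()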
Example #9
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """

    # if a database path was provided, override the one in alembic.ini
    database = context.get_x_argument(as_dictionary=True).get('database')
    if database:
        url = "sqlite:///%s" % database
    else:
        url = config.get_main_option("sqlalchemy.url")

    context.configure(url=url,
                      target_metadata=target_metadata,
                      literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()
Example #10
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    env_type = context.get_x_argument(as_dictionary=True).get('env_type')

    if env_type == 'local':
        app = create_app(config=LocalConfig)
    elif env_type == 'staging':
        app = create_app(config=StagingConfig)
    elif env_type == 'prod':
        app = create_app(config=ProdConfig)
    else:
        raise Exception("environment type not specified!")

    alembic_config = config.get_section(config.config_ini_section)
    alembic_config['sqlalchemy.url'] = app.config['SQLALCHEMY_DATABASE_URI']

    engine = engine_from_config(alembic_config,
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=db.metadata,
                      include_object=include_object)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
Example #11
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # get the alembic section of the config file
    ini_section = config.get_section(config.config_ini_section)

    # if a database path was provided, override the one in alembic.ini
    database = context.get_x_argument(as_dictionary=True).get('database')
    if database:
        ini_section['sqlalchemy.url'] = "sqlite:///%s" % database
    else:
        ini_section = config.get_section(config.config_ini_section)

    connectable = engine_from_config(ini_section,
                                     prefix='sqlalchemy.',
                                     poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
Example #12
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("opinion", "courtlistener_json_checksum")
    op.drop_column("cluster", "courtlistener_json_checksum")
    # ### end Alembic commands ###
    if context.get_x_argument(as_dictionary=True).get("data", None):
        data_downgrade()
Example #13
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    ini_section = config.get_section(config.config_ini_section)

    db_path = context.get_x_argument(as_dictionary=True).get('dbPath')
    if db_path:
        ini_section['sqlalchemy.url'] += db_path

    connectable = config.attributes.get('connection', None)

    if connectable is None:
        # only create Engine if we don't have a Connection
        # from the outside
        connectable = engine_from_config(ini_section,
                                         prefix='sqlalchemy.',
                                         poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata,
                          render_as_batch=True)

        with context.begin_transaction():
            context.run_migrations()
Example #14
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint(None, "citation",
                                ["citing_opinion_id", "cited_opinion_id"])
    # ### end Alembic commands ###
    if context.get_x_argument(as_dictionary=True).get("data", None):
        data_upgrade()
Example #15
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # target DB URI from command line args
    try:
        uri = context.get_x_argument(as_dictionary=True)['uri']
    except KeyError:
        raise Exception(
            'Argument "-x uri ..." is required for online migrations!'
        )

    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        url=uri,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
Example #16
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    jobid = context.get_x_argument(as_dictionary=True).get("jobid")
    jobdb = config.get_main_option("jobdb")
    url = jobdb.format(jobid=jobid)
    engine = create_engine(
                url,
                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
                connection=connection,
                target_metadata=target_metadata
                )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
Example #17
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    ini_section = config.get_section(config.config_ini_section)

    url = context.get_x_argument(as_dictionary=True).get("db_url")
    if url is not None:
        ini_section["sqlalchemy.url"] = url

    connectable = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,
        )

        with context.begin_transaction():
            context.run_migrations()
Example #18
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    ini_section = config.get_section(config.config_ini_section)
    overrides = context.get_x_argument(as_dictionary=True)
    for override in overrides:
        ini_section[override] = overrides[override]

    connectable = engine_from_config(ini_section,
                                     prefix='sqlalchemy.',
                                     poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
            compare_server_default=True,
        )

        with context.begin_transaction():
            context.run_migrations()
Example #19
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = CONFIG.attributes.get('connectable', None)

    if connectable is None:
        ini_section = CONFIG.get_section(CONFIG.config_ini_section)

        sqlalchemy_url = context.get_x_argument(as_dictionary=True).get(
            'sqlalchemy.url', None)

        if sqlalchemy_url:
            ini_section['sqlalchemy.url'] = sqlalchemy_url

            connectable = engine_from_config(ini_section,
                                             prefix='sqlalchemy.',
                                             poolclass=pool.NullPool)
        else:
            raise RuntimeError(
                'No database URL provided, pass one with -x sqlalchemy.url=')

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=TARGET_METADATA,
                          render_as_batch=True)

        with context.begin_transaction():
            context.run_migrations()
Example #20
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = context.get_x_argument(as_dictionary=True).get(
        'sqlalchemy.url', None)

    if url:
        context.configure(url=url,
                          target_metadata=TARGET_METADATA,
                          literal_binds=True,
                          render_as_batch=True)

        with context.begin_transaction():
            context.run_migrations()
    else:
        raise RuntimeError(
            'No database URL provided, pass one with -x sqlalchemy.url=')
Example #21
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # Based on Alembic documentation
    # See http://alembic.zzzcomputing.com/en/latest/api/runtime.html#alembic.runtime.environment.EnvironmentContext.get_x_argument
    cmd_line_url = context.get_x_argument(as_dictionary=True).get('dbname')
    if cmd_line_url:
        connectable = create_engine(cmd_line_url)
    else:
        connectable = engine_from_config(config.get_section(
            config.config_ini_section),
                                         prefix='sqlalchemy.',
                                         poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
Example #22
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    url = context.get_x_argument(as_dictionary=True).get('url')
    if url:
        engine = create_engine(url)
    else:
        engine = engine_from_config(
                    config.get_section(config.config_ini_section),
                    prefix='sqlalchemy.',
                    poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
                connection=connection,
                target_metadata=target_metadata
                )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
def upgrade():
    conn = op.get_bind()
    conn.execute(text("set @@lock_wait_timeout = 20;"))
    conn.execute(text("SET FOREIGN_KEY_CHECKS=0;"))

    # Add new columns.
    conn.execute(text("ALTER TABLE genericaccount ADD COLUMN imap_username "
                      "CHAR(255) DEFAULT NULL"))
    conn.execute(text("ALTER TABLE genericaccount ADD COLUMN smtp_username "
                      "CHAR(255) DEFAULT NULL"))

    shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id'))
    if shard_id == 0:
        conn.execute(text("ALTER TABLE genericaccount ADD COLUMN imap_password_id "
                     "INT(11)"))
        conn.execute(text("ALTER TABLE genericaccount ADD COLUMN smtp_password_id "
                     "INT(11)"))
    else:
        conn.execute(text("ALTER TABLE genericaccount ADD COLUMN imap_password_id "
                          "BIGINT(20)"))
        conn.execute(text("ALTER TABLE genericaccount ADD COLUMN smtp_password_id "
                          "BIGINT(20)"))

    # Add ForeignKey constraints.
    conn.execute(text("ALTER TABLE genericaccount ADD CONSTRAINT "
                      "imap_password_id_ifbk FOREIGN KEY "
                      "(`imap_password_id`) REFERENCES `secret` (`id`)"))
    conn.execute(text("ALTER TABLE genericaccount ADD CONSTRAINT "
                      "smtp_password_id_ifbk FOREIGN KEY "
                      "(`smtp_password_id`) REFERENCES `secret` (`id`)"))
def _get_default_group_provider():
    try:
        provider = multipass.default_group_provider
    except AttributeError:
        xargs = context.get_x_argument(as_dictionary=True)
        return xargs.get('default_group_provider')
    else:
        return provider.name
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index(op.f("ix_cluster_court"),
                    "cluster", ["court"],
                    unique=False)
    # ### end Alembic commands ###
    if context.get_x_argument(as_dictionary=True).get("data", None):
        data_upgrade()
Example #27
def upgrade():
    try:
        local_srid = context.get_x_argument(as_dictionary=True)['local-srid']
    except KeyError:
        raise Exception("Missing local srid, please use -x local-srid=...")
    stmt = text(
        importlib.resources.read_text('geonature.migrations.data.core',
                                      'ref_geo.sql'))
    op.get_bind().execute(stmt, {'local_srid': local_srid})
Example #28
def downgrade():
    """Converts the data table's engine from MyISAM to InnoDB."""
    x_args = context.get_x_argument(as_dictionary=True)
    db_type = x_args.get('db_type', 'mysql+mysqlconnector')
    if db_type != 'mysql+mysqlconnector':
        return

    conn = op.get_bind()
    conn.execute('alter table data engine=innodb')
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id"))
    namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger()

    op.create_table(
        "metadata",
        sa.Column("public_id", sa.BINARY(length=16), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("app_id", sa.Integer(), nullable=True),
        sa.Column("app_client_id", sa.BINARY(length=16), nullable=False),
        sa.Column("app_type", sa.String(length=20), nullable=False),
        sa.Column("namespace_id", namespace_id_type, nullable=False),
        sa.Column("object_public_id", sa.String(length=191), nullable=False),
        sa.Column("object_type", sa.String(length=20), nullable=False),
        sa.Column("object_id", sa.BigInteger(), nullable=False),
        sa.Column("value", JSON(), nullable=True),
        sa.Column("version", sa.Integer(), server_default="0", nullable=True),
        sa.ForeignKeyConstraint(["namespace_id"], [u"namespace.id"],
                                ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_metadata_created_at"),
                    "metadata", ["created_at"],
                    unique=False)
    op.create_index(op.f("ix_metadata_deleted_at"),
                    "metadata", ["deleted_at"],
                    unique=False)
    op.create_index(op.f("ix_metadata_object_id"),
                    "metadata", ["object_id"],
                    unique=False)
    op.create_index(
        op.f("ix_metadata_object_public_id"),
        "metadata",
        ["object_public_id"],
        unique=False,
    )
    op.create_index(op.f("ix_metadata_public_id"),
                    "metadata", ["public_id"],
                    unique=False)
    op.create_index(op.f("ix_metadata_updated_at"),
                    "metadata", ["updated_at"],
                    unique=False)
    op.create_index(
        "ix_obj_public_id_app_id",
        "metadata",
        ["object_public_id", "app_id"],
        unique=True,
    )

    conn = op.get_bind()
    increment = (shard_id << 48) + 1
    conn.execute("ALTER TABLE metadata AUTO_INCREMENT={}".format(increment))
Example #30
File: env.py Project: BBN-Q/bbndb
def get_url():
    url = context.get_x_argument(as_dictionary=True).get('file')
    if url:
        url = "sqlite:///" + url
    else:
        print("Assuming default database location from alembic.ini")
        url = config.get_main_option("sqlalchemy.url")
    # assert url, "Database URL must be specified on command line with -x url=<DB_URL>"
    return url
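Only get_url() is shown for this project. A plausible offline runner (hypothetical, mirroring Examples #9 and #35) would pass its result straight to context.configure(); target_metadata is assumed to exist in the same env.py.
def run_migrations_offline():
    # Hypothetical caller for the helper above.
    context.configure(url=get_url(),
                      target_metadata=target_metadata,
                      literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()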
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('files', sa.Column('size', sa.INTEGER(), nullable=True))
    # https://alembic.sqlalchemy.org/en/latest/cookbook.html
    # http://ominian.com/2019/07/11/data-migration-with-sqlalchemy-and-alembic/
    if context.get_x_argument(as_dictionary=True).get('data', None):
        # Perform data migrations
        storage = context.get_x_argument(as_dictionary=True)['storage']
        session = orm.Session(bind=op.get_bind())
        for i in session.query(File):
            pathname = os.path.join(storage, i.actual_name)
            try:
                i.size = os.path.getsize(pathname)
            except FileNotFoundError:
                print(
                    'WARNING: File %s not found' % repr(pathname),
                    file=sys.stderr,
                )
        session.commit()
Example #32
def get_db_uri():
    # pick dev db if specified
    mode = context.get_x_argument(as_dictionary=True).get('mode', 'prod')

    if mode == 'dev':
        env_var = 'DEV_DB_URI'
    else:
        env_var = 'DB_URI'

    return os.getenv(env_var)
Example #33
def parse_allowed_packages(current_package):
    """Return list of package names we want to consider from Alembic extra arguments."""

    # Extra -x arguments passed to Alembic
    extra = context.get_x_argument(as_dictionary=True)

    # XXX: Make this a proper command line switch when writing more refined Alembic front end
    packages = extra.get("packages", current_package)
    packages = packages.split(",")

    if packages == "all":
        # Force Alembic to consider all packages
        logger.info("Considering migrations for models in all Python packages")
    else:
        logger.info("Considering migrations for models in Python packages %s", packages)

    return packages
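The excerpt does not show how the returned list is consumed. One plausible, entirely hypothetical use is an include_object hook that filters autogenerate candidates by an owning-package marker stored in each table's info dict; the 'package' info key and the allowed_packages wiring below are assumptions, not part of the original project.
allowed_packages = parse_allowed_packages("my_app")  # "my_app" is a placeholder


def include_object(obj, name, type_, reflected, compare_to):
    # Hypothetical filter: only tables tagged with an allowed package are
    # considered for autogenerate; all other object types pass through.
    if type_ == "table":
        return obj.info.get("package") in allowed_packages
    return True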
Example #34
def run_migrations_online():
    connectable = config.attributes.get('connection', None)

    if connectable is None:
        cmd_line_url = context.get_x_argument(as_dictionary=True).get('url')
        if cmd_line_url:
            connectable = create_engine(cmd_line_url)
        else:
            raise Exception("No connection URL. Use '-x url=<url>'")

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
Example #35
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = context.get_x_argument(as_dictionary=True).get('url')
    if not url:
        url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    op.create_table(
        'metadata',
        sa.Column('public_id', sa.BINARY(length=16), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('app_id', sa.Integer(), nullable=True),
        sa.Column('app_client_id', sa.BINARY(length=16), nullable=False),
        sa.Column('app_type', sa.String(length=20), nullable=False),
        sa.Column('namespace_id', sa.BigInteger(), nullable=False),
        sa.Column('object_public_id', sa.String(length=191), nullable=False),
        sa.Column('object_type', sa.String(length=20), nullable=False),
        sa.Column('object_id', sa.BigInteger(), nullable=False),
        sa.Column('value', JSON(), nullable=True),
        sa.Column('version', sa.Integer(), server_default='0', nullable=True),
        sa.ForeignKeyConstraint(['namespace_id'], [u'namespace.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_metadata_created_at'), 'metadata',
                    ['created_at'], unique=False)
    op.create_index(op.f('ix_metadata_deleted_at'), 'metadata',
                    ['deleted_at'], unique=False)
    op.create_index(op.f('ix_metadata_object_id'), 'metadata',
                    ['object_id'], unique=False)
    op.create_index(op.f('ix_metadata_object_public_id'), 'metadata',
                    ['object_public_id'], unique=False)
    op.create_index(op.f('ix_metadata_public_id'), 'metadata',
                    ['public_id'], unique=False)
    op.create_index(op.f('ix_metadata_updated_at'), 'metadata',
                    ['updated_at'], unique=False)
    op.create_index('ix_obj_public_id_app_id', 'metadata',
                    ['object_public_id', 'app_id'], unique=True)

    shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id'))
    conn = op.get_bind()
    increment = (shard_id << 48) + 1
    conn.execute('ALTER TABLE metadata AUTO_INCREMENT={}'.format(increment))
Example #37
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    env_type = context.get_x_argument(as_dictionary=True).get('env_type')

    if env_type == 'local':
        app = create_app(config=LocalConfig)
    elif env_type == 'staging':
        app = create_app(config=StagingConfig)
    elif env_type == 'prod':
        app = create_app(config=ProdConfig)
    else:
        raise Exception("environment type not specified!")
    
    alembic_config = config.get_section(config.config_ini_section)
    alembic_config['sqlalchemy.url'] = app.config['SQLALCHEMY_DATABASE_URI']

    engine = engine_from_config(
                alembic_config,
                prefix='sqlalchemy.',
                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
                connection=connection,
                target_metadata=db.metadata,
                include_object=include_object
                )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
def upgrade():
    shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id"))
    namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger()

    op.create_table(
        "accounttransaction",
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("public_id", sa.BINARY(length=16), nullable=False),
        sa.Column("namespace_id", namespace_id_type, nullable=True),
        sa.Column("object_type", sa.String(20), nullable=False),
        sa.Column("record_id", sa.BigInteger(), nullable=False),
        sa.Column("object_public_id", sa.String(191), nullable=False),
        sa.Column("command", sa.Enum("insert", "update", "delete"), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["namespace_id"], [u"namespace.id"]),
    )
    op.create_index("ix_accounttransaction_created_at", "accounttransaction", ["created_at"], unique=False)
    op.create_index("ix_accounttransaction_updated_at", "accounttransaction", ["updated_at"], unique=False)
    op.create_index("ix_accounttransaction_deleted_at", "accounttransaction", ["deleted_at"], unique=False)
    op.create_index("ix_accounttransaction_table_name", "accounttransaction", ["object_type"], unique=False)
    op.create_index("ix_accounttransaction_command", "accounttransaction", ["command"], unique=False)
    op.create_index(
        "ix_accounttransaction_object_type_record_id", "accounttransaction", ["object_type", "record_id"], unique=False
    )
    op.create_index(
        "ix_accounttransaction_namespace_id_created_at",
        "accounttransaction",
        ["namespace_id", "created_at"],
        unique=False,
    )

    conn = op.get_bind()
    increment = (shard_id << 48) + 1
    conn.execute("ALTER TABLE accounttransaction AUTO_INCREMENT={}".format(increment))
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id"))
    namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger()

    op.create_table(
        "metadata",
        sa.Column("public_id", sa.BINARY(length=16), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("app_id", sa.Integer(), nullable=True),
        sa.Column("app_client_id", sa.BINARY(length=16), nullable=False),
        sa.Column("app_type", sa.String(length=20), nullable=False),
        sa.Column("namespace_id", namespace_id_type, nullable=False),
        sa.Column("object_public_id", sa.String(length=191), nullable=False),
        sa.Column("object_type", sa.String(length=20), nullable=False),
        sa.Column("object_id", sa.BigInteger(), nullable=False),
        sa.Column("value", JSON(), nullable=True),
        sa.Column("version", sa.Integer(), server_default="0", nullable=True),
        sa.ForeignKeyConstraint(["namespace_id"], [u"namespace.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_metadata_created_at"), "metadata", ["created_at"], unique=False)
    op.create_index(op.f("ix_metadata_deleted_at"), "metadata", ["deleted_at"], unique=False)
    op.create_index(op.f("ix_metadata_object_id"), "metadata", ["object_id"], unique=False)
    op.create_index(op.f("ix_metadata_object_public_id"), "metadata", ["object_public_id"], unique=False)
    op.create_index(op.f("ix_metadata_public_id"), "metadata", ["public_id"], unique=False)
    op.create_index(op.f("ix_metadata_updated_at"), "metadata", ["updated_at"], unique=False)
    op.create_index("ix_obj_public_id_app_id", "metadata", ["object_public_id", "app_id"], unique=True)

    conn = op.get_bind()
    increment = (shard_id << 48) + 1
    conn.execute("ALTER TABLE metadata AUTO_INCREMENT={}".format(increment))
Example #40
from alembic import context
from sqlalchemy import create_engine, pool

from cuckoo.distributed.db import db
from cuckoo.distributed.misc import init_settings, settings

from cuckoo.misc import set_cwd

set_cwd(context.get_x_argument(as_dictionary=True)["cwd"])
init_settings()

config = context.config

def run_migrations():
    engine = create_engine(settings.SQLALCHEMY_DATABASE_URI,
                           poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection, target_metadata=db.metadata)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()

run_migrations()
Example #41
def downgrade():
    if context.get_x_argument(as_dictionary=True).get('data', None):
        data_downgrades()
    schema_downgrades()
Example #42
def upgrade():
    schema_upgrades()
    if context.get_x_argument(as_dictionary=True).get('data', None):
        data_upgrades()
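Several examples on this page (#12, #14, #41, #42) share the same cookbook split: schema changes always run, data migrations only when the operator passes -x data=true. Below is a minimal, self-contained sketch of such a migration file; the table and column names are invented for illustration.
from alembic import context, op
import sqlalchemy as sa


def schema_upgrades():
    # Structural change: always applied.
    op.add_column('account', sa.Column('display_name', sa.String(255), nullable=True))


def data_upgrades():
    # Backfill: only runs with `alembic -x data=true upgrade head`.
    op.execute("UPDATE account SET display_name = login WHERE display_name IS NULL")


def upgrade():
    schema_upgrades()
    if context.get_x_argument(as_dictionary=True).get('data', None):
        data_upgrades()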
Example #43
# target_metadata = {
#       'engine1':mymodel.metadata1,
#       'engine2':mymodel.metadata2
#}

import inspect, os, sys  # first update the path to actually load the base models
sys.path.append(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + "/..")
# then import the models and load them
from app import models

metadata = {
    'development': models.Base.metadata,
    'test': models.Base.metadata,
    'production': models.Base.metadata}

args = context.get_x_argument()
target_metadata = {env:metadata[env] for env in args} if args else metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.
Example #44
target_metadata = MailSyncBase.metadata

from inbox.config import config
from inbox.ignition import EngineManager


# Alembic configuration is confusing. Here we look for a shard id both as a
# "main option" (where it's programmatically set by bin/create-db), and in the
# "x" argument, which is the primary facility for passing additional
# command-line args to alembic. So you would do e.g.
#
# alembic -x shard_id=1 upgrade +1
#
# to target shard 1 for the migration.
config_shard_id = context.config.get_main_option("shard_id")
x_shard_id = context.get_x_argument(as_dictionary=True).get("shard_id")

if config_shard_id is not None:
    shard_id = int(config_shard_id)
elif x_shard_id is not None:
    shard_id = int(x_shard_id)
else:
    raise ValueError(
        "No shard_id is configured for migration; " "run `alembic -x shard_id=<target shard id> upgrade +1`"
    )


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
Example #45
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

env = context.get_x_argument(as_dictionary=True).get('env', 'dev')

connectable = engine_from_config(
    config.get_section(env),
    prefix='sqlalchemy.',
    poolclass=pool.NullPool)

connection = connectable.connect()
target_metadata = MetaData(
    bind=connection,
    naming_convention=NAMING_CONVENTION
)

def run_migrations_offline():
    """Run migrations in 'offline' mode.
Example #46
# access to the values within the .ini file in use.
config = context.config
config_url = config.get_main_option('sqlalchemy.url')

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = models.Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# support for passing database URL on the command line
# usage:  alembic -x db_url=postgresql://localhost/orcmdb upgrade head
cmd_line_url = context.get_x_argument(as_dictionary=True).get('db_url')
# support passing database URL as tag in upgrade() function call.
# usage:  command.upgrade(Config(alembic_ini), "head", tag=db_url)
tag_url = context.get_tag_argument()

missing_db_url_msg = ("Please set the database connection string in "
                      "either 'PG_DB_URL' environment variable or specify "
                      "it in the schema_migration config file under "
                      "'sqlalchemy.url'.\nConnection string pattern:\n"
                      "postgresql[+<driver>://[<username>[:<password>]]"
                      "@<server>[:<port>]/<database>\n\n"
                      "http://docs.sqlalchemy.org/en/latest/core/"
                      "engines.html#database-urls")

DB_URL_ENVIRONMENT_VAR = 'PG_DB_URL'
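The excerpt ends before the URL is actually chosen. A plausible resolution helper is sketched below; the function name and the precedence order (command-line -x db_url, then the upgrade tag, then the PG_DB_URL environment variable, then alembic.ini) are assumptions rather than the project's actual code.
import os


def get_db_url():
    # Hypothetical helper combining the candidate sources declared above.
    url = (cmd_line_url
           or tag_url
           or os.environ.get(DB_URL_ENVIRONMENT_VAR)
           or config_url)
    if not url:
        raise SystemExit(missing_db_url_msg)
    return url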
Example #47
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
from mimir import models
from montague.loadwsgi import Loader

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

xargs = context.get_x_argument(as_dictionary=True)
loader = Loader(xargs['montague_file'])
montague_env = xargs['montague_env']
app_config = loader.app_config(montague_env).config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = models.Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
Example #48
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # use the 2-phase protocol to make sure that the migration is applied
    # correctly to all databases or none.
    twophase_argument = context.get_x_argument(
        as_dictionary=True).get('twophase', True)
    use_two_phase = twophase_argument != 'False' and \
                    twophase_argument != 'false'
    logger.info("Using two-phase commit: %s" % use_two_phase)

    opts = config.cmd_opts
    if opts and 'autogenerate' in opts and opts.autogenerate:
        # when generating migration scripts only check the 'admin' db
        names = ['admin']
    else:
        names = ['public', 'admin']

    # for the direct-to-DB use case, start a transaction on all
    # engines, then run all migrations, then commit all transactions.
    engines = {}
    for name in names:
        settings = config.get_section(config.config_ini_section)
        settings.update(config.get_section('app:{}'.format(name)))
        load_local_settings(settings, name)
        engine = engine_from_config(
            settings,
            prefix='sqlalchemy.',
            poolclass=pool.NullPool)

        try:
            connection = engine.connect()
        except OperationalError as exc:
            if name == 'public':
                # if the 'public' database was not created yet, skip
                logger.warning(
                    'failed to connect to public database (skipping): '
                    '{}'.format(exc))
                continue
            else:
                raise exc

        engines[name] = {
            'engine': engine,
            'connection': connection,
            'transaction': connection.begin_twophase() if use_two_phase
            else connection.begin()
        }

    try:
        for name, rec in engines.items():
            logger.info("Migrating database %s" % name)
            context.configure(
                connection=rec['connection'],
                upgrade_token="upgrades",
                downgrade_token="downgrades",
                target_metadata=target_metadata,
                include_object=include_object,
                include_schemas=True,
            )
            context.run_migrations(engine_name=name)

        if use_two_phase:
            for rec in engines.values():
                rec['transaction'].prepare()

        for rec in engines.values():
            rec['transaction'].commit()
    except:
        for rec in engines.values():
            rec['transaction'].rollback()
        raise
    finally:
        for rec in engines.values():
            rec['connection'].close()