Example #1
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engine = api.get_engine()
    with engine.connect() as connection:
        context.configure(connection=connection,
                          render_as_batch=True,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
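For context, this is how such a function is usually invoked at the bottom of an Alembic env.py. The offline branch and the run_migrations_offline name below follow the standard Alembic template and are not part of the example above:

if context.is_offline_mode():
    # "offline" mode renders SQL to a script instead of using a live connection
    run_migrations_offline()
else:
    run_migrations_online()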
Example #2
def schema_upgrade(revision=None, config=None, engine=None):
    """Used for upgrading database.

    :param revision: Desired database version
    :type revision: string
    :param config: Instance of alembic config
    :param engine: Instance of DB engine
    """
    revision = revision or "head"
    config = config or _alembic_config()
    engine = engine or api.get_engine()

    if schema_revision() is None:
        schema_stamp(INITIAL_REVISION_UUID, config=config)

    alembic.command.upgrade(config, revision or "head")
Example #3
def schema_upgrade(revision=None, config=None, engine=None):
    """Used for upgrading database.

    :param revision: Desired database version
    :type revision: string
    :param config: Instance of alembic config
    :param engine: Instance of DB engine
    """
    revision = revision or "head"
    config = config or _alembic_config()
    engine = engine or api.get_engine()

    if schema_revision(engine=engine) is None:
        schema_stamp(INITIAL_REVISION_UUID, config=config)

    alembic.command.upgrade(config, revision or "head")
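The only difference from Example #2 is that schema_revision() here receives the engine explicitly, so the initial-revision check runs against the same database that is about to be upgraded. Either variant is called the same way; a couple of hypothetical call sites:

schema_upgrade()                          # upgrade to the latest revision ("head")
schema_upgrade(config=_alembic_config())  # with an explicitly built alembic config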
Example #4
def schema_create(config=None, engine=None):
    """Create database schema from models description.

    Can be used for initial installation instead of upgrade('head').
    :param config: Instance of alembic config
    :param engine: Instance of DB engine
    """
    engine = engine or api.get_engine()

    # NOTE(viktors): If we use metadata.create_all() on a non-empty db schema,
    #                it will only add the new tables and leave the existing
    #                ones as they are, so we should avoid that situation.
    if schema_revision(engine=engine) is not None:
        raise exceptions.DBMigrationError("DB schema is already under version"
                                          " control. Use upgrade() instead")

    models.BASE.metadata.create_all(engine)
    schema_stamp("head", config=config)
Example #5
def schema_cleanup():
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the given
    engine. Per-db implementations will also need to drop items specific to
    those systems, such as sequences, custom types (e.g. pg ENUM), etc.
    """
    engine = api.get_engine()
    with engine.begin() as conn:
        inspector = sa.inspect(engine)
        metadata = sa.schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk["name"]:
                    continue
                fks.append(
                    sa.schema.ForeignKeyConstraint((), (), name=fk["name"]))
            table = sa.schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if engine.name != "sqlite":
            for fkc in all_fks:
                conn.execute(sa.schema.DropConstraint(fkc))
        for table in tbs:
            conn.execute(sa.schema.DropTable(table))

        if engine.name == "postgresql":
            sqla_100 = int(sa.__version__.split(".")[0]) >= 1

            if sqla_100:
                enums = [e["name"] for e in sa.inspect(conn).get_enums()]
            else:
                enums = conn.dialect._load_enums(conn).keys()

            for e in enums:
                conn.execute("DROP TYPE %s" % e)
Example #6
def schema_revision(config=None, engine=None, detailed=False):
    """Current database revision.

    :param config: Instance of alembic config
    :param engine: Instance of DB engine
    :param detailed: whether to return a dict with detailed data
    :type detailed: bool
    :returns: Database revision
    :rtype: str or dict
    """
    engine = engine or api.get_engine()
    with engine.connect() as conn:
        context = alembic.migration.MigrationContext.configure(conn)
        revision = context.get_current_revision()
    if detailed:
        config = config or _alembic_config()
        sc_dir = alembic.script.ScriptDirectory.from_config(config)
        return {"revision": revision,
                "current_head": sc_dir.get_current_head()}
    return revision
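A short sketch of both return shapes (the actual values depend on the database state):

print(schema_revision())               # revision id as a string, or None if unversioned
info = schema_revision(detailed=True)
print(info["revision"], info["current_head"])

The upgrade() function below is the Alembic migration that creates the initial schema: the deployments, workers, resources, tasks, verifications, task_results and verification_results tables, together with their indexes.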
def upgrade():
    dialect = api.get_engine().dialect

    deployments_columns = [
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("uuid", sa.String(length=36), nullable=False),
        sa.Column("parent_uuid", sa.String(length=36), nullable=True),
        sa.Column("name", sa.String(length=255), nullable=True),
        sa.Column("started_at", sa.DateTime(), nullable=True),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.Column("config", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("admin", sa.PickleType(), nullable=True),
        sa.Column("users", sa.PickleType(), nullable=False),
        sa.Column("enum_deployments_status",
                  sa.Enum("cleanup->failed",
                          "cleanup->finished",
                          "cleanup->started",
                          "deploy->failed",
                          "deploy->finished",
                          "deploy->inconsistent",
                          "deploy->init",
                          "deploy->started",
                          "deploy->subdeploy",
                          name="enum_deploy_status"),
                  nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name")
    ]

    if dialect.name.startswith("sqlite"):
        deployments_columns.append(
            sa.ForeignKeyConstraint(["parent_uuid"], [u"deployments.uuid"],
                                    name="fk_parent_uuid",
                                    use_alter=True))

    # commands auto generated by Alembic - please adjust!
    op.create_table("deployments", *deployments_columns)

    op.create_index("deployment_parent_uuid",
                    "deployments", ["parent_uuid"],
                    unique=False)

    op.create_index("deployment_uuid", "deployments", ["uuid"], unique=True)

    if not dialect.name.startswith("sqlite"):
        op.create_foreign_key("fk_parent_uuid", "deployments", "deployments",
                              ["parent_uuid"], ["uuid"])

    op.create_table(
        "workers", sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("hostname", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("hostname", name="uniq_worker@hostname"))

    op.create_table(
        "resources", sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("provider_name", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=255), nullable=True),
        sa.Column("info", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("deployment_uuid", sa.String(length=36), nullable=False),
        sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"]),
        sa.PrimaryKeyConstraint("id"))
    op.create_index("resource_deployment_uuid",
                    "resources", ["deployment_uuid"],
                    unique=False)

    op.create_index("resource_provider_name",
                    "resources", ["deployment_uuid", "provider_name"],
                    unique=False)

    op.create_index("resource_provider_name_and_type",
                    "resources", ["deployment_uuid", "provider_name", "type"],
                    unique=False)

    op.create_index("resource_type",
                    "resources", ["deployment_uuid", "type"],
                    unique=False)

    op.create_table(
        "tasks", sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("uuid", sa.String(length=36), nullable=False),
        sa.Column("status",
                  sa.Enum("aborted",
                          "aborting",
                          "cleaning up",
                          "failed",
                          "finished",
                          "init",
                          "paused",
                          "running",
                          "setting up",
                          "soft_aborting",
                          "verifying",
                          name="enum_tasks_status"),
                  nullable=False),
        sa.Column("verification_log", sa.Text(), nullable=True),
        sa.Column("tag", sa.String(length=64), nullable=True),
        sa.Column("deployment_uuid", sa.String(length=36), nullable=False),
        sa.ForeignKeyConstraint(
            ["deployment_uuid"],
            [u"deployments.uuid"],
        ), sa.PrimaryKeyConstraint("id"))

    op.create_index("task_deployment",
                    "tasks", ["deployment_uuid"],
                    unique=False)

    op.create_index("task_status", "tasks", ["status"], unique=False)

    op.create_index("task_uuid", "tasks", ["uuid"], unique=True)

    op.create_table(
        "verifications", sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("uuid", sa.String(length=36), nullable=False),
        sa.Column("deployment_uuid", sa.String(length=36), nullable=False),
        sa.Column("status",
                  sa.Enum("aborted",
                          "aborting",
                          "cleaning up",
                          "failed",
                          "finished",
                          "init",
                          "paused",
                          "running",
                          "setting up",
                          "soft_aborting",
                          "verifying",
                          name="enum_tasks_status"),
                  nullable=False),
        sa.Column("set_name", sa.String(length=20), nullable=True),
        sa.Column("tests", sa.Integer(), nullable=True),
        sa.Column("errors", sa.Integer(), nullable=True),
        sa.Column("failures", sa.Integer(), nullable=True),
        sa.Column("time", sa.Float(), nullable=True),
        sa.ForeignKeyConstraint(
            ["deployment_uuid"],
            [u"deployments.uuid"],
        ), sa.PrimaryKeyConstraint("id"))

    op.create_index("verification_uuid",
                    "verifications", ["uuid"],
                    unique=True)

    op.create_table(
        "task_results", sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("key", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("data", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("task_uuid", sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(
            ["task_uuid"],
            ["tasks.uuid"],
        ), sa.PrimaryKeyConstraint("id"))

    op.create_table(
        "verification_results",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("verification_uuid", sa.String(length=36), nullable=True),
        sa.Column("data", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.ForeignKeyConstraint(["verification_uuid"], ["verifications.uuid"]),
        sa.PrimaryKeyConstraint("id"))