def test_stamp_api_creates_table(self):
        # Stamping must lazily create the alembic_version table and record
        # every head returned by the script's _stamp_revs hook.
        context = self.make_one(connection=self.connection)
        tables_before = Inspector(self.connection).get_table_names()
        assert 'alembic_version' not in tables_before

        script = mock.Mock(
            _stamp_revs=lambda revision, heads:
            [_up(None, 'a', True), _up(None, 'b', True)])

        context.stamp(script, 'b')
        eq_(context.get_current_heads(), ('a', 'b'))
        tables_after = Inspector(self.connection).get_table_names()
        assert 'alembic_version' in tables_after
# --- Example 2 (scraped snippet separator; vote count: 0) ---
def find_pending(dburl):
    """Return (buildername, id) rows for pending, unclaimed build requests.

    Pending means: not complete, submitted more than 10 seconds but less
    than 24 hours ago, and not claimed by any builder.

    :param dburl: SQLAlchemy database URL of the buildbot scheduler db.
    :returns: list of result rows (buildername, id).
    """
    db = sa.create_engine(dburl)
    try:
        inspector = Inspector(db)
        # Newer buildbot tracks claims in a "buildrequest_claims" table;
        # older buildbot records the claim time directly on the request row.
        if "buildrequest_claims" in inspector.get_table_names():
            query = sa.text("""
            SELECT buildername, id FROM
                   buildrequests WHERE
                   complete=0 AND
                   submitted_at > :yesterday AND
                   submitted_at < :toonew AND
                   (select count(brid) from buildrequest_claims
                           where brid=id) = 0""")
        else:
            query = sa.text("""
            SELECT buildername, id FROM
                   buildrequests WHERE
                   complete=0 AND
                   claimed_at=0 AND
                   submitted_at > :yesterday AND
                   submitted_at < :toonew""")

        # Sample the clock once so both bounds are mutually consistent
        # (the original called time.time() twice).
        now = time.time()
        result = db.execute(query,
                            yesterday=now - 86400,
                            toonew=now - 10)
        try:
            return result.fetchall()
        finally:
            result.close()
    finally:
        # The engine is created per call: release its pooled connections
        # instead of leaking them.
        db.dispose()
 def test_compare_get_schema_names_for_sql_and_odbc(self, engine_name):
     """SQL-fallback and ODBC schema listings must agree (order aside)."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         via_odbc = dialect.get_schema_names(connection=conn)
         via_sql = dialect.get_schema_names(connection=conn,
                                            use_sql_fallback=True)
         assert sorted(via_sql) == sorted(via_odbc)
# --- Example 4 (scraped snippet separator; vote count: 0) ---
 def __init__(self, sqla_conn, args, schema=None):
     """Reflect every table reachable via *sqla_conn* and decorate each
     Table object with sampling helpers and foreign-key bookkeeping.

     :param sqla_conn: SQLAlchemy connection URL/string.
     :param args: parsed command-line options (stored for later use).
     :param schema: optional schema name to restrict reflection to.
     """
     self.args = args
     self.sqla_conn = sqla_conn
     self.schema = schema
     self.engine = sa.create_engine(sqla_conn)
     self.meta = sa.MetaData(
         bind=self.engine)  # excised schema=schema to prevent errors
     self.meta.reflect(schema=self.schema)
     self.inspector = Inspector(bind=self.engine)
     self.conn = self.engine.connect()
     self.tables = OrderedDict()
     # First pass: attach helper methods and reflected metadata to every
     # table. tbl.fks is consumed by the second pass below.
     for tbl in self.meta.sorted_tables:
         tbl.db = self
         # TODO: Replace all these monkeypatches with an instance assigment
         tbl.find_n_rows = types.MethodType(_find_n_rows, tbl)
         tbl.random_row_func = types.MethodType(_random_row_func, tbl)
         tbl.fks = self.inspector.get_foreign_keys(tbl.name,
                                                   schema=tbl.schema)
         # NOTE(review): get_primary_keys() is deprecated in newer
         # SQLAlchemy in favour of get_pk_constraint() — confirm the
         # pinned SQLAlchemy version before changing.
         tbl.pk = self.inspector.get_primary_keys(tbl.name,
                                                  schema=tbl.schema)
         tbl.filtered_by = types.MethodType(_filtered_by, tbl)
         tbl.by_pk = types.MethodType(_by_pk, tbl)
         tbl.pk_val = types.MethodType(_pk_val, tbl)
         tbl.exists = types.MethodType(_exists, tbl)
         tbl.child_fks = []
         tbl.find_n_rows(estimate=True)
         self.tables[(tbl.schema, tbl.name)] = tbl
     # Second pass: annotate each FK with its constrained side and link it
     # onto the referred (parent) table's child_fks list.
     for ((tbl_schema, tbl_name), tbl) in self.tables.items():
         for fk in tbl.fks:
             fk['constrained_schema'] = tbl_schema
             fk['constrained_table'] = tbl_name  # TODO: check against constrained_table
             self.tables[(fk['referred_schema'],
                          fk['referred_table'])].child_fks.append(fk)
# --- Example 5 (scraped snippet separator; vote count: 0) ---
 def test_get_unique_constraints(self):
     """Unique constraints are reflected for both plain and TEMPORARY
     tables."""
     meta = MetaData(testing.db)
     t1 = Table('foo', meta, Column('f', Integer),
                UniqueConstraint('f', name='foo_f'))
     t2 = Table('bar', meta,
                Column('b', Integer),
                UniqueConstraint('b', name='bar_b'),
                prefixes=['TEMPORARY'])
     meta.create_all()
     from sqlalchemy.engine.reflection import Inspector
     try:
         insp = Inspector(testing.db)
         expected = [
             ('foo', [{'column_names': [u'f'], 'name': u'foo_f'}]),
             ('bar', [{'column_names': [u'b'], 'name': u'bar_b'}]),
         ]
         for table_name, constraints in expected:
             eq_(insp.get_unique_constraints(table_name), constraints)
     finally:
         meta.drop_all()
# --- Example 6 (scraped snippet separator; vote count: 0) ---
    def __init__(self, sqla_conn, args, schemas=None):
        """Reflect the selected tables and decorate each Table object with
        sampling helpers and foreign-key bookkeeping.

        :param sqla_conn: SQLAlchemy connection URL/string.
        :param args: parsed command-line options; ``tables``,
            ``exclude_tables``, ``full_tables`` and ``config`` are read.
        :param schemas: iterable of schema names to reflect; ``None``
            (the default) reflects only the engine's default schema.

        BUG FIX: the default was the mutable list ``[None]``, shared across
        every call; replaced with a ``None`` sentinel (same behavior).
        """
        if schemas is None:
            schemas = [None]
        self.args = args
        self.sqla_conn = sqla_conn
        self.schemas = schemas
        self.engine = sa.create_engine(sqla_conn)
        self.inspector = Inspector(bind=self.engine)
        self.conn = self.engine.connect()
        self.tables = OrderedDict()

        for schema in self.schemas:
            meta = sa.MetaData(
                bind=self.engine)  # excised schema=schema to prevent errors
            meta.reflect(schema=schema)
            for tbl in meta.sorted_tables:
                # Honour include/exclude table patterns from the CLI.
                if args.tables and not _table_matches_any_pattern(
                        tbl.schema, tbl.name, self.args.tables):
                    continue
                if _table_matches_any_pattern(tbl.schema, tbl.name,
                                              self.args.exclude_tables):
                    continue
                tbl.db = self

                if self.engine.name == 'postgresql':
                    fix_postgres_array_of_enum(self.conn, tbl)

                # TODO: Replace all these monkeypatches with an instance assigment
                tbl.find_n_rows = types.MethodType(_find_n_rows, tbl)
                tbl.random_row_func = types.MethodType(_random_row_func, tbl)
                tbl.fks = self.inspector.get_foreign_keys(tbl.name,
                                                          schema=tbl.schema)
                # NOTE(review): get_primary_keys() is deprecated upstream in
                # favour of get_pk_constraint(); kept for compatibility.
                tbl.pk = self.inspector.get_primary_keys(tbl.name,
                                                         schema=tbl.schema)
                if not tbl.pk:
                    # No declared PK: fall back to treating every column as
                    # part of the key.
                    tbl.pk = [
                        d['name']
                        for d in self.inspector.get_columns(tbl.name,
                                                            schema=tbl.schema)
                    ]
                tbl.filtered_by = types.MethodType(_filtered_by, tbl)
                tbl.by_pk = types.MethodType(_by_pk, tbl)
                tbl.pk_val = types.MethodType(_pk_val, tbl)
                tbl.child_fks = []
                # Tables matching --full are counted exactly, not estimated.
                estimate_rows = not _table_matches_any_pattern(
                    tbl.schema, tbl.name, self.args.full_tables)
                tbl.find_n_rows(estimate=estimate_rows)
                self.tables[(tbl.schema, tbl.name)] = tbl
        # Second pass: merge reflected FKs with config-supplied constraints
        # and link each onto its referred (parent) table's child_fks list.
        all_constraints = args.config.get('constraints', {})
        for ((tbl_schema, tbl_name), tbl) in self.tables.items():
            qualified = "{}.{}".format(tbl_schema, tbl_name)
            if qualified in all_constraints:
                constraints = all_constraints[qualified]
            else:
                constraints = all_constraints.get(tbl_name, [])
            tbl.constraints = constraints
            for fk in (tbl.fks + constraints):
                fk['constrained_schema'] = tbl_schema
                fk['constrained_table'] = tbl_name  # TODO: check against constrained_table
                self.tables[(fk['referred_schema'],
                             fk['referred_table'])].child_fks.append(fk)
 def test_has_table_table_exists_not(self, use_sql_fallback, engine_name):
     """has_table must be False for a table that was never created."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         found = dialect.has_table(connection=conn,
                                   schema=self.schema,
                                   table_name="not_exist",
                                   use_sql_fallback=use_sql_fallback)
         assert not found, "Table %s.not_exist was found, but should not exist" % self.schema
 def test_get_view_names(self, use_sql_fallback, engine_name):
     """The fixture view "v" must appear in the reflected view names."""
     with self.engine_map[engine_name].begin() as conn:
         names = Inspector(conn).dialect.get_view_names(
             connection=conn,
             schema=self.schema,
             use_sql_fallback=use_sql_fallback)
         assert "v" in names
 def test_get_columns_table_name_none(self, use_sql_fallback, engine_name):
     """get_columns with table_name=None must return an empty list."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         cols = dialect.get_columns(connection=conn,
                                    schema=self.schema,
                                    table_name=None,
                                    use_sql_fallback=use_sql_fallback)
         assert cols == []
# --- Example 10 (scraped snippet separator; vote count: 0) ---
    def detect_changed(self):
        """Detect the difference between the metadata and the database.

        :rtype: MigrationReport instance
        """
        inspector = Inspector(self.conn)
        changes = compare_metadata(self.context, self.metadata)
        changes.extend(
            self.detect_undetected_constraint_from_alembic(inspector))
        return MigrationReport(self, changes)
# --- Example 11 (scraped snippet separator; vote count: 0) ---
 def test_deprecated_get_primary_keys(self):
     """Calling get_primary_keys must raise the deprecation warning that
     points at get_pk_constraint."""
     insp = Inspector(self.metadata.bind)
     assert_raises_message(
         sa_exc.SADeprecationWarning,
         "Call to deprecated method get_primary_keys."
         "  Use get_pk_constraint instead.", insp.get_primary_keys,
         self.tables.users.name)
 def test_get_view_definition(self, use_sql_fallback, engine_name):
     """The reflected definition of view "v" matches the fixture DDL."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         definition = dialect.get_view_definition(
             connection=conn,
             schema=self.schema,
             view_name="v",
             use_sql_fallback=use_sql_fallback)
         # "view_defintion" (sic) is the fixture attribute's actual name.
         assert definition == self.view_defintion
# --- Example 13 (scraped snippet separator; vote count: 0) ---
    def setUp(self):
        """Build an in-memory sqlite db with one table plus the helpers
        (migration context, idempotent ops, inspector) the tests rely on."""
        engine = create_engine('sqlite:///:memory:')
        self.connection = engine.connect()
        self.metadata = MetaData(self.connection)
        self.table = Table('thingy', self.metadata,
                           Column('thingy_id', Integer, primary_key=True))
        self.metadata.create_all()
        self.migration_context = MigrationContext.configure(self.connection)
        self.op = IdempotentOperations(self.migration_context, self)
        self.inspector = Inspector(self.connection)
 def test_get_pk_constraint(self, use_sql_fallback, engine_name):
     """Table "t" exposes a composite PK with a system-generated name."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         pk = dialect.get_pk_constraint(
             connection=conn,
             schema=self.schema,
             table_name="t",
             use_sql_fallback=use_sql_fallback)
         assert pk["constrained_columns"] == ['pid1', 'pid2']
         assert pk["name"].startswith("sys_")
 def test_get_pk_constraint_table_name_none(self, use_sql_fallback,
                                            engine_name):
     """get_pk_constraint with table_name=None must return None."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         result = dialect.get_pk_constraint(
             connection=conn,
             schema=self.schema,
             table_name=None,
             use_sql_fallback=use_sql_fallback)
         assert result is None
 def test_compare_has_table_for_sql_and_odbc(self, schema, engine_name):
     """ODBC and SQL-fallback must agree on whether table "t" exists."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         via_fallback = dialect.has_table(connection=conn,
                                          schema=schema,
                                          use_sql_fallback=True,
                                          table_name="t")
         via_odbc = dialect.has_table(connection=conn,
                                      schema=schema,
                                      table_name="t")
         assert via_fallback == via_odbc, "Expected table %s.t with odbc and fallback" % schema
# --- Example 17 (scraped snippet separator; vote count: 0) ---
def _check_schema_consistency(config, db_name, schema_name, parsed_schema,
                              schema_version, schema_description,
                              target_engine):
    """Verify that the live database matches the parsed schema DDL.

    Checks that the schema exists, that every parsed table exists with the
    same column count and column names, and that the stored schema
    version/description match the expected values.

    :param config: object exposing ``log`` for error reporting.
    :param db_name: database name (used in messages and the version query).
    :param schema_name: schema to inspect.
    :param parsed_schema: mapping of table name -> parsed table dict with a
        ``"columns"`` list of ``{"name": ...}`` dicts.
    :param schema_version: expected version value.
    :param schema_description: expected description value.
    :param target_engine: SQLAlchemy engine for the target server.
    :raises MetaBException: on any mismatch.
    """
    # Connect to the server that has database that is being added
    from sqlalchemy.engine.reflection import Inspector

    inspector = Inspector(target_engine)

    if schema_name not in inspector.get_schema_names():
        config.log.error("Schema '%s' not found.", db_name)
        raise MetaBException(MetaBException.DB_DOES_NOT_EXIST, db_name)

    db_tables = inspector.get_table_names(schema=schema_name)

    for table_name, parsed_table in parsed_schema.items():
        # Check parsed tables - we allow other tables in schema
        if table_name not in db_tables:
            config.log.error(
                "Table '%s' not found in db, present in ascii file.",
                table_name)
            raise MetaBException(MetaBException.TB_NOT_IN_DB, table_name)

        db_columns = inspector.get_columns(table_name=table_name,
                                           schema=schema_name)
        parsed_columns = parsed_table["columns"]
        if len(parsed_columns) != len(db_columns):
            config.log.error(
                "Number of columns in db for table %s (%d) "
                "differs from number columns in schema (%d)", table_name,
                len(db_columns), len(parsed_columns))
            raise MetaBException(MetaBException.NOT_MATCHING)

        # BUG FIX: get_columns() returns a list of dicts, so the original
        # "column_name not in db_columns" compared a string to dicts and
        # was true for EVERY column. Test membership against the names.
        db_column_names = {col["name"] for col in db_columns}
        for column in parsed_columns:
            column_name = column["name"]
            if column_name not in db_column_names:
                config.log.error(
                    "Column '%s.%s' not found in db, "
                    "but exists in schema DDL", table_name, column_name)
                raise MetaBException(MetaBException.COL_NOT_IN_TB, column_name,
                                     table_name)

    # Get schema description and version, it is ok if it is missing.
    # SECURITY NOTE: db_name is interpolated directly into SQL; it must
    # come from trusted configuration, never from user input.
    ret = target_engine.execute(
        "SELECT version, descr FROM %s.ZZZ_Schema_Description" % db_name)
    if ret.rowcount != 1:
        config.log.error("Db '%s' does not contain schema version/description",
                         db_name)
    else:
        (found_schema_version, found_schema_description) = ret.first()
        if found_schema_version != schema_version or \
                found_schema_description != schema_description:
            raise MetaBException(
                MetaBException.NOT_MATCHING,
                "Schema name or description does not match defined values.")
 def test_compare_get_view_names_for_sql_and_odbc(self, schema,
                                                  engine_name):
     """ODBC and SQL-fallback view listings must be identical."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         conn.execute("OPEN SCHEMA %s" % self.schema)
         via_fallback = dialect.get_view_names(connection=conn,
                                               schema=schema,
                                               use_sql_fallback=True)
         via_odbc = dialect.get_view_names(connection=conn, schema=schema)
         assert via_fallback == via_odbc
# --- Example 19 (scraped snippet separator; vote count: 0) ---
 def test_dont_reflect_autoindex(self):
     """sqlite's implicit PK autoindex is hidden unless explicitly asked
     for via include_auto_indexes."""
     meta = self.metadata
     Table('foo', meta, Column('bar', String, primary_key=True))
     meta.create_all()
     insp = Inspector(testing.db)
     eq_(insp.get_indexes('foo'), [])
     expected = [{
         'unique': 1,
         'name': 'sqlite_autoindex_foo_1',
         'column_names': ['bar']
     }]
     eq_(insp.get_indexes('foo', include_auto_indexes=True), expected)
def upgrade(migrate_engine):
    """Add a nullable uuid column to services/shadow_services and create a
    unique index on services.uuid; both steps are idempotent."""
    meta = MetaData(bind=migrate_engine)
    for prefix in ('', 'shadow_'):
        table = Table(prefix + 'services', meta, autoload=True)
        if not hasattr(table.c, 'uuid'):
            table.create_column(Column('uuid', String(36), nullable=True))

    uuid_index_name = 'services_uuid_idx'
    existing = {ix['name']
                for ix in Inspector(migrate_engine).get_indexes('services')}
    if uuid_index_name not in existing:
        services = Table('services', meta, autoload=True)
        Index(uuid_index_name, services.c.uuid, unique=True).create()
 def test_dont_reflect_autoindex(self):
     """sqlite's implicit PK autoindex is only reported when asked for."""
     meta = MetaData(testing.db)
     t = Table('foo', meta, Column('bar', String, primary_key=True))
     meta.create_all()
     from sqlalchemy.engine.reflection import Inspector
     try:
         insp = Inspector(testing.db)
         eq_(insp.get_indexes('foo'), [])
         auto = insp.get_indexes('foo', include_auto_indexes=True)
         eq_(auto, [{'unique': 1,
                     'name': u'sqlite_autoindex_foo_1',
                     'column_names': [u'bar']}])
     finally:
         meta.drop_all()
# --- Example 22 (scraped snippet separator; vote count: 0) ---
    def test_create_index_with_schema(self):
        """Test creation of index with explicit schema"""
        meta = self.metadata
        Table('foo', meta, Column('bar', String, index=True), schema='main')
        meta.create_all()
        expected = [{
            'unique': 0,
            'name': u'ix_main_foo_bar',
            'column_names': [u'bar']
        }]
        eq_(Inspector(testing.db).get_indexes('foo', schema='main'),
            expected)
# --- Example 23 (scraped snippet separator; vote count: 0) ---
    def detect_changed(self, schema_only=False):
        """Detect the difference between the metadata and the database.

        :param schema_only: when True, only look for newly added schemas.
        :rtype: MigrationReport instance
        """
        inspector = Inspector(self.conn)
        if schema_only:
            changes = self.detect_added_new_schema(inspector)
        else:
            changes = compare_metadata(self.context, self.metadata)
            changes.extend(
                self.detect_undetected_constraint_from_alembic(inspector))
        return MigrationReport(self, changes)
 def test_compare_get_foreign_keys_for_sql_and_odbc(self, schema, table,
                                                    engine_name):
     """FK reflection via ODBC and via SQL-fallback must render the same."""
     with self.engine_map[engine_name].begin() as conn:
         if schema is None:
             conn.execute("OPEN SCHEMA %s" % self.schema_2)
         dialect = Inspector(conn).dialect
         via_fallback = dialect.get_foreign_keys(
             connection=conn,
             table_name=table,
             schema=schema,
             use_sql_fallback=True)
         via_odbc = dialect.get_foreign_keys(connection=conn,
                                             table_name=table,
                                             schema=schema)
         # Compare string renderings; contained objects may not define
         # value equality.
         assert str(via_fallback) == str(via_odbc)
 def test_compare_get_columns_for_sql_and_odbc(self, schema, table,
                                               engine_name):
     """Column reflection via ODBC and SQL-fallback must render the same."""
     with self.engine_map[engine_name].begin() as conn:
         dialect = Inspector(conn).dialect
         if schema is None:
             conn.execute("OPEN SCHEMA %s" % self.schema)
         via_fallback = dialect.get_columns(connection=conn,
                                            table_name=table,
                                            schema=schema,
                                            use_sql_fallback=True)
         via_odbc = dialect.get_columns(connection=conn,
                                        table_name=table,
                                        schema=schema)
         # object equality doesn't work for sqltypes, so compare strings
         assert str(via_fallback) == str(via_odbc)
# --- Example 26 (scraped snippet separator; vote count: 0) ---
    def test_add_index_constraint(self):
        """A Many2One declared with index=True produces exactly one index
        on the owning table."""

        def declare_models():
            @register(Model)
            class Address:

                id = Integer(primary_key=True)

            @register(Model)
            class Person:

                name = String(primary_key=True)
                address = Many2One(model=Model.Address, index=True)

        registry = self.init_registry(declare_models)
        insp = Inspector(registry.session.connection())
        indexes = insp.get_indexes(registry.Person.__tablename__)
        self.assertEqual(len(indexes), 1)
    def test_get_foreign_keys(self, use_sql_fallback, engine_name):
        """Reflecting table "s" yields its composite FK onto table "t"."""
        expected = [{
            'name': 'fk_test',
            'constrained_columns': ['fid1', 'fid2'],
            'referred_schema': 'test_get_metadata_functions_schema',
            'referred_table': 't',
            'referred_columns': ['pid1', 'pid2']
        }]
        with self.engine_map[engine_name].begin() as conn:
            dialect = Inspector(conn).dialect
            fks = dialect.get_foreign_keys(
                connection=conn,
                schema=self.schema,
                table_name="s",
                use_sql_fallback=use_sql_fallback)
            assert fks == expected
# --- Example 28 (scraped snippet separator; vote count: 0) ---
    def test_add_primary_keys_constraint(self):
        """A Many2One with primary_key=True contributes its FK column to
        the owning table's primary key."""

        def declare_models():
            @register(Model)
            class Address:

                id = Integer(primary_key=True)

            @register(Model)
            class Person:

                name = String(primary_key=True)
                address = Many2One(model=Model.Address, primary_key=True)

        registry = self.init_registry(declare_models)
        insp = Inspector(registry.session.connection())
        # NOTE(review): get_primary_keys() is deprecated in newer
        # SQLAlchemy in favour of get_pk_constraint().
        pks = insp.get_primary_keys(registry.Person.__tablename__)
        assert 'address_id' in pks
# --- Example 29 (scraped snippet separator; vote count: 0) ---
def upgrade(migrate_engine):
    """Add service_uuid column to volumes."""
    meta = MetaData(bind=migrate_engine)

    # Reflect services first so the FK target (services.uuid) resolves.
    Table('services', meta, autoload=True)
    volumes = Table('volumes', meta, autoload=True)
    if not hasattr(volumes.c, 'service_uuid'):
        new_col = Column('service_uuid',
                         String(36),
                         ForeignKey('services.uuid'),
                         nullable=True)
        volumes.create_column(new_col)

    index_name = 'volumes_service_uuid_idx'
    existing = {ix['name']
                for ix in Inspector(migrate_engine).get_indexes('volumes')}
    if index_name not in existing:
        volumes = Table('volumes', meta, autoload=True)
        Index(index_name, volumes.c.service_uuid, volumes.c.deleted).create()
def upgrade(migrate_engine):
    """Add uuid column to services."""
    meta = MetaData(bind=migrate_engine)
    services = Table('services', meta, autoload=True)
    if not hasattr(services.c, 'uuid'):
        services.create_column(Column('uuid', String(36), nullable=True))

    uuid_index_name = 'services_uuid_idx'
    existing = {ix['name']
                for ix in Inspector(migrate_engine).get_indexes('services')}
    if uuid_index_name not in existing:
        services = Table('services', meta, autoload=True)
        Index(uuid_index_name, services.c.uuid, unique=True).create()

    # Backfill: assign a fresh uuid to every service row lacking one.
    for row in list(services.select().execute()):
        if not row.uuid:
            services.update().where(services.c.id == row.id).\
                values(uuid=six.text_type(uuid.uuid4())).execute()