def test_refresh_scalar(self):
    """Re-running a nestedload query after expiring the related Customer
    refreshes the Customer attached to the loaded Order, so that the
    final comparison runs with zero additional SQL statements.
    """
    Order = self.classes.Order
    s = Session()
    q = s.query(Order).options(orm.nestedload(Order.customer)).\
        filter(Order.id == 102)
    order = q.all()
    customer = order[0].customer
    # expire...
    s.expire(customer)
    # + load again, should refresh Customer on Order
    s.query(Order).options(orm.nestedload(Order.customer)).\
        filter(Order.id == 102).all()
    fixture_order = self._orm_fixture(orders=True).orders[1]
    # avoid comparison/lazy load of 'orders' on the customer
    del fixture_order.customer.__dict__['orders']
    # comparing refreshed objects must not emit any further statements
    with self.assert_statement_count(0):
        eq_(
            fixture_order,
            order[0]
        )
def test_load_collection_mixed(self):
    """A query mixing an entity with an orm_nested() collection returns
    the nested rows as a list of tuples, in a single statement."""
    Customer = self.classes.Customer
    Order = self.classes.Order
    s = Session()
    # nested subquery: (Order.id, Order) rows joined via Customer.orders
    n = orm_nested(s.query(Order.id, Order).filter(Customer.orders))
    q = s.query(Customer, n).filter(Customer.id == 1)
    # the outer entity and the nested collection load in one round trip
    with self.assert_statement_count(1):
        eq_(
            q.all(),
            [
                (Customer(id=1, name='David McFarlane'), [
                    (101, Order(customer_id=1, id=101,
                                order_info='apple related')),
                    (102, Order(customer_id=1, id=102,
                                order_info='apple related')),
                    (103, Order(customer_id=1, id=103,
                                order_info='apple related'))
                ])
            ]
        )
def _test(self, expr, field="all", overrides=None):
    """Run EXTRACT for every expected field and compare results.

    :param expr: SQL expression to pass to ``extract()``.
    :param field: either a preset name ("all", "time", "date",
        "all+tz") selecting a canned dict of expected values, or a
        dict mapping field name -> expected value.
    :param overrides: optional dict merged over the expected values.
    """
    t = self.tables.t

    # resolve the preset name (or accept a dict directly)
    if field == "all":
        fields = {"year": 2012, "month": 5, "day": 10,
                  "epoch": 1336652125.0,
                  "hour": 12, "minute": 15}
    elif field == "time":
        fields = {"hour": 12, "minute": 15, "second": 25}
    elif field == 'date':
        fields = {"year": 2012, "month": 5, "day": 10}
    elif field == 'all+tz':
        fields = {"year": 2012, "month": 5, "day": 10,
                  "epoch": 1336637725.0,
                  "hour": 8,
                  "timezone": 0}
    else:
        fields = field

    if overrides:
        fields.update(overrides)

    # use a distinct loop variable; the original reused ``field``,
    # shadowing the parameter and making later reads misleading
    for field_name, expected in fields.items():
        result = testing.db.scalar(
            select([extract(field_name, expr)]).select_from(t))
        eq_(result, expected)
def test_on_conflict_do_update_special_types_in_set(self):
    """ON CONFLICT DO UPDATE applies bind processing to values in SET.

    NOTE(review): the "... processed" suffix presumably comes from a
    custom bound-value type on the ``data`` column defined in the
    fixture table — confirm against the fixture definition.
    """
    bind_targets = self.tables.bind_targets

    with testing.db.connect() as conn:
        # plain insert: the bound value comes back with the suffix
        i = insert(bind_targets)
        conn.execute(i, {"id": 1, "data": "initial data"})
        eq_(
            conn.scalar(sql.select([bind_targets.c.data])),
            "initial data processed"
        )

        # conflicting insert: the SET value must also be bind-processed
        i = insert(bind_targets)
        i = i.on_conflict_do_update(
            index_elements=[bind_targets.c.id],
            set_=dict(data="new updated data")
        )
        conn.execute(
            i, {"id": 1, "data": "new inserted data"}
        )
        eq_(
            conn.scalar(sql.select([bind_targets.c.data])),
            "new updated data processed"
        )
def test_match_across_joins(self):
    """MATCH predicates combined across a join between the category
    and match tables select the expected ordered ids."""
    join_condition = cattable.c.id == matchtable.c.category_id
    match_condition = or_(
        cattable.c.description.match('Ruby'),
        matchtable.c.title.match('nutshells')
    )
    stmt = matchtable.select().where(
        and_(join_condition, match_condition)
    ).order_by(matchtable.c.id)
    rows = stmt.execute().fetchall()
    eq_([1, 3, 5], [row.id for row in rows])
def test_tuple_containment(self):
    """tuple_(...).in_(...) evaluates correctly for both matching and
    non-matching candidate lists."""
    cases = [
        ([('a', 'b')], True),
        ([('a', 'c')], False),
        ([('f', 'q'), ('a', 'b')], True),
        ([('f', 'q'), ('a', 'c')], False)
    ]
    for candidates, expected in cases:
        # build ('a','b') IN ( <candidate tuples> ) as literal columns
        candidate_tuples = [
            tuple_(*[
                literal_column("'%s'" % letter)
                for letter in pair
            ])
            for pair in candidates
        ]
        containment = tuple_(
            literal_column("'a'"),
            literal_column("'b'")
        ).in_(candidate_tuples)
        actual = testing.db.execute(select([containment])).scalar()
        eq_(actual, expected)
def test_update(self):
    """Executemany UPDATE with bindparam() criteria updates each
    matched row from its own parameter set."""
    with self.engine.connect() as conn:
        conn.execute(
            self.tables.data.insert(),
            [
                {"x": "x1", "y": "y1"},
                {"x": "x2", "y": "y2"},
                {"x": "x3", "y": "y3"}
            ]
        )
        # match on 'xval', set y from 'yval', once per parameter dict
        conn.execute(
            self.tables.data.update().
            where(self.tables.data.c.x == bindparam('xval')).
            values(y=bindparam('yval')),
            [
                {"xval": "x1", "yval": "y5"},
                {"xval": "x3", "yval": "y6"}
            ]
        )
        # NOTE(review): the trailing 5 in each row is presumably a
        # server-default column on the fixture table — confirm.
        eq_(
            conn.execute(
                select([self.tables.data]).
                order_by(self.tables.data.c.id)).
            fetchall(),
            [
                (1, "x1", "y5", 5),
                (2, "x2", "y2", 5),
                (3, "x3", "y6", 5)
            ]
        )
def test_version_parsing(self):
    """_get_server_version_info parses assorted PG version banners."""

    def fake_connection(banner):
        # connection whose version-query scalar returns the banner
        return Mock(
            execute=Mock(
                return_value=Mock(scalar=Mock(return_value=banner))))

    cases = [
        (
            'PostgreSQL 8.3.8 on i686-redhat-linux-gnu, compiled by '
            'GCC gcc (GCC) 4.1.2 20070925 (Red Hat 4.1.2-33)',
            (8, 3, 8)),
        (
            'PostgreSQL 8.5devel on x86_64-unknown-linux-gnu, '
            'compiled by GCC gcc (GCC) 4.4.2, 64-bit',
            (8, 5)),
        (
            'EnterpriseDB 9.1.2.2 on x86_64-unknown-linux-gnu, '
            'compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-50), '
            '64-bit',
            (9, 1, 2)),
        (
            '[PostgreSQL 9.2.4 ] VMware vFabric Postgres 9.2.4.0 '
            'release build 1080137',
            (9, 2, 4)),
        (
            'PostgreSQL 10devel on x86_64-pc-linux-gnu'
            'compiled by gcc (GCC) 6.3.1 20170306, 64-bit',
            (10,)),
    ]
    for banner, expected in cases:
        parsed = testing.db.dialect._get_server_version_info(
            fake_connection(banner))
        eq_(parsed, expected)
def test_index_reflection_with_access_method(self):
    """reflect indexes with storage options set"""
    metadata = self.metadata

    Table(
        "t",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("x", ARRAY(Integer)),
    )
    metadata.create_all()
    with testing.db.connect().execution_options(autocommit=True) as conn:
        # GIN index created with raw DDL, outside SQLAlchemy's schema API
        conn.execute("CREATE INDEX idx1 ON t USING gin (x)")
        ind = testing.db.dialect.get_indexes(conn, "t", None)
        # the access method surfaces as the postgresql_using option
        eq_(
            ind,
            [
                {
                    "unique": False,
                    "column_names": ["x"],
                    "name": "idx1",
                    "dialect_options": {"postgresql_using": "gin"},
                }
            ],
        )
        # Table-level reflection carries the same dialect option
        m = MetaData()
        t1 = Table("t", m, autoload_with=conn)
        eq_(
            list(t1.indexes)[0].dialect_options["postgresql"]["using"],
            "gin",
        )
def test_on_conflict_do_update_exotic_targets_four(self):
    """Upsert targeting a unique constraint by name updates the row
    matched on login_email rather than inserting a new one."""
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try unique constraint by name: cause an
        # upsert on target login_email, not id
        i = insert(users)
        i = i.on_conflict_do_update(
            constraint=self.unique_constraint.name,
            set_=dict(
                id=i.excluded.id,
                name=i.excluded.name,
                login_email=i.excluded.login_email)
        )
        # note: lets_index_this value totally ignored in SET clause.
        conn.execute(i, dict(
            id=43, name='nameunique2',
            login_email='*****@*****.**',
            lets_index_this='unique')
        )
        # row keeps its fixture lets_index_this ('not'); id/name updated
        eq_(
            conn.execute(
                users.select().
                where(users.c.login_email == '*****@*****.**')
            ).fetchall(),
            [(43, 'nameunique2', '*****@*****.**', 'not')]
        )
def test_cross_schema_reflection_metadata_uses_schema(self):
    """MetaData(schema=...) reflects that schema's tables plus the
    default-schema tables referenced by their foreign keys."""
    # test [ticket:3716]
    metadata = self.metadata

    Table(
        "some_table",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("sid", Integer, ForeignKey("some_other_table.id")),
        schema="test_schema",
    )
    Table(
        "some_other_table",
        metadata,
        Column("id", Integer, primary_key=True),
        schema=None,
    )
    metadata.create_all()
    with testing.db.connect() as conn:
        meta2 = MetaData(conn, schema="test_schema")
        meta2.reflect()

        # default-schema FK target appears unqualified; the table in
        # the reflected schema appears schema-qualified
        eq_(
            set(meta2.tables),
            set(["some_other_table", "test_schema.some_table"]),
        )
def test_reflect_check_constraint(self):
    """CHECK constraints reflect back with PostgreSQL's normalized
    (parenthesized) SQL text."""
    meta = self.metadata
    cc_table = Table(
        "pgsql_cc",
        meta,
        Column("a", Integer()),
        CheckConstraint("a > 1 AND a < 5", name="cc1"),
        CheckConstraint("a = 1 OR (a > 2 AND a < 5)", name="cc2"),
    )

    cc_table.create()

    reflected = Table("pgsql_cc", MetaData(testing.db), autoload=True)

    # dict comprehension instead of dict() over a generator (idiom)
    check_constraints = {
        uc.name: uc.sqltext.text
        for uc in reflected.constraints
        if isinstance(uc, CheckConstraint)
    }

    eq_(
        check_constraints,
        {
            u"cc1": u"(a > 1) AND (a < 5)",
            u"cc2": u"(a = 1) OR ((a > 2) AND (a < 5))",
        },
    )
def test_reflection_with_exclude_constraint(self):
    """The implicit index behind an EXCLUDE constraint is reported by
    get_indexes() but filtered out of Table-level reflection."""
    m = self.metadata
    Table(
        "t",
        m,
        Column("id", Integer, primary_key=True),
        Column("period", TSRANGE),
        ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
    )
    m.create_all()

    insp = inspect(testing.db)

    # PostgreSQL will create an implicit index for an exclude constraint.
    # we don't reflect the EXCLUDE yet.
    eq_(
        insp.get_indexes("t"),
        [
            {
                "unique": False,
                "name": "quarters_period_excl",
                "duplicates_constraint": "quarters_period_excl",
                "dialect_options": {"postgresql_using": "gist"},
                "column_names": ["period"],
            }
        ],
    )

    # reflection corrects for the dupe
    reflected = Table("t", MetaData(testing.db), autoload=True)
    eq_(set(reflected.indexes), set())
def test_autocommit_isolation_level(self):
    """The AUTOCOMMIT execution option maps to psycopg2's
    ISOLATION_LEVEL_AUTOCOMMIT on the raw DBAPI connection."""
    # __import__ returns the top-level package; drill to the submodule
    extensions = __import__('psycopg2.extensions').extensions

    c = testing.db.connect()
    try:
        c = c.execution_options(isolation_level='AUTOCOMMIT')
        eq_(c.connection.connection.isolation_level,
            extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    finally:
        # original leaked the connection; return it to the pool
        c.close()
def test_serial_integer(self):
    """The SERIAL variant in DDL follows the integer type and, for
    SMALLSERIAL, the server version."""
    for version, type_, expected in [
        (None, Integer, 'SERIAL'),
        (None, BigInteger, 'BIGSERIAL'),
        ((9, 1), SmallInteger, 'SMALLINT'),
        ((9, 2), SmallInteger, 'SMALLSERIAL'),
        (None, postgresql.INTEGER, 'SERIAL'),
        (None, postgresql.BIGINT, 'BIGSERIAL'),
    ]:
        m = MetaData()
        t = Table('t', m, Column('c', type_, primary_key=True))

        if version:
            # fabricate a dialect reporting the given server version
            dialect = postgresql.dialect()
            dialect._get_server_version_info = Mock(return_value=version)
            # original leaked this connection; close it after initialize
            with testing.db.connect() as conn:
                dialect.initialize(conn)
        else:
            dialect = testing.db.dialect

        ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        eq_(
            ddl_compiler.get_column_specification(t.c.c),
            "c %s NOT NULL" % expected
        )
def test_serial_integer(self):
    """SERIAL variant selection, including with_variant() wrappers and
    a TypeDecorator whose dialect impl is BigInteger."""

    class BITD(TypeDecorator):
        impl = Integer

        def load_dialect_impl(self, dialect):
            # BigInteger only when compiling for postgresql
            if dialect.name == "postgresql":
                return BigInteger()
            else:
                return Integer()

    for version, type_, expected in [
        (None, Integer, "SERIAL"),
        (None, BigInteger, "BIGSERIAL"),
        ((9, 1), SmallInteger, "SMALLINT"),
        ((9, 2), SmallInteger, "SMALLSERIAL"),
        (None, postgresql.INTEGER, "SERIAL"),
        (None, postgresql.BIGINT, "BIGSERIAL"),
        (
            None,
            Integer().with_variant(BigInteger(), "postgresql"),
            "BIGSERIAL",
        ),
        (
            None,
            Integer().with_variant(postgresql.BIGINT, "postgresql"),
            "BIGSERIAL",
        ),
        (
            (9, 2),
            Integer().with_variant(SmallInteger, "postgresql"),
            "SMALLSERIAL",
        ),
        (None, BITD(), "BIGSERIAL"),
    ]:
        m = MetaData()
        t = Table("t", m, Column("c", type_, primary_key=True))

        if version:
            # fabricate a dialect reporting the given server version
            dialect = postgresql.dialect()
            dialect._get_server_version_info = Mock(return_value=version)
            # original leaked this connection; close it after initialize
            with testing.db.connect() as conn:
                dialect.initialize(conn)
        else:
            dialect = testing.db.dialect

        ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        eq_(
            ddl_compiler.get_column_specification(t.c.c),
            "c %s NOT NULL" % expected
        )
def test_inspect_enums_star(self):
    """get_enums() defaults to the default schema; schema='*' returns
    enums from all schemas, with per-schema visibility flags."""
    enum_type = postgresql.ENUM(
        'cat', 'dog', 'rat', name='pet', metadata=self.metadata)
    schema_enum_type = postgresql.ENUM(
        'sad', 'ok', 'happy', name='mood',
        schema='test_schema',
        metadata=self.metadata)
    enum_type.create(testing.db)
    schema_enum_type.create(testing.db)
    inspector = reflection.Inspector.from_engine(testing.db)

    # default: only the 'public' enum is returned
    eq_(inspector.get_enums(), [
        {
            'visible': True,
            'labels': ['cat', 'dog', 'rat'],
            'name': 'pet',
            'schema': 'public'
        }])

    # '*' wildcard: enums from every schema
    eq_(inspector.get_enums('*'), [
        {
            'visible': True,
            'labels': ['cat', 'dog', 'rat'],
            'name': 'pet',
            'schema': 'public'
        },
        {
            'visible': False,
            'name': 'mood',
            'schema': 'test_schema',
            'labels': ['sad', 'ok', 'happy']
        }])
def assert_compiled(element, assert_string, dialect=None):
    """Compile *element* with *dialect* and compare it to
    *assert_string*, ignoring newlines and tabs on both sides.

    :param element: compilable SQL construct.
    :param assert_string: expected compiled SQL.
    :param dialect: optional dialect spec resolved via _get_dialect().
    """
    def _collapse(s):
        # drop formatting whitespace so comparison is layout-insensitive
        return s.replace("\n", "").replace("\t", "")

    dialect = _get_dialect(dialect)
    eq_(
        _collapse(text_type(element.compile(dialect=dialect))),
        _collapse(assert_string)
    )
def test_crosschema_domain_is_reflected(self):
    """A domain defined in another schema reflects its default and
    nullability onto the column that uses it."""
    meta = MetaData(testing.db)
    reflected = Table('crosschema', meta, autoload=True)
    answer_col = reflected.columns.answer
    eq_(str(answer_col.server_default.arg), '0',
        "Reflected default value didn't equal expected value")
    assert answer_col.nullable, \
        'Expected reflected column to be nullable.'
def test_cross_schema_reflection_seven(self):
    """FK targets on the active search path reflect unqualified by
    default; postgresql_ignore_search_path keeps them qualified."""
    # test that the search path *is* taken into account
    # by default
    meta1 = self.metadata

    Table('some_table', meta1,
          Column('id', Integer, primary_key=True),
          schema='test_schema'
          )
    Table('some_other_table', meta1,
          Column('id', Integer, primary_key=True),
          Column('sid', Integer,
                 ForeignKey('test_schema.some_table.id')),
          schema='test_schema_2'
          )
    meta1.create_all()
    with testing.db.connect() as conn:
        # detach so the modified search_path doesn't leak into the pool
        conn.detach()
        conn.execute(
            "set search_path to test_schema_2, test_schema, public")
        meta2 = MetaData(conn)
        meta2.reflect(schema="test_schema_2")
        # FK target is on the search path, so it reflects unqualified
        eq_(set(meta2.tables), set(
            ['test_schema_2.some_other_table', 'some_table']))

        meta3 = MetaData(conn)
        meta3.reflect(
            schema="test_schema_2",
            postgresql_ignore_search_path=True)
        # ignoring the search path keeps the fully-qualified name
        eq_(set(meta3.tables), set(
            ['test_schema_2.some_other_table',
             'test_schema.some_table']))
def test_enum_domain_is_reflected(self):
    """Reflecting a column backed by an enum domain yields its labels."""
    meta = MetaData(testing.db)
    reflected = Table('enum_test', meta, autoload=True)
    enum_labels = reflected.c.data.type.enums
    eq_(enum_labels, ('test', ))
def test_index_reflection_with_storage_options(self):
    """reflect indexes with storage options set"""
    metadata = self.metadata

    Table(
        't', metadata,
        Column('id', Integer, primary_key=True),
        Column('x', Integer)
    )
    metadata.create_all()
    with testing.db.connect().execution_options(autocommit=True) as conn:
        # storage parameter set via raw DDL, outside the schema API
        conn.execute("CREATE INDEX idx1 ON t (x) WITH (fillfactor = 50)")
        ind = testing.db.dialect.get_indexes(conn, "t", None)
        # WITH options surface under the postgresql_with dialect option
        eq_(ind, [{'unique': False, 'column_names': ['x'],
                   'name': 'idx1',
                   'dialect_options':
                   {"postgresql_with": {"fillfactor": "50"}}}])
        # Table-level reflection carries the same dialect option
        m = MetaData()
        t1 = Table('t', m, autoload_with=conn)
        eq_(
            list(t1.indexes)[0].dialect_options['postgresql']['with'],
            {"fillfactor": "50"}
        )
def test_mview_select(self):
    """Selecting from a reflected materialized view returns its rows."""
    meta = MetaData(testing.db)
    mview = Table('test_mview', meta, autoload=True)
    rows = mview.select().execute().fetchall()
    eq_(rows, [(89, 'd1',)])
def _exotic_targets_fixture(self, conn):
    """Insert two baseline rows into users_xtra and sanity-check the
    first one, for use by the on_conflict "exotic targets" tests."""
    users = self.tables.users_xtra
    conn.execute(
        insert(users),
        dict(
            id=1,
            name="name1",
            login_email="*****@*****.**",
            lets_index_this="not",
        ),
    )
    # second row uses the table-level insert() spelling
    conn.execute(
        users.insert(),
        dict(
            id=2,
            name="name2",
            login_email="*****@*****.**",
            lets_index_this="not",
        ),
    )

    # confirm the fixture landed before the test proper runs
    eq_(
        conn.execute(users.select().where(users.c.id == 1)).fetchall(),
        [(1, "name1", "*****@*****.**", "not")],
    )
def test_on_conflict_do_nothing_target(self):
    """ON CONFLICT (pk columns) DO NOTHING skips the conflicting
    insert and leaves the original row untouched."""
    users = self.tables.users

    with testing.db.connect() as conn:
        result = conn.execute(
            insert(users).on_conflict_do_nothing(
                index_elements=users.primary_key.columns
            ),
            dict(id=1, name="name1"),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        # second insert conflicts on the pk and is silently skipped
        result = conn.execute(
            insert(users).on_conflict_do_nothing(
                index_elements=users.primary_key.columns
            ),
            dict(id=1, name="name2"),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        # the row retains its original name
        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, "name1")],
        )
def test_on_conflict_do_nothing_connectionless(self):
    """ON CONFLICT DO NOTHING via engine-level (connectionless)
    execution reports no inserted pk when the insert is skipped."""
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        result = conn.execute(
            insert(users).on_conflict_do_nothing(
                constraint="uq_login_email"
            ),
            dict(name="name1", login_email="email1"),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, (1,))

    # connectionless execution; conflict on login_email is skipped,
    # so no primary key or defaults are reported
    result = testing.db.execute(
        insert(users).on_conflict_do_nothing(constraint="uq_login_email"),
        dict(name="name2", login_email="email1"),
    )
    eq_(result.inserted_primary_key, None)
    eq_(result.returned_defaults, None)

    # the first row is unchanged
    eq_(
        testing.db.execute(
            users.select().where(users.c.id == 1)
        ).fetchall(),
        [(1, "name1", "email1", None)],
    )
def test_reflection_with_exclude_constraint(self):
    """The implicit index behind an EXCLUDE constraint is reported by
    get_indexes() but filtered out of Table-level reflection."""
    m = self.metadata
    Table(
        't', m,
        Column('id', Integer, primary_key=True),
        Column('period', TSRANGE),
        ExcludeConstraint(('period', '&&'), name='quarters_period_excl')
    )
    m.create_all()

    insp = inspect(testing.db)

    # PostgreSQL will create an implicit index for an exclude constraint.
    # we don't reflect the EXCLUDE yet.
    eq_(
        insp.get_indexes('t'),
        [{'unique': False,
          'name': 'quarters_period_excl',
          'duplicates_constraint': 'quarters_period_excl',
          'dialect_options': {'postgresql_using': 'gist'},
          'column_names': ['period']}]
    )

    # reflection corrects for the dupe
    reflected = Table('t', MetaData(testing.db), autoload=True)
    eq_(set(reflected.indexes), set())
def test_nested_type_trans(self):
    """Result-value processing (TypeDecorator) applies to rows fetched
    from doubly-nested subquery cursors."""
    customer = self.tables.customer
    order = self.tables.order
    item = self.tables.item

    class SpecialType(TypeDecorator):
        impl = Integer

        def process_result_value(self, value, dialect):
            # tag each fetched value so the processing is observable
            return str(value) + "_processed"

    # innermost: item prices for one order, coerced to SpecialType
    sub_sub_stmt = nested(
        select([type_coerce(item.c.price, SpecialType)]).
        where(item.c.order_id == order.c.id)).label('i')
    # middle: one nested cursor per order of the customer
    sub_stmt = nested(
        select([sub_sub_stmt]).
        where(order.c.customer_id == customer.c.id)).label('o')
    stmt = select([sub_stmt]).where(customer.c.id == 1)

    r = config.db.execute(stmt)
    row = r.fetchone()
    sub_result = row['o']
    # drill into the nested cursor for the first order's items
    sub_sub_result = sub_result.fetchone()['i']
    eq_(
        list(sub_sub_result),
        [('9.99_processed',), ('19.99_processed',)]
    )
def test_table_is_reflected_test_schema(self):
    """Reflection against an explicit schema picks up all columns and
    resolves domain-backed columns to their base type."""
    meta = MetaData(testing.db)
    reflected = Table('testtable', meta, autoload=True,
                      schema='test_schema')
    eq_(set(reflected.columns.keys()),
        set(['question', 'answer', 'anything']),
        "Columns of reflected table didn't equal expected columns")
    assert isinstance(reflected.c.anything.type, Integer)
def test_get_view_definition(self):
    """get_view_definition returns the view's SELECT, compared modulo
    whitespace differences."""
    insp = inspect(testing.db)
    definition = insp.get_view_definition("test_mview").strip()
    normalized = re.sub(r'[\n\t ]+', ' ', definition)
    eq_(normalized,
        "SELECT testtable.id, testtable.data FROM testtable;")
def test_mview_is_reflected(self):
    """A materialized view reflects with the expected column set."""
    meta = MetaData(testing.db)
    mview = Table('test_mview', meta, autoload=True)
    eq_(set(mview.columns.keys()), set(['id', 'data']),
        "Columns of reflected mview didn't equal expected columns")
def test_get_view_names(self):
    """Both regular and materialized views appear in get_view_names."""
    insp = inspect(testing.db)
    view_names = set(insp.get_view_names())
    eq_(view_names, set(['test_regview', 'test_mview']))