def test_no_instance_level_collections(self):
    """Instance-level dispatch shares the class-level "empty" listener
    collection until an instance-level listener is attached."""

    @event.listens_for(self.Target, "event_one")
    def listen_one(x, y):
        pass

    t1 = self.Target()
    t2 = self.Target()
    t1.dispatch.event_one(5, 6)
    t2.dispatch.event_one(5, 6)
    # both instances initially point at the shared empty-listener entry
    is_(
        self.Target.dispatch._empty_listener_reg[self.Target]["event_one"],
        t1.dispatch.event_one,
    )

    @event.listens_for(t1, "event_one")
    def listen_two(x, y):
        pass

    # t1 now has its own listener collection...
    is_not_(
        self.Target.dispatch._empty_listener_reg[self.Target]["event_one"],
        t1.dispatch.event_one,
    )
    # ...while t2 still shares the empty one
    is_(
        self.Target.dispatch._empty_listener_reg[self.Target]["event_one"],
        t2.dispatch.event_one,
    )
def test_backref_events(self):
    """Appending to a backref-configured collection populates the
    reverse many-to-one attribute on the appended object."""
    User, Address = self._user_address_fixture(
        addresses_args={"backref": "user"}
    )
    user = User()
    address = Address()
    user.addresses.append(address)
    is_(address.user, user)
def test_cyclical(self):
    """A circular eager relationship breaks the cycle with a lazy loader"""

    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )

    mapper(Address, addresses)
    mapper(
        User,
        users,
        properties=dict(
            addresses=relationship(
                Address,
                lazy="subquery",
                backref=sa.orm.backref("user", lazy="subquery"),
                order_by=Address.id,
            )
        ),
    )
    # both directions are configured as subquery-eager; loading must
    # still terminate rather than recurse forever
    is_(sa.orm.class_mapper(User).get_property("addresses").lazy, "subquery")
    is_(sa.orm.class_mapper(Address).get_property("user").lazy, "subquery")
    sess = create_session()
    eq_(
        self.static.user_address_result,
        sess.query(User).order_by(User.id).all(),
    )
def test_select(self):
    """inspect() applied to a Select construct returns the Select
    itself, which also acts as its own selectable."""
    table = Table("t", MetaData(), Column("x", Integer))
    stmt = table.select()
    is_(inspect(stmt), stmt)
    assert stmt.is_selectable
    is_(stmt.selectable, stmt)
def test_inplace_add(self):
    """add_criteria() mutates the BakedQuery in place: the same
    object is returned, with cache key and step list extended."""
    User = self.classes.User
    session = Session()

    def l1():
        return session.query(User)

    def l2(q):
        return q.filter(User.name == bindparam('name'))

    q1 = self.bakery(l1)

    self._assert_cache_key(
        q1._cache_key,
        [l1]
    )
    eq_(q1.steps, [l1])

    q2 = q1.add_criteria(l2)
    # in-place: the returned query is the original object
    is_(q2, q1)

    self._assert_cache_key(
        q1._cache_key,
        [l1, l2]
    )
    eq_(q1.steps, [l1, l2])
def _run_test(self, specs, attributes): columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)] # Early 5.0 releases seem to report more "general" for columns # in a view, e.g. char -> varchar, tinyblob -> mediumblob use_views = testing.db.dialect.server_version_info > (5, 0, 10) m = self.metadata Table("mysql_types", m, *columns) if use_views: event.listen(m, "after_create", DDL("CREATE OR REPLACE VIEW mysql_types_v " "AS SELECT * from mysql_types")) event.listen(m, "before_drop", DDL("DROP VIEW IF EXISTS mysql_types_v")) m.create_all() m2 = MetaData(testing.db) tables = [Table("mysql_types", m2, autoload=True)] if use_views: tables.append(Table("mysql_types_v", m2, autoload=True)) for table in tables: for i, (reflected_col, spec) in enumerate(zip(table.c, specs)): expected_spec = spec[1] reflected_type = reflected_col.type is_(type(reflected_type), type(expected_spec)) for attr in attributes: eq_( getattr(reflected_type, attr), getattr(expected_spec, attr), "Column %s: Attribute %s value of %s does not " "match %s for type %s" % ("c%i" % (i + 1), attr, getattr(reflected_type, attr), getattr(expected_spec, attr), spec[0]), )
def test_synonym_filter(self):
    """The mapper's ``synonyms`` namespace exposes only synonym
    properties, keyed by attribute name."""
    User = self.classes.User
    synonyms = inspect(User).synonyms

    eq_(list(synonyms.keys()), ["name_syn"])
    original = User.name_syn.original_property
    is_(synonyms.name_syn, original)
    eq_(dict(synonyms), {"name_syn": original})
def _test_round_trip(self, fixture, warnings=False):
    """Emit each DDL type string in ``fixture``, reflect the resulting
    column, and verify the reflected type class and its arguments.

    :param fixture: source for :meth:`._fixture_as_string`, yielding
        (DDL type string, expected type instance) pairs.
    :param warnings: if True, reflection is expected to emit
        "Could not instantiate" warnings.
    """
    from sqlalchemy import inspect

    conn = testing.db.connect()
    for from_, to_ in self._fixture_as_string(fixture):
        inspector = inspect(conn)
        conn.execute("CREATE TABLE foo (data %s)" % from_)
        try:
            if warnings:

                def go():
                    return inspector.get_columns("foo")[0]

                col_info = testing.assert_warnings(
                    go, ["Could not instantiate"], regex=True
                )
            else:
                col_info = inspector.get_columns("foo")[0]
            expected_type = type(to_)
            is_(type(col_info['type']), expected_type)

            # test args
            for attr in ("scale", "precision", "length"):
                if getattr(to_, attr, None) is not None:
                    eq_(
                        getattr(col_info['type'], attr),
                        getattr(to_, attr, None)
                    )
        finally:
            # drop the table even when an assertion above fails, so the
            # next iteration can re-create it
            conn.execute("DROP TABLE foo")
def test_invocation_per_mapper(self):
    """test that BakedLazyLoader is getting invoked with the
    "baked_select" lazy setting.

    """
    User, Address = self._o2m_fixture(lazy="baked_select")

    sess = Session()
    q = sess.query(User).options(lazyload(User.addresses))

    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # not invoked: the lazyload() option overrides baked_select
        eq_(el.mock_calls, [])

    sess = Session()
    q = sess.query(User)
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # invoked: mapper-level baked_select is in effect
        is_(
            el.mock_calls[0][1][1],
            u1._sa_instance_state
        )
def test_sets(self):
    """duck_type_collection() classifies set-like classes and their
    instances as ``set``, and returns None for frozenset-likes."""
    # start Py2K
    # import sets
    # end Py2K

    class SetLike(object):
        # duck-typed: provides add() without subclassing set
        def add(self):
            pass

    class ForcedSet(list):
        # a list that explicitly declares set semantics
        __emulates__ = set

    for type_ in (set,
                  # start Py2K
                  # sets.Set,
                  # end Py2K
                  SetLike,
                  ForcedSet):
        eq_(util.duck_type_collection(type_), set)
        instance = type_()
        eq_(util.duck_type_collection(instance), set)

    for type_ in (frozenset,
                  # start Py2K
                  # sets.ImmutableSet
                  # end Py2K
                  ):
        # immutable sets are not usable as collections
        is_(util.duck_type_collection(type_), None)
        instance = type_()
        is_(util.duck_type_collection(instance), None)
def test_invocation_systemwide_loaders(self):
    """bake_lazy_loaders() installs BakedLazyLoader globally;
    unbake_lazy_loaders() restores the default lazy loader."""
    baked.bake_lazy_loaders()
    try:
        User, Address = self._o2m_fixture()

        sess = Session()
        q = sess.query(User).options(lazyload(User.addresses))
        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # invoked: systemwide baked loading is in effect
            is_(
                el.mock_calls[0][1][1],
                u1._sa_instance_state
            )
    finally:
        # always restore global state for other tests
        baked.unbake_lazy_loaders()

    # after unbaking, the baked loader is no longer used
    clear_mappers()
    User, Address = self._o2m_fixture()
    sess = Session()
    q = sess.query(User).options(lazyload(User.addresses))
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # not invoked
        eq_(el.mock_calls, [])
def test_indexed_entity(self):
    """Even-numbered path positions address the mapper entities."""
    user_mapper = inspect(self.classes.User)
    addr_mapper = inspect(self.classes.Address)
    path = PathRegistry.coerce(
        (
            user_mapper,
            user_mapper.attrs.addresses,
            addr_mapper,
            addr_mapper.attrs.email_address,
        )
    )
    is_(path[0], user_mapper)
    is_(path[2], addr_mapper)
def test_determine_direction_compound_2(self):
    """A compound-expression join with sync disabled is one-to-many."""
    condition = self._join_fixture_compound_expression_2(
        support_sync=False)
    is_(condition.direction, ONETOMANY)
def test_cast_type(self):
    """Comparisons against the Json fixture's int, text and astext
    accessors cast correctly, and document replacement is persisted."""
    Json = self.classes.Json
    s = Session(testing.db)
    j = Json(json={'field': 10})
    s.add(j)
    s.commit()

    # integer-cast comparison
    jq = s.query(Json).filter(Json.int_field == 10).one()
    eq_(j.id, jq.id)

    # text-cast comparison
    jq = s.query(Json).filter(Json.text_field == '10').one()
    eq_(j.id, jq.id)

    # astext operator applied to the JSON expression itself
    jq = s.query(Json).filter(Json.json_field.astext == '10').one()
    eq_(j.id, jq.id)

    # non-matching value yields no row
    jq = s.query(Json).filter(Json.text_field == 'wrong').first()
    is_(jq, None)

    # replacing the whole JSON document is picked up at commit
    j.json = {'field': True}
    s.commit()

    jq = s.query(Json).filter(Json.text_field == 'true').one()
    eq_(j.id, jq.id)
def test_no_instance_level_collections(self):
    """Instance dispatch re-uses the class-level ``_empty_listeners``
    collection until an instance-level listener is attached."""

    @event.listens_for(self.Target, "event_one")
    def listen_one(x, y):
        pass

    t1 = self.Target()
    t2 = self.Target()
    t1.dispatch.event_one(5, 6)
    t2.dispatch.event_one(5, 6)
    # both instances initially share the empty-listener collection
    is_(
        t1.dispatch.__dict__['event_one'],
        self.Target.dispatch.event_one.
        _empty_listeners[self.Target]
    )

    @event.listens_for(t1, "event_one")
    def listen_two(x, y):
        pass

    # t1 now carries its own listener collection...
    is_not_(
        t1.dispatch.__dict__['event_one'],
        self.Target.dispatch.event_one.
        _empty_listeners[self.Target]
    )
    # ...while t2 still shares the empty one
    is_(
        t2.dispatch.__dict__['event_one'],
        self.Target.dispatch.event_one.
        _empty_listeners[self.Target]
    )
def test_mapper_selectable(self):
    """A mapper's selectable is its mapped table; the mapper itself is
    neither a selectable nor an aliased class."""
    insp = inspect(self.classes.User)
    is_(insp.selectable, self.tables.users)
    assert not insp.is_selectable
    assert not insp.is_aliased_class
def test_insp_column_prop(self):
    """inspect() of a column attribute returns the attribute itself,
    parented to the class mapper, with no "mapper" attribute."""
    User = self.classes.User
    attr = inspect(User.name)
    is_(attr, User.name)
    is_(attr.parent, class_mapper(User))
    assert not hasattr(attr, "mapper")
def test_column_error_printing(self):
    """NoSuchColumnError messages render the offending accessor,
    falling back to a placeholder when it can't be stringified."""
    result = testing.db.execute(select([1]))
    row = result.first()

    class unprintable(object):
        # str() raises, exercising the fallback message path
        def __str__(self):
            raise ValueError("nope")

    msg = r"Could not locate column in row for column '%s'"

    for accessor, repl in [
        ("x", "x"),
        (Column("q", Integer), "q"),
        (Column("q", Integer) + 12, r"q \+ :q_1"),
        (unprintable(), "unprintable element.*"),
    ]:
        # raising form of the getter
        assert_raises_message(
            exc.NoSuchColumnError, msg % repl, result._getter, accessor
        )
        # non-raising form returns None instead
        is_(result._getter(accessor, False), None)
        # direct row indexing raises the same error
        assert_raises_message(
            exc.NoSuchColumnError, msg % repl, lambda: row[accessor]
        )
def test_column_in_mapper_args_used_multiple_times(self):
    """A mixin Column referenced in __mapper_args__ is re-resolved per
    subclass, so each mapper uses its own table's copy."""

    class MyMixin(object):
        version_id = Column(Integer)
        __mapper_args__ = {'version_id_col': version_id}

    class ModelOne(Base, MyMixin):
        __tablename__ = 'm1'
        id = Column(Integer, primary_key=True)

    class ModelTwo(Base, MyMixin):
        __tablename__ = 'm2'
        id = Column(Integer, primary_key=True)

    # each class resolves version_id_col to its own table's column,
    # not the shared mixin column
    is_(
        ModelOne.__mapper__.version_id_col,
        ModelOne.__table__.c.version_id
    )
    is_(
        ModelTwo.__mapper__.version_id_col,
        ModelTwo.__table__.c.version_id
    )
def _run_test(self, specs, attributes):
    """Create a table from ``specs``, reflect it, and verify each
    reflected column's type class plus the given type attributes.

    :param specs: sequence of (emitted type, expected reflected type)
        pairs, one per generated column.
    :param attributes: names of type attributes to compare per column.
    """
    columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]

    m = self.metadata
    Table("oracle_types", m, *columns)
    m.create_all()
    # reflect through a fresh MetaData so nothing is cached
    m2 = MetaData(testing.db)
    table = Table("oracle_types", m2, autoload=True)
    for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
        expected_spec = spec[1]
        reflected_type = reflected_col.type
        is_(type(reflected_type), type(expected_spec))
        for attr in attributes:
            eq_(
                getattr(reflected_type, attr),
                getattr(expected_spec, attr),
                "Column %s: Attribute %s value of %s does not "
                "match %s for type %s"
                % (
                    "c%i" % (i + 1),
                    attr,
                    getattr(reflected_type, attr),
                    getattr(expected_spec, attr),
                    spec[0],
                ),
            )
def insert_values(engine, table_, values): """ Inserts a row into a table, returns the full list of values INSERTed including defaults that fired off on the DB side and detects rows that had defaults and post-fetches. """ # verify implicit_returning is working if engine.dialect.implicit_returning: ins = table_.insert() comp = ins.compile(engine, column_keys=list(values)) if not set(values).issuperset( c.key for c in table_.primary_key): is_(bool(comp.returning), True) result = engine.execute(table_.insert(), **values) ret = values.copy() for col, id in zip( table_.primary_key, result.inserted_primary_key): ret[col.key] = id if result.lastrow_has_defaults(): criterion = and_( *[ col == id for col, id in zip(table_.primary_key, result.inserted_primary_key)]) row = engine.execute(table_.select(criterion)).first() for c in table_.c: ret[c.key] = row[c] return ret
def test_savepoint_lost_still_runs(self):
    """A savepoint whose underlying transaction was closed out from
    under it still rolls back, leaving an active outer transaction."""
    User = self.classes.User
    s = self.session(bind=self.bind)
    trans = s.begin_nested()
    s.connection()
    u1 = User(name='ed')
    s.add(u1)

    # kill off the transaction
    nested_trans = trans._connections[self.bind][1]
    nested_trans._do_commit()

    is_(s.transaction, trans)
    # rollback against the dead savepoint raises...
    assert_raises(
        sa_exc.DBAPIError,
        s.rollback
    )
    # ...but the pending object was still expunged
    assert u1 not in s.new

    # the nested transaction is closed and replaced by an active,
    # non-nested root transaction
    is_(trans._state, _session.CLOSED)
    is_not_(s.transaction, trans)
    is_(s.transaction._state, _session.ACTIVE)

    is_(s.transaction.nested, False)
    is_(s.transaction._parent, None)
def test_post_update_m2o_detect_none(self):
    """A post_update many-to-one set to None on an unloaded attribute
    is detected and emits an UPDATE with a NULL foreign key."""
    person, ball, Ball, Person = (
        self.tables.person,
        self.tables.ball,
        self.classes.Ball,
        self.classes.Person)

    mapper(Ball, ball, properties={
        'person': relationship(
            Person, post_update=True,
            primaryjoin=person.c.id == ball.c.person_id)
    })
    mapper(Person, person)

    sess = create_session(autocommit=False, expire_on_commit=True)
    sess.add(Ball(person=Person()))
    sess.commit()
    b1 = sess.query(Ball).first()

    # needs to be unloaded
    assert 'person' not in b1.__dict__
    b1.person = None

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL(
            "UPDATE ball SET person_id=:person_id "
            "WHERE ball.id = :ball_id",
            lambda ctx: {'person_id': None, 'ball_id': b1.id})
    )

    is_(b1.person, None)
def test_password_custom_obj(self):
    """A custom password object is coerced via str() on each access
    and is always rendered obfuscated in the URL string."""

    class SecurePassword(str):
        # subclasses str so URL accepts it; ``value`` may be mutated
        # after construction, exercising lazy str() coercion
        def __init__(self, value):
            self.value = value

        def __str__(self):
            return self.value

    sp = SecurePassword("secured_password")
    u = url.URL("dbtype", username="******", password=sp, host="localhost")

    eq_(u.password, "secured_password")
    eq_(str(u), "dbtype://*****:*****@localhost")

    # test in-place modification
    sp.value = "new_secured_password"
    eq_(u.password, "new_secured_password")
    eq_(str(u), "dbtype://*****:*****@localhost")

    # replacing the password with a plain string
    u.password = "******"
    eq_(u.password, "hi")
    eq_(str(u), "dbtype://*****:*****@localhost")

    # clearing the password drops it from the rendered URL
    u.password = None
    is_(u.password, None)
    eq_(str(u), "dbtype://x@localhost")
def test_info_from_hybrid(self):
    """all_orm_descriptors exposes the hybrid's own info dict."""
    A = self._fixture()
    A._value.info['foo'] = 'bar'
    A.value.info['bar'] = 'hoho'
    descriptors = inspect(A).all_orm_descriptors
    is_(descriptors['value'].info, A.value.info)
def test_any_literal(self):
    """any_() against a scalar subquery matches a literal value."""
    stuff = self.tables.stuff
    stmt = select([4 == any_(select([stuff.c.value]))])
    result = testing.db.execute(stmt).scalar()
    is_(result, True)
def test_deprecated_dialect_name_still_loads(self):
    """The legacy 'postgres' name still resolves to the postgresql
    dialect, emitting a deprecation warning."""
    dialects.registry.clear()
    with expect_deprecated(
        "The 'postgres' dialect name "
        "has been renamed to 'postgresql'"
    ):
        dialect = url.URL("postgres").get_dialect()
    is_(dialect, postgresql.dialect)
def test_aliased_class(self):
    """inspect() of an aliased class reports the base mapper, the
    adapted selectable, and aliased-class (not selectable) status."""
    Address = self.classes.Address
    alias = aliased(Address)
    insp = inspect(alias)
    is_(insp.mapper, inspect(Address))
    is_(insp.selectable, alias._AliasedClass__adapter.selectable)
    assert insp.is_aliased_class
    assert not insp.is_selectable
def test_reflection(self):
    """A mysql.JSON column reflects back with JSON type affinity."""
    Table("mysql_json", self.metadata, Column("foo", mysql.JSON))
    self.metadata.create_all()

    reflected = Table("mysql_json", MetaData(), autoload_with=testing.db)
    reflected_type = reflected.c.foo.type
    is_(reflected_type._type_affinity, sqltypes.JSON)
    assert isinstance(reflected_type, mysql.JSON)
def test_table(self):
    """inspect() applied to a Table returns the Table itself, which
    also acts as its own selectable."""
    table = Table('t', MetaData(), Column('x', Integer))
    is_(inspect(table), table)
    assert table.is_selectable
    is_(table.selectable, table)
async def test_get_raw_connection(self, async_connection):
    """get_raw_connection() returns the pooled DBAPI connection."""
    raw = await async_connection.get_raw_connection()
    is_(raw, async_connection.sync_connection.connection)
def test_extension_types(self):
    """all_orm_descriptors reports the correct extension_type for
    plain attributes, hybrids, hybrid methods and association
    proxies, including hybrid overrides in subclasses."""
    from sqlalchemy.ext.associationproxy import (
        association_proxy,
        AssociationProxyExtensionType,
    )
    from sqlalchemy.ext.hybrid import (
        hybrid_property,
        hybrid_method,
        HybridExtensionType,
    )
    from sqlalchemy import Table, MetaData, Integer, Column
    from sqlalchemy.orm.interfaces import NotExtension

    class SomeClass(self.classes.User):
        some_assoc = association_proxy("addresses", "email_address")

        @hybrid_property
        def upper_name(self):
            raise NotImplementedError()

        @hybrid_method
        def conv(self, fn):
            raise NotImplementedError()

    class Address(self.classes.Address):
        pass

    class SomeSubClass(SomeClass):
        # overrides the superclass hybrid
        @hybrid_property
        def upper_name(self):
            raise NotImplementedError()

        @hybrid_property
        def foo(self):
            raise NotImplementedError()

    m = MetaData()
    t = Table("sometable", m, Column("id", Integer, primary_key=True))
    ta = Table(
        "address_t",
        m,
        Column("id", Integer, primary_key=True),
        Column("s_id", ForeignKey("sometable.id")),
    )
    self.mapper_registry.map_imperatively(
        SomeClass, t, properties={"addresses": relationship(Address)}
    )
    self.mapper_registry.map_imperatively(Address, ta)
    self.mapper_registry.map_imperatively(SomeSubClass, inherits=SomeClass)

    insp = inspect(SomeSubClass)
    eq_(
        dict(
            (k, v.extension_type)
            for k, v in list(insp.all_orm_descriptors.items())
        ),
        {
            "id": NotExtension.NOT_EXTENSION,
            "name": NotExtension.NOT_EXTENSION,
            "name_syn": NotExtension.NOT_EXTENSION,
            "addresses": NotExtension.NOT_EXTENSION,
            "orders": NotExtension.NOT_EXTENSION,
            "upper_name": HybridExtensionType.HYBRID_PROPERTY,
            "foo": HybridExtensionType.HYBRID_PROPERTY,
            "conv": HybridExtensionType.HYBRID_METHOD,
            "some_assoc": AssociationProxyExtensionType.ASSOCIATION_PROXY,
        },
    )
    # the subclass sees its own override of the hybrid...
    is_(
        insp.all_orm_descriptors.upper_name,
        SomeSubClass.__dict__["upper_name"],
    )
    is_(insp.all_orm_descriptors.some_assoc, SomeClass.some_assoc.parent)
    # ...while the superclass still reports its own version
    is_(
        inspect(SomeClass).all_orm_descriptors.upper_name,
        SomeClass.__dict__["upper_name"],
    )
def test_insp_aliased_column_prop(self):
    """inspect() of an aliased column attribute is parented to the
    alias while its mapper-level links point to the base mapper."""
    User = self.classes.User
    ua = aliased(User)
    prop = inspect(ua.name)
    is_(prop, ua.name)

    is_(prop.property.parent.mapper, class_mapper(User))
    # a column property has no "mapper" attribute
    assert not hasattr(prop.property, "mapper")
    # the parent entity is the alias, not the base class
    is_(prop.parent.entity, ua)
    is_(prop.parent.class_, User)

    is_(prop._parentmapper, class_mapper(User))
    assert not hasattr(prop, "mapper")

    is_(prop._parententity, inspect(ua))
def test_column_collection_iterate(self):
    """Mapper.columns iterates the mapped table's columns and also
    supports attribute access by key."""
    insp = inspect(self.classes.User)
    users = self.tables.users
    eq_(list(insp.columns), [users.c.id, users.c.name])
    is_(insp.columns.id, users.c.id)
def test_instance_state(self):
    """inspect() of an instance returns its InstanceState."""
    User = self.classes.User
    obj = User()
    is_(inspect(obj), instance_state(obj))
def test_roundtrip(self):
    """Round-trip a polymorphic Person/Engineer/Manager/Boss hierarchy
    under the enclosing suite's ``with_polymorphic``, ``include_base``,
    ``lazy_relationship`` and ``redefine_colprop`` settings, checking
    statement counts, polymorphic filtering and orphan handling.

    NOTE(review): the configuration names above appear to be closure
    variables supplied by an enclosing test-generation function — the
    enclosing scope is not visible here.
    """
    # build the with_polymorphic selectables per the suite setting
    if with_polymorphic == 'unions':
        if include_base:
            person_join = polymorphic_union(
                {
                    'engineer': people.join(engineers),
                    'manager': people.join(managers),
                    'person': people.select(people.c.type == 'person'),
                },
                None, 'pjoin')
        else:
            person_join = polymorphic_union(
                {
                    'engineer': people.join(engineers),
                    'manager': people.join(managers),
                },
                None, 'pjoin')

        manager_join = people.join(managers).outerjoin(boss)
        person_with_polymorphic = ['*', person_join]
        manager_with_polymorphic = ['*', manager_join]
    elif with_polymorphic == 'joins':
        person_join = people.outerjoin(engineers).outerjoin(managers).\
            outerjoin(boss)
        manager_join = people.join(managers).outerjoin(boss)
        person_with_polymorphic = ['*', person_join]
        manager_with_polymorphic = ['*', manager_join]
    elif with_polymorphic == 'auto':
        person_with_polymorphic = '*'
        manager_with_polymorphic = '*'
    else:
        person_with_polymorphic = None
        manager_with_polymorphic = None

    if redefine_colprop:
        # map "name" under an explicit property name
        person_mapper = mapper(
            Person, people,
            with_polymorphic=person_with_polymorphic,
            polymorphic_on=people.c.type,
            polymorphic_identity='person',
            properties={'person_name': people.c.name})
    else:
        person_mapper = mapper(
            Person, people,
            with_polymorphic=person_with_polymorphic,
            polymorphic_on=people.c.type,
            polymorphic_identity='person')

    mapper(Engineer, engineers, inherits=person_mapper,
           polymorphic_identity='engineer')
    mapper(Manager, managers, inherits=person_mapper,
           with_polymorphic=manager_with_polymorphic,
           polymorphic_identity='manager')
    mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
    mapper(Company, companies, properties={
        'employees': relationship(
            Person,
            lazy=lazy_relationship,
            cascade="all, delete-orphan",
            backref="company",
            order_by=people.c.person_id)
    })

    if redefine_colprop:
        person_attribute_name = 'person_name'
    else:
        person_attribute_name = 'name'

    employees = [
        Manager(status='AAB', manager_name='manager1',
                **{person_attribute_name: 'pointy haired boss'}),
        Engineer(status='BBA',
                 engineer_name='engineer1',
                 primary_language='java',
                 **{person_attribute_name: 'dilbert'}),
    ]
    if include_base:
        employees.append(Person(**{person_attribute_name: 'joesmith'}))
    employees += [
        Engineer(status='CGG',
                 engineer_name='engineer2',
                 primary_language='python',
                 **{person_attribute_name: 'wally'}),
        Manager(status='ABA', manager_name='manager2',
                **{person_attribute_name: 'jsmith'})
    ]

    pointy = employees[0]
    jsmith = employees[-1]
    dilbert = employees[1]

    session = create_session()
    c = Company(name='company1')
    c.employees = employees
    session.add(c)
    session.flush()
    session.expunge_all()

    eq_(session.query(Person).get(dilbert.person_id), dilbert)
    session.expunge_all()

    eq_(
        session.query(Person).filter(
            Person.person_id == dilbert.person_id).one(),
        dilbert)
    session.expunge_all()

    def go():
        cc = session.query(Company).get(c.company_id)
        eq_(cc.employees, employees)

    # expected statement counts depend on eager vs. lazy loading and
    # on whether a polymorphic selectable is configured
    if not lazy_relationship:
        if with_polymorphic != 'none':
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 5)
    else:
        if with_polymorphic != 'none':
            self.assert_sql_count(testing.db, go, 2)
        else:
            self.assert_sql_count(testing.db, go, 6)

    # test selecting from the query, using the base
    # mapped table (people) as the selection criterion.
    # in the case of the polymorphic Person query,
    # the "people" selectable should be adapted to be "person_join"
    eq_(
        session.query(Person).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first(),
        dilbert)

    assert session.query(Person).filter(
        getattr(Person, person_attribute_name) == 'dilbert'
    ).first().person_id

    eq_(
        session.query(Engineer).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first(),
        dilbert)

    # test selecting from the query, joining against
    # an alias of the base "people" table.  test that
    # the "palias" alias does *not* get sucked up
    # into the "person_join" conversion.
    palias = people.alias("palias")
    dilbert = session.query(Person).get(dilbert.person_id)
    is_(
        dilbert,
        session.query(Person).filter((palias.c.name == 'dilbert') & (
            palias.c.person_id == Person.person_id)).first())
    is_(
        dilbert,
        session.query(Engineer).filter((palias.c.name == 'dilbert') & (
            palias.c.person_id == Person.person_id)).first())
    is_(
        dilbert,
        session.query(Person).filter(
            (Engineer.engineer_name == "engineer1") &
            (engineers.c.person_id == people.c.person_id)).first())
    is_(
        dilbert,
        session.query(Engineer).filter(
            Engineer.engineer_name == "engineer1")[0])

    session.flush()
    session.expunge_all()

    def go():
        session.query(Person).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first()
    self.assert_sql_count(testing.db, go, 1)
    session.expunge_all()
    dilbert = session.query(Person).filter(
        getattr(Person, person_attribute_name) == 'dilbert').first()

    def go():
        # assert that only primary table is queried for
        # already-present-in-session
        d = session.query(Person).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first()
    self.assert_sql_count(testing.db, go, 1)

    # test standalone orphans
    daboss = Boss(
        status='BBB',
        manager_name='boss',
        golf_swing='fore',
        **{person_attribute_name: 'daboss'})
    session.add(daboss)
    # orphan with no company cannot be flushed
    assert_raises(sa_exc.DBAPIError, session.flush)

    c = session.query(Company).first()
    daboss.company = c
    manager_list = [e for e in c.employees if isinstance(e, Manager)]
    session.flush()
    session.expunge_all()

    eq_(
        session.query(Manager).order_by(Manager.person_id).all(),
        manager_list)
    c = session.query(Company).first()

    # delete-orphan cascade removes all employees with the company
    session.delete(c)
    session.flush()

    eq_(people.count().scalar(), 0)
def test_parententity_vs_parentmapper(self):
    """_parententity follows the aliased entity while _parentmapper
    always points at the base mapper, for plain columns and synonyms
    alike; the clause-element annotations agree.

    Fix: the original assigned ``pa = aliased(Point)`` before the
    fixture assertions and then immediately reassigned it without ever
    using the first value; the dead assignment is removed.
    """

    class Point(object):
        pass

    self._fixture(Point, properties={"x_syn": synonym("x")})

    # unaliased: parent entity and parent mapper are both the mapper
    is_(Point.x_syn._parententity, inspect(Point))
    is_(Point.x._parententity, inspect(Point))
    is_(Point.x_syn._parentmapper, inspect(Point))
    is_(Point.x._parentmapper, inspect(Point))

    is_(
        Point.x_syn.__clause_element__()._annotations["parententity"],
        inspect(Point),
    )
    is_(
        Point.x.__clause_element__()._annotations["parententity"],
        inspect(Point),
    )
    is_(
        Point.x_syn.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )
    is_(
        Point.x.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )

    pa = aliased(Point)

    # aliased: parent entity is the alias; parent mapper stays the base
    is_(pa.x_syn._parententity, inspect(pa))
    is_(pa.x._parententity, inspect(pa))
    is_(pa.x_syn._parentmapper, inspect(Point))
    is_(pa.x._parentmapper, inspect(Point))

    is_(
        pa.x_syn.__clause_element__()._annotations["parententity"],
        inspect(pa),
    )
    is_(
        pa.x.__clause_element__()._annotations["parententity"], inspect(pa)
    )
    is_(
        pa.x_syn.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )
    is_(
        pa.x.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )
def test_insp_aliased_relationship_prop(self):
    """inspect() of an aliased relationship attribute is parented to
    the alias while mapper-level links point at the base mappers."""
    User = self.classes.User
    Address = self.classes.Address
    ua = aliased(User)
    prop = inspect(ua.addresses)
    is_(prop, ua.addresses)

    is_(prop.property.parent.mapper, class_mapper(User))
    is_(prop.property.mapper, class_mapper(Address))
    # the parent entity is the alias, not the base class
    is_(prop.parent.entity, ua)
    is_(prop.parent.class_, User)

    is_(prop._parentmapper, class_mapper(User))
    is_(prop.mapper, class_mapper(Address))

    is_(prop._parententity, inspect(ua))
def test_instance_three(self):
    """Amounts of differing currencies compare in a
    currency-agnostic fashion."""
    BankAccount, Amount = self.BankAccount, self.Amount
    account = BankAccount(balance=Amount(4000, "usd"))

    # 3d. perform currency-agnostic comparisons, math
    comparison = account.balance > Amount(500, "cad")
    is_(comparison, True)
def test_eq(self):
    """Path equality is structural: same entities and attributes in
    the same positions.  Aliased entities and differing attributes
    compare unequal; parent navigation preserves equality."""
    umapper = inspect(self.classes.User)
    amapper = inspect(self.classes.Address)
    u_alias = inspect(aliased(self.classes.User))
    p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
    p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
    p3 = PathRegistry.coerce((umapper, umapper.attrs.name))
    p4 = PathRegistry.coerce((u_alias, umapper.attrs.addresses))
    p5 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
    p6 = PathRegistry.coerce(
        (amapper, amapper.attrs.user, umapper, umapper.attrs.addresses)
    )
    p7 = PathRegistry.coerce(
        (
            amapper,
            amapper.attrs.user,
            umapper,
            umapper.attrs.addresses,
            amapper,
            amapper.attrs.email_address,
        )
    )

    is_(p1 == p2, True)
    is_(p1 == p3, False)
    is_(p1 == p4, False)
    is_(p1 == p5, False)
    is_(p6 == p7, False)
    # trimming two levels off p7 yields p6
    is_(p6 == p7.parent.parent, True)

    is_(p1 != p2, False)
    is_(p1 != p3, True)
    is_(p1 != p4, True)
    is_(p1 != p5, True)
async def test_get_transaction(self, async_engine):
    """get_transaction() returns the transaction begun on the
    connection, and the transaction points back at the connection."""
    async with async_engine.connect() as connection:
        async with connection.begin() as transaction:
            is_(transaction.connection, connection)
            is_(connection.get_transaction(), transaction)
def test_class(self):
    """The hybrid's class_ mirrors the underlying attribute's."""
    fixture_cls = self._fixture()
    is_(fixture_cls.value.class_, fixture_cls._value.class_)
def test_local_table(self):
    """Mapper.local_table is the mapped users table."""
    insp = inspect(self.classes.User)
    is_(insp.local_table, self.tables.users)
def test_root_registry(self):
    """Indexing RootRegistry with a mapper yields the mapper's own
    path registry, equal to a coerced single-entity path."""
    user_mapper = inspect(self.classes.User)
    root = RootRegistry()
    is_(root[user_mapper], user_mapper._path_registry)
    eq_(root[user_mapper], PathRegistry.coerce((user_mapper,)))
def test_persist_selectable(self):
    """Mapper.persist_selectable is the mapped users table."""
    insp = inspect(self.classes.User)
    is_(insp.persist_selectable, self.tables.users)
def test_instance_state_ident_transient(self):
    """A transient (never-flushed) instance has no identity."""
    User = self.classes.User
    transient = User(name="ed")
    is_(inspect(transient).identity, None)
def test_property(self):
    """attrs namespace entries are the mapper's own properties."""
    User = self.classes.User
    insp = inspect(User)
    expected = class_mapper(User).get_property("id")
    is_(insp.attrs.id, expected)
def test_legacy_schema_flag(self, cfg, expected):
    """The deprecated legacy_schema_aliasing engine parameter is
    still honored, emitting a deprecation warning."""
    with testing.expect_deprecated("The legacy_schema_aliasing parameter"):
        e = engine_from_config(
            cfg, module=Mock(version="MS SQL Server 11.0.92")
        )
    is_(e.dialect.legacy_schema_aliasing, expected)
async def test_get_raw_connection(self, async_engine):
    """get_raw_connection() on an engine connection returns the
    pooled DBAPI connection."""
    connection = await async_engine.connect()
    raw = await connection.get_raw_connection()
    is_(raw, connection.sync_connection.connection)
def test_object_accessor(self):
    """InstanceState.object is the mapped instance itself."""
    User = self.classes.User
    instance = User(name="ed")
    is_(inspect(instance).object, instance)
class MappedColumnTest(fixtures.TestBase, testing.AssertsCompiledSQL):
    """Tests for mapped_column() in annotated Declarative mappings."""

    __dialect__ = "default"

    def test_legacy_declarative_base(self):
        """declarative_base() accepts a type_annotation_map mapping
        Python types to SQL types for Mapped[] annotations."""
        typ = VARCHAR(50)
        Base = declarative_base(type_annotation_map={str: typ})

        class MyClass(Base):
            __tablename__ = "mytable"

            id: Mapped[int] = mapped_column(primary_key=True)
            data: Mapped[str]
            x: Mapped[int]

        # the mapped column carries the exact configured type instance
        is_(MyClass.__table__.c.data.type, typ)
        is_true(MyClass.__table__.c.id.primary_key)

    def test_required_no_arg(self, decl_base):
        """mapped_column() with neither arguments nor an annotation
        raises an informative ArgumentError."""
        with expect_raises_message(
            sa_exc.ArgumentError,
            r"Python typing annotation is required for attribute "
            r'"A.data" when primary '
            r'argument\(s\) for "MappedColumn" construct are None or '
            r"not present",
        ):

            class A(decl_base):
                __tablename__ = "a"

                id: Mapped[int] = mapped_column(primary_key=True)
                data = mapped_column()

    @testing.combinations("key", "name", "both", argnames="case")
    @testing.combinations(True, False, argnames="deferred")
    @testing.combinations(True, False, argnames="use_add_property")
    def test_separate_name(self, decl_base, case, deferred, use_add_property):
        """A column "name" and/or "key" distinct from the attribute
        name maps the attribute to the differently-named column."""
        if case == "key":
            args = {"key": "data_"}
        elif case == "name":
            args = {"name": "data_"}
        else:
            args = {"name": "data_", "key": "data_"}

        if deferred:
            args["deferred"] = True

        class A(decl_base):
            __tablename__ = "a"

            id: Mapped[int] = mapped_column(primary_key=True)
            if not use_add_property:
                data: Mapped[str] = mapped_column(**args)

        if use_add_property:
            # added after the fact; the type must be given explicitly
            args["type_"] = String()
            A.data = mapped_column(**args)

        assert not hasattr(A, "data_")
        is_(A.data.property.expression, A.__table__.c.data_)
        eq_(A.__table__.c.data_.key, "data_")
def test_property(self):
    """The hybrid's property mirrors the underlying attribute's."""
    fixture_cls = self._fixture()
    is_(fixture_cls.value.property, fixture_cls._value.property)
def test_employee_joined_inh(self, decl_base: Type[DeclarativeBase]):
    """Annotated joined-table inheritance: custom Annotated types
    resolve via the updated type_annotation_map, polymorphic_on is
    wired to the type column, and SELECT column ordering holds."""
    str50 = Annotated[str, 50]
    str30 = Annotated[str, 30]
    opt_str50 = Optional[str50]

    decl_base.registry.update_type_annotation_map(
        {str50: String(50), str30: String(30)}
    )

    class Company(decl_base):
        __tablename__ = "company"

        company_id: Mapped[int] = mapped_column(Integer, primary_key=True)

        name: Mapped[str50]

        employees: Mapped[Set["Person"]] = relationship()  # noqa: F821

    class Person(decl_base):
        __tablename__ = "person"

        person_id: Mapped[int] = mapped_column(primary_key=True)
        company_id: Mapped[int] = mapped_column(
            ForeignKey("company.company_id")
        )
        name: Mapped[str50]
        type: Mapped[str30] = mapped_column()

        __mapper_args__ = {"polymorphic_on": type}

    class Engineer(Person):
        __tablename__ = "engineer"

        person_id: Mapped[int] = mapped_column(
            ForeignKey("person.person_id"), primary_key=True
        )

        status: Mapped[str] = mapped_column(String(30))
        engineer_name: Mapped[opt_str50]
        primary_language: Mapped[opt_str50]

    class Manager(Person):
        __tablename__ = "manager"

        person_id: Mapped[int] = mapped_column(
            ForeignKey("person.person_id"), primary_key=True
        )

        status: Mapped[str] = mapped_column(String(30))
        manager_name: Mapped[str50]

    is_(Person.__mapper__.polymorphic_on, Person.__table__.c.type)

    # the SELECT statements here confirm the columns present and their
    # ordering
    self.assert_compile(
        select(Person),
        "SELECT person.person_id, person.company_id, person.name, "
        "person.type FROM person",
    )

    self.assert_compile(
        select(Manager),
        "SELECT manager.person_id, person.person_id AS person_id_1, "
        "person.company_id, person.name, person.type, manager.status, "
        "manager.manager_name FROM person "
        "JOIN manager ON person.person_id = manager.person_id",
    )

    self.assert_compile(
        select(Company).join(Company.employees.of_type(Engineer)),
        "SELECT company.company_id, company.name FROM company JOIN "
        "(person JOIN engineer ON person.person_id = engineer.person_id) "
        "ON company.company_id = person.company_id",
    )
def test_unions(self):
    """Union[float, Decimal] in every spelling (reversed, Optional,
    Union-with-None) resolves through the type_annotation_map, with
    nullability driven by the Optional variants."""
    our_type = Numeric(10, 2)

    class Base(DeclarativeBase):
        type_annotation_map = {Union[float, Decimal]: our_type}

    class User(Base):
        __tablename__ = "users"
        __table__: Table

        id: Mapped[int] = mapped_column(primary_key=True)

        data: Mapped[Union[float, Decimal]] = mapped_column()
        reverse_data: Mapped[Union[Decimal, float]] = mapped_column()

        optional_data: Mapped[
            Optional[Union[float, Decimal]]
        ] = mapped_column()

        # use Optional directly
        reverse_optional_data: Mapped[
            Optional[Union[Decimal, float]]
        ] = mapped_column()

        # use Union with None, same as Optional but presents differently
        # (Optional object with __origin__ Union vs. Union)
        reverse_u_optional_data: Mapped[
            Union[Decimal, float, None]
        ] = mapped_column()

        float_data: Mapped[float] = mapped_column()
        decimal_data: Mapped[Decimal] = mapped_column()

    is_(User.__table__.c.data.type, our_type)
    is_false(User.__table__.c.data.nullable)
    is_(User.__table__.c.reverse_data.type, our_type)
    is_(User.__table__.c.optional_data.type, our_type)
    is_true(User.__table__.c.optional_data.nullable)

    is_(User.__table__.c.reverse_optional_data.type, our_type)
    is_(User.__table__.c.reverse_u_optional_data.type, our_type)
    is_true(User.__table__.c.reverse_optional_data.nullable)
    is_true(User.__table__.c.reverse_u_optional_data.nullable)

    is_(User.__table__.c.float_data.type, our_type)
    is_(User.__table__.c.decimal_data.type, our_type)
def test_determine_direction_m2o_composite_selfref(self):
    """A composite self-referential m2o join is MANYTOONE."""
    condition = self._join_fixture_m2o_composite_selfref()
    is_(condition.direction, MANYTOONE)
def test_determine_direction_purely_single_m2o(self):
    """A purely single-column m2o join is MANYTOONE."""
    condition = self._join_fixture_purely_single_m2o()
    is_(condition.direction, MANYTOONE)
def test_entity_boolean(self):
    """A single-entity path is truthy."""
    user_mapper = inspect(self.classes.User)
    entity_path = PathRegistry.coerce((user_mapper,))
    is_(bool(entity_path), True)
def test_determine_direction_purely_single_o2m(self):
    """A purely single-column o2m join is ONETOMANY."""
    condition = self._join_fixture_purely_single_o2m()
    is_(condition.direction, ONETOMANY)
def test_key_boolean(self):
    """An entity+attribute path is truthy."""
    user_mapper = inspect(self.classes.User)
    attr_path = PathRegistry.coerce(
        (user_mapper, user_mapper.attrs.addresses)
    )
    is_(bool(attr_path), True)
def test_determine_direction_o2m_composite_selfref(self):
    """A composite self-referential o2m join is ONETOMANY."""
    condition = self._join_fixture_o2m_composite_selfref()
    is_(condition.direction, ONETOMANY)