Example #1
    def test_deleted_flag(self):
        users, User = self.tables.users, self.classes.User

        mapper(User, users)

        sess = sessionmaker()()

        u1 = User(name='u1')
        sess.add(u1)
        sess.commit()

        sess.delete(u1)
        sess.flush()
        assert u1 not in sess
        assert_raises(sa.exc.InvalidRequestError, sess.add, u1)
        sess.rollback()
        assert u1 in sess

        sess.delete(u1)
        sess.commit()
        assert u1 not in sess
        assert_raises(sa.exc.InvalidRequestError, sess.add, u1)

        make_transient(u1)
        sess.add(u1)
        sess.commit()

        eq_(sess.query(User).count(), 1)
Example #2
    def test_update_from_multitable_same_names(self):
        Document = self.classes.Document
        User = self.classes.User

        s = Session()

        s.query(Document).\
            filter(User.id == Document.user_id).\
            filter(User.id == 2).update({
                Document.samename: 'd_samename',
                User.samename: 'u_samename'
            }, synchronize_session=False)
        eq_(
            s.query(User.id, Document.samename, User.samename).
            filter(User.id == Document.user_id).
            order_by(User.id).all(),
            [
                (1, None, None),
                (1, None, None),
                (2, 'd_samename', 'u_samename'),
                (2, 'd_samename', 'u_samename'),
                (3, None, None),
                (3, None, None),
            ]
        )
Example #3
    def test_delete_against_metadata(self):
        User = self.classes.User
        users = self.tables.users

        sess = Session()
        sess.query(users).delete(synchronize_session=False)
        eq_(sess.query(User).count(), 0)
Example #4
    def test_autoflush_expressions(self):
        """test that an expression which is dependent on object state is
        evaluated after the session autoflushes.   This is the lambda
        inside of strategies.py lazy_clause.

        """

        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)

        mapper(User, users, properties={
            'addresses': relationship(Address, backref="user")})
        mapper(Address, addresses)

        sess = create_session(autoflush=True, autocommit=False)
        u = User(name='ed', addresses=[Address(email_address='foo')])
        sess.add(u)
        eq_(sess.query(Address).filter(Address.user == u).one(),
            Address(email_address='foo'))

        # still works after "u" is garbage collected
        sess.commit()
        sess.close()
        u = sess.query(User).get(u.id)
        q = sess.query(Address).filter(Address.user == u)
        del u
        gc_collect()
        eq_(q.one(), Address(email_address='foo'))
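
A minimal standalone sketch of the autoflush behavior the docstring above describes: a pending object is flushed automatically before a query runs, so expressions that depend on object state see current data. The User model, in-memory SQLite URL, and 1.4-style declarative imports here are assumptions for illustration, not part of the original test.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

session = Session(engine, autoflush=True)
session.add(User(name="ed"))

# the pending INSERT is flushed automatically before the SELECT executes,
# so the query already sees the new row even though commit() was never called
print(session.query(User).filter_by(name="ed").count())  # 1
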
Example #5
    def test_update_changes_resets_dirty(self):
        User = self.classes.User

        sess = Session(autoflush=False)

        john, jack, jill, jane = sess.query(User).order_by(User.id).all()

        john.age = 50
        jack.age = 37

        # autoflush is false.  therefore our '50' and '37' are getting
        # blown away by this operation.

        sess.query(User).filter(User.age > 29).\
            update({'age': User.age - 10}, synchronize_session='evaluate')

        for x in (john, jack, jill, jane):
            assert not sess.is_modified(x)

        eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])

        john.age = 25
        assert john in sess.dirty
        assert jack in sess.dirty
        assert jill not in sess.dirty
        assert not sess.is_modified(john)
        assert not sess.is_modified(jack)
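
The comment about autoflush and the 'evaluate' strategy is the point of the test above: a bulk UPDATE bypasses the unit of work, and synchronize_session decides how objects already in the Session are reconciled with it. A hedged, self-contained sketch using the Query.update() API (the model and values are hypothetical):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    age = Column(Integer)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

session = Session(engine, autoflush=False)
session.add_all([User(name="john", age=35), User(name="jill", age=29)])
session.commit()

john = session.query(User).filter_by(name="john").one()

# the UPDATE runs directly against the database; synchronize_session='evaluate'
# applies the same criteria and change to objects already in the Session, so
# john.age is adjusted in memory without a reload
session.query(User).filter(User.age > 29).update(
    {"age": User.age - 10}, synchronize_session="evaluate"
)
print(john.age)  # 25
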
Example #6
    def test_date_roundtrip(self):
        t = Table(
            'test_dates', metadata,
            Column('id', Integer,
                   Sequence('datetest_id_seq', optional=True),
                   primary_key=True),
            Column('adate', Date),
            Column('atime', Time),
            Column('adatetime', DateTime))
        metadata.create_all()
        d1 = datetime.date(2007, 10, 30)
        t1 = datetime.time(11, 2, 32)
        d2 = datetime.datetime(2007, 10, 30, 11, 2, 32)
        t.insert().execute(adate=d1, adatetime=d2, atime=t1)
        t.insert().execute(adate=d2, adatetime=d2, atime=d2)

        x = t.select().execute().fetchall()[0]
        self.assert_(x.adate.__class__ == datetime.date)
        self.assert_(x.atime.__class__ == datetime.time)
        self.assert_(x.adatetime.__class__ == datetime.datetime)

        t.delete().execute()

        t.insert().execute(adate=d1, adatetime=d2, atime=t1)

        eq_(select([t.c.adate, t.c.atime, t.c.adatetime], t.c.adate
            == d1).execute().fetchall(), [(d1, t1, d2)])
Example #7
    def test_defaults_second_table(self):
        users, addresses = self.tables.users, self.tables.addresses

        values = {
            addresses.c.email_address: users.c.name,
            users.c.name: 'ed2'
        }

        ret = testing.db.execute(
            addresses.update().
                values(values).
                where(users.c.id == addresses.c.user_id).
                where(users.c.name == 'ed'))

        eq_(set(ret.prefetch_cols()), set([users.c.some_update]))

        expected = [
            (2, 8, 'ed'),
            (3, 8, 'ed'),
            (4, 9, '*****@*****.**')]
        self._assert_addresses(addresses, expected)

        expected = [
            (8, 'ed2', 'im the update'),
            (9, 'fred', 'value')]
        self._assert_users(users, expected)
Example #8
    def test_char(self):
        """Exercise COLLATE-ish options on string types."""

        columns = [
            (mssql.MSChar, [], {},
             'CHAR'),
            (mssql.MSChar, [1], {},
             'CHAR(1)'),
            (mssql.MSChar, [1], {'collation': 'Latin1_General_CI_AS'},
             'CHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSNChar, [], {},
             'NCHAR'),
            (mssql.MSNChar, [1], {},
             'NCHAR(1)'),
            (mssql.MSNChar, [1], {'collation': 'Latin1_General_CI_AS'},
             'NCHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSString, [], {},
             'VARCHAR(max)'),
            (mssql.MSString, [1], {},
             'VARCHAR(1)'),
            (mssql.MSString, [1], {'collation': 'Latin1_General_CI_AS'},
             'VARCHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSNVarchar, [], {},
             'NVARCHAR(max)'),
            (mssql.MSNVarchar, [1], {},
             'NVARCHAR(1)'),
            (mssql.MSNVarchar, [1], {'collation': 'Latin1_General_CI_AS'},
             'NVARCHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSText, [], {},
             'TEXT'),
            (mssql.MSText, [], {'collation': 'Latin1_General_CI_AS'},
             'TEXT COLLATE Latin1_General_CI_AS'),

            (mssql.MSNText, [], {},
             'NTEXT'),
            (mssql.MSNText, [], {'collation': 'Latin1_General_CI_AS'},
             'NTEXT COLLATE Latin1_General_CI_AS'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_charset', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        charset_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))

        for col in charset_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
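
The COLLATE rendering exercised above can also be observed on the generic String type without building a whole Table; a small sketch (the collation name is just an example value):

from sqlalchemy import String
from sqlalchemy.dialects import mssql

# the collation keyword renders a COLLATE clause in the DDL for the type,
# the same mechanism the dialect-specific CHAR/NCHAR/VARCHAR types use above
print(String(30, collation="Latin1_General_CI_AS").compile(dialect=mssql.dialect()))
# VARCHAR(30) COLLATE Latin1_General_CI_AS
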
Example #9
    def test_boolean(self):
        "Exercise type specification for boolean type."

        columns = [
            # column type, args, kwargs, expected ddl
            (Boolean, [], {},
             'BIT'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_boolean', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        boolean_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))

        for col in boolean_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #10
    def test_boolean(self):
        """Test that the boolean only treats 1 as True

        """

        meta = MetaData(testing.db)
        t = Table('bool_table', meta, Column('id', Integer,
                  primary_key=True), Column('boo',
                  Boolean(create_constraint=False)))
        try:
            meta.create_all()
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (1, 'false');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (2, 'true');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (3, '1');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (4, '0');")
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (5, 1);')
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (6, 0);')
            eq_(t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
                [(3, True), (5, True)])
        finally:
            meta.drop_all()
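
The docstring hinges on create_constraint: without the CHECK constraint, SQLite stores the literal strings unchanged and only the integer 1 round-trips as True. A minimal DDL sketch (hypothetical table) showing the constraint the test above deliberately turns off:

from sqlalchemy import Boolean, Column, Integer, MetaData, Table
from sqlalchemy.dialects import sqlite
from sqlalchemy.schema import CreateTable

meta = MetaData()
t = Table(
    "bool_table", meta,
    Column("id", Integer, primary_key=True),
    # create_constraint=True adds a CHECK constraint limiting stored values
    # to 0/1 on backends without a native boolean type
    Column("boo", Boolean(create_constraint=True)),
)
print(CreateTable(t).compile(dialect=sqlite.dialect()))
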
Example #11
    def test_attached_as_schema(self):
        cx = testing.db.connect()
        try:
            cx.execute('ATTACH DATABASE ":memory:" AS  test_schema')
            dialect = cx.dialect
            assert dialect.get_table_names(cx, 'test_schema') == []
            meta = MetaData(cx)
            Table('created', meta, Column('id', Integer),
                  schema='test_schema')
            alt_master = Table('sqlite_master', meta, autoload=True,
                               schema='test_schema')
            meta.create_all(cx)
            eq_(dialect.get_table_names(cx, 'test_schema'), ['created'])
            assert len(alt_master.c) > 0
            meta.clear()
            reflected = Table('created', meta, autoload=True,
                              schema='test_schema')
            assert len(reflected.c) == 1
            cx.execute(reflected.insert(), dict(id=1))
            r = cx.execute(reflected.select()).fetchall()
            assert list(r) == [(1, )]
            cx.execute(reflected.update(), dict(id=2))
            r = cx.execute(reflected.select()).fetchall()
            assert list(r) == [(2, )]
            cx.execute(reflected.delete(reflected.c.id == 2))
            r = cx.execute(reflected.select()).fetchall()
            assert list(r) == []

            # note that sqlite_master is cleared, above

            meta.drop_all()
            assert dialect.get_table_names(cx, 'test_schema') == []
        finally:
            cx.execute('DETACH DATABASE test_schema')
Example #12
    def test_basic(self):
        for threadlocal in False, True:
            engine = engines.reconnecting_engine(
                options={'pool_threadlocal': threadlocal})

            conn = engine.contextual_connect()
            eq_(conn.execute(select([1])).scalar(), 1)
            conn.close()

            # set the pool recycle down to 1.
            # we aren't doing this inline with the
            # engine create since cx_oracle takes way
            # too long to create the 1st connection and don't
            # want to build a huge delay into this test.

            engine.pool._recycle = 1

            # kill the DB connection
            engine.test_shutdown()

            # wait until past the recycle period
            time.sleep(2)

            # can connect, no exception
            conn = engine.contextual_connect()
            eq_(conn.execute(select([1])).scalar(), 1)
            conn.close()
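
The comment explains why the test reaches into the private engine.pool._recycle attribute; the public way to configure connection recycling is the pool_recycle argument to create_engine. A small sketch (SQLite URL chosen only so it runs anywhere):

from sqlalchemy import create_engine

# connections older than pool_recycle seconds are discarded and replaced the
# next time they are checked out, which avoids reusing connections a server
# may have silently dropped
engine = create_engine("sqlite://", pool_recycle=1)
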
Example #13
    def _test_round_trip(self, fixture, warnings=False):
        from sqlalchemy import inspect
        conn = testing.db.connect()
        for from_, to_ in self._fixture_as_string(fixture):
            inspector = inspect(conn)
            conn.execute("CREATE TABLE foo (data %s)" % from_)
            try:
                if warnings:
                    def go():
                        return inspector.get_columns("foo")[0]
                    col_info = testing.assert_warnings(go,
                                    ["Could not instantiate"], regex=True)
                else:
                    col_info = inspector.get_columns("foo")[0]
                expected_type = type(to_)
                is_(type(col_info['type']), expected_type)

                # test args
                for attr in ("scale", "precision", "length"):
                    if getattr(to_, attr, None) is not None:
                        eq_(
                            getattr(col_info['type'], attr),
                            getattr(to_, attr, None)
                        )
            finally:
                conn.execute("DROP TABLE foo")
Example #14
    def _test_queuepool(self, q, dispose=True):
        conn = q.connect()
        conn.close()
        conn = None

        conn = q.connect()
        conn.close()
        conn = None

        conn = q.connect()
        conn = None
        del conn
        lazy_gc()
        q.dispose()

        eq_(
            [buf.msg for buf in self.buf.buffer],
            [
                "Created new connection %r",
                "Connection %r checked out from pool",
                "Connection %r being returned to pool",
                "Connection %s rollback-on-return%s",
                "Connection %r checked out from pool",
                "Connection %r being returned to pool",
                "Connection %s rollback-on-return%s",
                "Connection %r checked out from pool",
                "Connection %r being returned to pool",
                "Connection %s rollback-on-return%s",
                "Closing connection %r",
            ]
            + (["Pool disposed. %s"] if dispose else []),
        )
Example #15
 def test_with_transaction(self):
     conn = self.engine.connect()
     trans = conn.begin()
     eq_(conn.execute(select([1])).scalar(), 1)
     assert not conn.closed
     self.engine.test_shutdown()
     _assert_invalidated(conn.execute, select([1]))
     assert not conn.closed
     assert conn.invalidated
     assert trans.is_active
     assert_raises_message(
         tsa.exc.StatementError,
         "Can't reconnect until invalid transaction is rolled back",
         conn.execute, select([1]))
     assert trans.is_active
     assert_raises_message(
         tsa.exc.InvalidRequestError,
         "Can't reconnect until invalid transaction is rolled back",
         trans.commit
     )
     assert trans.is_active
     trans.rollback()
     assert not trans.is_active
     assert conn.invalidated
     eq_(conn.execute(select([1])).scalar(), 1)
     assert not conn.invalidated
Example #16
    def test_invocation_per_mapper(self):
        """test that BakedLazyLoader is getting invoked with the
        "baked_select" lazy setting.

        """
        User, Address = self._o2m_fixture(lazy="baked_select")

        sess = Session()
        q = sess.query(User).options(lazyload(User.addresses))

        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # not invoked
            eq_(el.mock_calls, [])

        sess = Session()
        q = sess.query(User)
        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # invoked
            is_(
                el.mock_calls[0][1][1],
                u1._sa_instance_state
            )
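
The docstring refers to the baked query extension; outside of the lazy-loader integration tested here, the same bakery can be used directly to cache query construction. A hedged standalone sketch (hypothetical User model; baked is a legacy extension, largely superseded by the built-in statement caching in 1.4):

from sqlalchemy import Column, Integer, String, bindparam, create_engine
from sqlalchemy.ext import baked
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

bakery = baked.bakery()


def lookup(session, name):
    # the lambdas run only the first time; afterwards the cached query
    # structure is reused and only the bound parameter value changes
    bq = bakery(lambda s: s.query(User))
    bq += lambda q: q.filter(User.name == bindparam("name"))
    return bq(session).params(name=name).all()


session = Session(engine)
session.add(User(name="ed"))
session.commit()
print(lookup(session, "ed"))
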
Example #17
    def test_invocation_systemwide_loaders(self):
        baked.bake_lazy_loaders()
        try:
            User, Address = self._o2m_fixture()

            sess = Session()
            q = sess.query(User).options(lazyload(User.addresses))
            with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
                u1 = q.first()
                u1.addresses
                # invoked
                is_(
                    el.mock_calls[0][1][1],
                    u1._sa_instance_state
                )
        finally:
            baked.unbake_lazy_loaders()

        clear_mappers()
        User, Address = self._o2m_fixture()
        sess = Session()
        q = sess.query(User).options(lazyload(User.addresses))

        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # not invoked
            eq_(el.mock_calls, [])
Example #18
 def test_match_across_joins(self):
     results = matchtable.select().where(and_(cattable.c.id
             == matchtable.c.category_id,
             or_(cattable.c.description.match('Ruby'),
             matchtable.c.title.match('nutshell'
             )))).order_by(matchtable.c.id).execute().fetchall()
     eq_([1, 3, 5], [r.id for r in results])
Example #19
    def test_spoiled_full_w_params(self):
        User = self.classes.User

        canary = mock.Mock()

        def fn1(s):
            canary.fn1()
            return s.query(User.id, User.name).order_by(User.id)

        def fn2(q):
            canary.fn2()
            return q.filter(User.id == bindparam('id'))

        def fn3(q):
            canary.fn3()
            return q

        for x in range(3):
            bq = self.bakery(fn1)

            bq += fn2

            sess = Session(autocommit=True)
            eq_(
                bq.spoil(full=True).add_criteria(fn3)(sess).params(id=7).all(),
                [(7, 'jack')]
            )

        eq_(
            canary.mock_calls,
            [mock.call.fn1(), mock.call.fn2(), mock.call.fn3(),
             mock.call.fn1(), mock.call.fn2(), mock.call.fn3(),
             mock.call.fn1(), mock.call.fn2(), mock.call.fn3()]
        )
Example #20
        def go():
            class A(fixtures.ComparableEntity):
                pass

            class B(A):
                pass

            mapper(
                A,
                table1,
                polymorphic_on=table1.c.col2,
                polymorphic_identity="a",
            )
            mapper(B, table2, inherits=A, polymorphic_identity="b")

            sess = create_session()
            a1 = A()
            a2 = A()
            b1 = B(col3="b1")
            b2 = B(col3="b2")
            for x in [a1, a2, b1, b2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(), A(), B(col3="b1"), B(col3="b2")], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # don't need to clear_mappers()
            del B
            del A
Example #21
    def test_deep_options(self):
        users, items, order_items, Order, Item, User, orders = (self.tables.users,
                                self.tables.items,
                                self.tables.order_items,
                                self.classes.Order,
                                self.classes.Item,
                                self.classes.User,
                                self.tables.orders)

        mapper(Item, items, properties=dict(
            description=deferred(items.c.description)))
        mapper(Order, orders, properties=dict(
            items=relationship(Item, secondary=order_items)))
        mapper(User, users, properties=dict(
            orders=relationship(Order, order_by=orders.c.id)))

        sess = create_session()
        q = sess.query(User).order_by(User.id)
        l = q.all()
        item = l[0].orders[1].items[1]
        def go():
            eq_(item.description, 'item 4')
        self.sql_count_(1, go)
        eq_(item.description, 'item 4')

        sess.expunge_all()
        l = q.options(undefer('orders.items.description')).all()
        item = l[0].orders[1].items[1]
        def go():
            eq_(item.description, 'item 4')
        self.sql_count_(0, go)
        eq_(item.description, 'item 4')
Example #22
    def test_limit_offset_with_correlated_order_by(self):
        t1 = table('t1', column('x', Integer), column('y', Integer))
        t2 = table('t2', column('x', Integer), column('y', Integer))

        order_by = select([t2.c.y]).where(t1.c.x == t2.c.x).as_scalar()
        s = select([t1]).where(t1.c.x == 5).order_by(order_by) \
            .limit(10).offset(20)

        self.assert_compile(
            s,
            "SELECT anon_1.x, anon_1.y "
            "FROM (SELECT t1.x AS x, t1.y AS y, "
            "ROW_NUMBER() OVER (ORDER BY "
            "(SELECT t2.y FROM t2 WHERE t1.x = t2.x)"
            ") AS mssql_rn "
            "FROM t1 "
            "WHERE t1.x = :x_1) AS anon_1 "
            "WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
            checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
        )

        c = s.compile(dialect=mssql.MSDialect())
        eq_(len(c._result_columns), 2)
        assert t1.c.x in set(c._create_result_map()['x'][1])
        assert t1.c.y in set(c._create_result_map()['y'][1])
Example #23
    def test_selective_relationships(self):
        sub, base_mtom, Related, Base, related, sub_mtom, base, Sub = (self.tables.sub,
                                self.tables.base_mtom,
                                self.classes.Related,
                                self.classes.Base,
                                self.tables.related,
                                self.tables.sub_mtom,
                                self.tables.base,
                                self.classes.Sub)

        mapper(Base, base, properties={'related': relationship(Related,
               secondary=base_mtom, backref='bases',
               order_by=related.c.id)})
        mapper(Sub, sub, inherits=Base, concrete=True,
               properties={'related': relationship(Related,
               secondary=sub_mtom, backref='subs',
               order_by=related.c.id)})
        mapper(Related, related)
        sess = sessionmaker()()
        b1, s1, r1, r2, r3 = Base(), Sub(), Related(), Related(), \
            Related()
        b1.related.append(r1)
        b1.related.append(r2)
        s1.related.append(r2)
        s1.related.append(r3)
        sess.add_all([b1, s1])
        sess.commit()
        eq_(s1.related, [r2, r3])
        eq_(b1.related, [r1, r2])
Example #24
    def test_set_composite_attrs_via_selectable(self):
        Values, CustomValues, values, Descriptions, descriptions = (self.classes.Values,
                                self.classes.CustomValues,
                                self.tables.values,
                                self.classes.Descriptions,
                                self.tables.descriptions)

        session = Session()
        d = Descriptions(
            custom_descriptions = CustomValues('Color', 'Number'),
            values =[
                Values(custom_values = CustomValues('Red', '5')),
                Values(custom_values=CustomValues('Blue', '1'))
            ]
        )

        session.add(d)
        session.commit()
        eq_(
            testing.db.execute(descriptions.select()).fetchall(),
            [(1, 'Color', 'Number')]
        )
        eq_(
            testing.db.execute(values.select()).fetchall(),
            [(1, 1, 'Red', '5'), (2, 1, 'Blue', '1')]
        )
Example #25
 def test_basic(self):
     pjoin = polymorphic_union({'manager': managers_table, 'engineer'
                               : engineers_table}, 'type', 'pjoin')
     employee_mapper = mapper(Employee, pjoin,
                              polymorphic_on=pjoin.c.type)
     manager_mapper = mapper(Manager, managers_table,
                             inherits=employee_mapper,
                             concrete=True,
                             polymorphic_identity='manager')
     engineer_mapper = mapper(Engineer, engineers_table,
                              inherits=employee_mapper,
                              concrete=True,
                              polymorphic_identity='engineer')
     session = create_session()
     session.add(Manager('Tom', 'knows how to manage things'))
     session.add(Engineer('Kurt', 'knows how to hack'))
     session.flush()
     session.expunge_all()
     assert set([repr(x) for x in session.query(Employee)]) \
         == set(['Engineer Kurt knows how to hack',
                'Manager Tom knows how to manage things'])
     assert set([repr(x) for x in session.query(Manager)]) \
         == set(['Manager Tom knows how to manage things'])
     assert set([repr(x) for x in session.query(Engineer)]) \
         == set(['Engineer Kurt knows how to hack'])
     manager = session.query(Manager).one()
     session.expire(manager, ['manager_data'])
     eq_(manager.manager_data, 'knows how to manage things')
Example #26
 def test_determine_remote_columns_m2o_composite_selfref(self):
     joincond = self._join_fixture_m2o_composite_selfref()
     eq_(
         joincond.remote_columns,
         set([self.composite_selfref.c.id,
             self.composite_selfref.c.group_id])
     )
Example #27
 def test_determine_local_remote_pairs_m2m(self):
     joincond = self._join_fixture_m2m()
     eq_(
         joincond.local_remote_pairs,
         [(self.m2mleft.c.id, self.m2msecondary.c.lid),
         (self.m2mright.c.id, self.m2msecondary.c.rid)]
     )
Example #28
 def test_time_result_processor(self):
     eq_(
         mysql.TIME().result_processor(None, None)(
             datetime.timedelta(seconds=35, minutes=517,
                                microseconds=450)),
         datetime.time(8, 37, 35, 450)
     )
Example #29
 def test_determine_remote_columns_compound_2(self):
     joincond = self._join_fixture_compound_expression_2(
                             support_sync=False)
     eq_(
         joincond.remote_columns,
         set([self.right.c.x, self.right.c.y])
     )
Example #30
        def go():
            sess = create_session()
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")]),
                ],
                alist,
            )

            for a in alist:
                sess.delete(a)
            sess.flush()
Example #31
 def test_int_to_bool_none(self):
     eq_(self.module.int_to_boolean(None), None)
Example #32
    def _test_get_foreign_keys(self, schema=None):
        meta = self.metadata
        users, addresses, dingalings = self.tables.users, \
                    self.tables.email_addresses, self.tables.dingalings
        insp = inspect(meta.bind)
        expected_schema = schema
        # users
        users_fkeys = insp.get_foreign_keys(users.name,
                                            schema=schema)
        fkey1 = users_fkeys[0]

        with testing.requires.named_constraints.fail_if():
            self.assert_(fkey1['name'] is not None)

        eq_(fkey1['referred_schema'], expected_schema)
        eq_(fkey1['referred_table'], users.name)
        eq_(fkey1['referred_columns'], ['user_id', ])
        if testing.requires.self_referential_foreign_keys.enabled:
            eq_(fkey1['constrained_columns'], ['parent_user_id'])

        #addresses
        addr_fkeys = insp.get_foreign_keys(addresses.name,
                                           schema=schema)
        fkey1 = addr_fkeys[0]

        with testing.requires.named_constraints.fail_if():
            self.assert_(fkey1['name'] is not None)

        eq_(fkey1['referred_schema'], expected_schema)
        eq_(fkey1['referred_table'], users.name)
        eq_(fkey1['referred_columns'], ['user_id', ])
        eq_(fkey1['constrained_columns'], ['remote_user_id'])
Example #33
 def test_get_default_schema_name(self):
     insp = inspect(testing.db)
     eq_(insp.default_schema_name, testing.db.dialect.default_schema_name)
Example #34
    def test_tuple_labeling(self):
        sess = create_session()

        # test pickle + all the protocols !
        for pickled in False, -1, 0, 1, 2:
            for row in sess.query(User, Address).join(User.addresses).all():
                if pickled is not False:
                    row = pickle.loads(pickle.dumps(row, pickled))

                eq_(list(row.keys()), ["User", "Address"])
                eq_(row.User, row[0])
                eq_(row.Address, row[1])

            for row in sess.query(User.name, User.id.label("foobar")):
                if pickled is not False:
                    row = pickle.loads(pickle.dumps(row, pickled))
                eq_(list(row.keys()), ["name", "foobar"])
                eq_(row.name, row[0])
                eq_(row.foobar, row[1])

            for row in sess.query(User).values(User.name,
                                               User.id.label("foobar")):
                if pickled is not False:
                    row = pickle.loads(pickle.dumps(row, pickled))
                eq_(list(row.keys()), ["name", "foobar"])
                eq_(row.name, row[0])
                eq_(row.foobar, row[1])

            oalias = aliased(Order)
            for row in (sess.query(User, oalias).join(
                    User.orders.of_type(oalias)).all()):
                if pickled is not False:
                    row = pickle.loads(pickle.dumps(row, pickled))
                eq_(list(row.keys()), ["User"])
                eq_(row.User, row[0])

            oalias = aliased(Order, name="orders")
            for row in (sess.query(User, oalias).join(oalias,
                                                      User.orders).all()):
                if pickled is not False:
                    row = pickle.loads(pickle.dumps(row, pickled))
                eq_(list(row.keys()), ["User", "orders"])
                eq_(row.User, row[0])
                eq_(row.orders, row[1])

            for row in sess.query(User.name + "hoho", User.name):
                eq_(list(row.keys()), ["name"])
                eq_(row[0], row.name + "hoho")

            if pickled is not False:
                ret = sess.query(User, Address).join(User.addresses).all()
                pickle.loads(pickle.dumps(ret, pickled))
Example #35
    def test_instance_deferred_cols(self):
        users, addresses = (self.tables.users, self.tables.addresses)

        mapper(
            User,
            users,
            properties={"addresses": relationship(Address, backref="user")},
        )
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(name="ed")
        u1.addresses.append(Address(email_address="*****@*****.**"))
        sess.add(u1)
        sess.flush()
        sess.expunge_all()

        u1 = (sess.query(User).options(
            sa.orm.defer("name"),
            sa.orm.defer("addresses.email_address")).get(u1.id))
        assert "name" not in u1.__dict__
        assert "addresses" not in u1.__dict__

        u2 = pickle.loads(pickle.dumps(u1))
        sess2 = create_session()
        sess2.add(u2)
        eq_(u2.name, "ed")
        assert "addresses" not in u2.__dict__
        ad = u2.addresses[0]
        assert "email_address" not in ad.__dict__
        eq_(ad.email_address, "*****@*****.**")
        eq_(
            u2,
            User(name="ed", addresses=[Address(email_address="*****@*****.**")]),
        )

        u2 = pickle.loads(pickle.dumps(u1))
        sess2 = create_session()
        u2 = sess2.merge(u2, load=False)
        eq_(u2.name, "ed")
        assert "addresses" not in u2.__dict__
        ad = u2.addresses[0]

        # mapper options now transmit over merge(),
        # new as of 0.6, so email_address is deferred.
        assert "email_address" not in ad.__dict__

        eq_(ad.email_address, "*****@*****.**")
        eq_(
            u2,
            User(name="ed", addresses=[Address(email_address="*****@*****.**")]),
        )
Example #36
 def test_column_collection_ordered(self):
     t = text("select a, b, c from foo").columns(
         column("a"), column("b"), column("c")
     )
     eq_(t.selected_columns.keys(), ["a", "b", "c"])
Example #37
 def test_int_to_bool_zero(self):
     eq_(self.module.int_to_boolean(0), False)
Example #38
 def test_distill_multi_string_tuple(self):
     eq_(
         self.module._distill_params((("arg", "arg"), ), {}),
         [("arg", "arg")],
     )
Example #39
 def test_distill_multi_strings(self):
     eq_(self.module._distill_params(("foo", "bar"), {}), [("foo", "bar")])
Example #40
 def test_int_to_bool_negative_int(self):
     eq_(self.module.int_to_boolean(-4), True)
Example #41
 def test_distill_multi_list_tuple(self):
     eq_(
         self.module._distill_params(([("foo", "bar")], [("bar", "bat")]),
                                     {}),
         ([("foo", "bar")], [("bar", "bat")]),
     )
Example #42
 def test_distill_single_string(self):
     eq_(self.module._distill_params(("arg", ), {}), [["arg"]])
Example #43
 def test_distill_single_list_strings(self):
     eq_(
         self.module._distill_params((["foo", "bar"], ), {}),
         [["foo", "bar"]],
     )
Example #44
 def test_int_to_bool_positive_int(self):
     eq_(self.module.int_to_boolean(12), True)
Example #45
 def test_distill_dict_multi_empty_param(self):
     eq_(self.module._distill_params((), {"foo": "bar"}), [{"foo": "bar"}])
Example #46
 def test_distill_single_list_tuple(self):
     eq_(
         self.module._distill_params(([("foo", "bar")], ), {}),
         [("foo", "bar")],
     )
Example #47
 def test_distill_no_multi_no_param(self):
     eq_(self.module._distill_params((), {}), [])
Example #48
 def test_int_to_bool_one(self):
     eq_(self.module.int_to_boolean(1), True)
Example #49
    def _test_onetomany(self, passive_updates):
        User, Address, users, addresses = (self.classes.User,
                                           self.classes.Address,
                                           self.tables.users,
                                           self.tables.addresses)

        mapper(User,
               users,
               properties={
                   'addresses':
                   relationship(Address, passive_updates=passive_updates)
               })
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(username='******', fullname='jack')
        u1.addresses.append(Address(email='jack1'))
        u1.addresses.append(Address(email='jack2'))
        sess.add(u1)
        sess.flush()
        a1 = u1.addresses[0]

        eq_(
            sa.select([addresses.c.username]).execute().fetchall(),
            [('jack', ), ('jack', )])

        assert sess.query(Address).get(a1.id) is u1.addresses[0]

        u1.username = '******'
        sess.flush()
        assert u1.addresses[0].username == 'ed'
        eq_(
            sa.select([addresses.c.username]).execute().fetchall(), [('ed', ),
                                                                     ('ed', )])

        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

        u1 = sess.query(User).get(u1.id)
        u1.username = '******'

        def go():
            sess.flush()

        if not passive_updates:
            # test passive_updates=False; load addresses,
            #  update user, update 2 addresses (in one executemany)
            self.assert_sql_count(testing.db, go, 3)
        else:
            # test passive_updates=True; update user
            self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()
        assert User(username='******',
                    addresses=[
                        Address(username='******'),
                        Address(username='******')
                    ]) == sess.query(User).get(u1.id)
        sess.expunge_all()

        u1 = sess.query(User).get(u1.id)
        u1.addresses = []
        u1.username = '******'
        sess.flush()
        sess.expunge_all()
        a1 = sess.query(Address).get(a1.id)
        eq_(a1.username, None)

        eq_(
            sa.select([addresses.c.username]).execute().fetchall(), [(None, ),
                                                                     (None, )])

        u1 = sess.query(User).get(u1.id)
        eq_(User(username='******', fullname='jack'), u1)
Example #50
 def test_distill_dict_multi_none_param(self):
     eq_(self.module._distill_params(None, {"foo": "bar"}), [{
         "foo": "bar"
     }])
Example #51
    def _test_manytomany(self, passive_updates):
        users, items, Item, User, users_to_items = (self.tables.users,
                                                    self.tables.items,
                                                    self.classes.Item,
                                                    self.classes.User,
                                                    self.tables.users_to_items)

        mapper(User,
               users,
               properties={
                   'items':
                   relationship(Item,
                                secondary=users_to_items,
                                backref='users',
                                passive_updates=passive_updates)
               })
        mapper(Item, items)

        sess = create_session()
        u1 = User(username='******')
        u2 = User(username='******')
        i1 = Item(itemname='item1')
        i2 = Item(itemname='item2')

        u1.items.append(i1)
        u1.items.append(i2)
        i2.users.append(u2)
        sess.add(u1)
        sess.add(u2)
        sess.flush()

        r = sess.query(Item).all()
        # ComparableEntity can't handle a comparison with the backrefs
        # involved....
        eq_(Item(itemname='item1'), r[0])
        eq_(['jack'], [u.username for u in r[0].users])
        eq_(Item(itemname='item2'), r[1])
        eq_(['jack', 'fred'], [u.username for u in r[1].users])

        u2.username = '******'

        def go():
            sess.flush()

        go()

        def go():
            sess.flush()

        self.assert_sql_count(testing.db, go, 0)

        sess.expunge_all()
        r = sess.query(Item).all()
        eq_(Item(itemname='item1'), r[0])
        eq_(['jack'], [u.username for u in r[0].users])
        eq_(Item(itemname='item2'), r[1])
        eq_(['ed', 'jack'], sorted([u.username for u in r[1].users]))

        sess.expunge_all()
        u2 = sess.query(User).get(u2.username)
        u2.username = '******'
        sess.flush()
        r = sess.query(Item).with_parent(u2).all()
        eq_(Item(itemname='item2'), r[0])
Example #52
 def test_distill_none(self):
     eq_(self.module._distill_params(None, None), [])
Example #53
    def _test_onetomany(self, passive_updates):
        """Change the PK of a related entity via foreign key cascade.

        For databases that require "on update cascade", the mapper
        has to identify the row by the new value, not the old, when
        it does the update.

        """

        User, Address, users, addresses = (self.classes.User,
                                           self.classes.Address,
                                           self.tables.users,
                                           self.tables.addresses)

        mapper(User,
               users,
               properties={
                   'addresses':
                   relationship(Address, passive_updates=passive_updates)
               })
        mapper(Address, addresses)

        sess = create_session()
        a1, a2 = Address(username='******', email='ed@host1'), \
            Address(username='******', email='ed@host2')
        u1 = User(username='******', addresses=[a1, a2])
        sess.add(u1)
        sess.flush()
        eq_(a1.username, 'ed')
        eq_(a2.username, 'ed')
        eq_(
            sa.select([addresses.c.username]).execute().fetchall(), [('ed', ),
                                                                     ('ed', )])

        u1.username = '******'
        a2.email = 'ed@host3'
        sess.flush()

        eq_(a1.username, 'jack')
        eq_(a2.username, 'jack')
        eq_(
            sa.select([addresses.c.username]).execute().fetchall(),
            [('jack', ), ('jack', )])
Example #54
    def test_manytoone_deferred_relationship_expr(self):
        """for [ticket:4359], test that updates to the columns embedded
        in an object expression are also updated."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(
            Address,
            addresses,
            properties={
                'user':
                relationship(
                    User,
                    passive_updates=testing.requires.on_update_cascade.enabled)
            })

        s = Session()
        a1 = Address(email='jack1')
        u1 = User(username='******', fullname='jack')

        a1.user = u1

        # scenario 1.  object is still transient, we get a value.
        expr = Address.user == u1

        eq_(expr.left.callable(), 'jack')

        # scenario 2.  value has been changed while we are transient.
        # we get the updated value.
        u1.username = '******'
        eq_(expr.left.callable(), 'ed')

        s.add_all([u1, a1])
        s.commit()

        eq_(a1.username, 'ed')

        # scenario 3.  the value is changed and flushed, we get the new value.
        u1.username = '******'
        s.flush()

        eq_(expr.left.callable(), 'fred')

        # scenario 4.  the value is changed, flushed, and expired.
        # the callable goes out to get that value.
        u1.username = '******'
        s.commit()
        assert 'username' not in u1.__dict__

        eq_(expr.left.callable(), 'wendy')

        # scenario 5.  the value is changed flushed, expired,
        # and then when we hit the callable, we are detached.
        u1.username = '******'
        s.commit()
        assert 'username' not in u1.__dict__

        s.expunge(u1)

        # InstanceState has a "last known values" feature we use
        # to pick up on this
        eq_(expr.left.callable(), 'jack')

        # doesn't unexpire the attribute
        assert 'username' not in u1.__dict__

        # once we are persistent again, we check the DB
        s.add(u1)
        eq_(expr.left.callable(), 'jack')
        assert 'username' in u1.__dict__

        # scenario 6.  we are using del
        u2 = User(username='******', fullname='jack')
        expr = Address.user == u2

        eq_(expr.left.callable(), 'jack')

        del u2.username

        assert_raises_message(sa.exc.InvalidRequestError,
                              "Can't resolve value for column users.username",
                              expr.left.callable)

        u2.username = '******'
        eq_(expr.left.callable(), 'ed')

        s.add(u2)
        s.commit()

        eq_(expr.left.callable(), 'ed')

        del u2.username

        assert_raises_message(sa.exc.InvalidRequestError,
                              "Can't resolve value for column users.username",
                              expr.left.callable)
Example #55
 def test_insert_returning(self, connection):
     table = self.tables.test_table
     result = connection.execute(table.insert().returning(table.c.y),
                                 {"x": "xvalue"})
     eq_(result.first(), ("yvalue", ))
Example #56
    def _test_bidirectional(self, passive_updates):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(Address,
               addresses,
               properties={
                   'user':
                   relationship(User,
                                passive_updates=passive_updates,
                                backref='addresses')
               })

        sess = create_session()
        a1 = Address(email='jack1')
        a2 = Address(email='jack2')

        u1 = User(username='******', fullname='jack')
        a1.user = u1
        a2.user = u1
        sess.add(a1)
        sess.add(a2)
        sess.flush()

        u1.username = '******'
        (ad1, ad2) = sess.query(Address).all()
        eq_([Address(username='******'), Address(username='******')], [ad1, ad2])

        def go():
            sess.flush()

        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            # two updates bundled
            self.assert_sql_count(testing.db, go, 2)
        eq_([Address(username='******'), Address(username='******')], [ad1, ad2])
        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

        u1 = sess.query(User).get('ed')
        assert len(u1.addresses) == 2  # load addresses
        u1.username = '******'

        def go():
            sess.flush()

        # check that passive_updates is also in effect on the other side
        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            # two updates bundled
            self.assert_sql_count(testing.db, go, 2)
        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())
Example #57
 def go():
     eq_(q.all(), self._fixture())
Example #58
 def test_legacy_setter(self):
     t = table("t", column("c"))
     s = select([t])
     s.for_update = "nowait"
     eq_(s._for_update_arg.nowait, True)
Example #59
 def test_any_five(self):
     sess = fixture_session()
     any_ = Company.employees.of_type(Engineer).any(
         and_(Engineer.primary_language == "cobol")
     )
     eq_(sess.query(Company).filter(any_).one(), self.c2)
Example #60
    def test_case(self, connection):
        inner = select(
            case(
                (info_table.c.pk < 3, "lessthan3"),
                (and_(info_table.c.pk >= 3, info_table.c.pk < 7), "gt3"),
            ).label("x"),
            info_table.c.pk,
            info_table.c.info,
        ).select_from(info_table)

        inner_result = connection.execute(inner).all()

        # Outputs:
        # lessthan3 1 pk_1_data
        # lessthan3 2 pk_2_data
        # gt3 3 pk_3_data
        # gt3 4 pk_4_data
        # gt3 5 pk_5_data
        # gt3 6 pk_6_data
        eq_(
            inner_result,
            [
                ("lessthan3", 1, "pk_1_data"),
                ("lessthan3", 2, "pk_2_data"),
                ("gt3", 3, "pk_3_data"),
                ("gt3", 4, "pk_4_data"),
                ("gt3", 5, "pk_5_data"),
                ("gt3", 6, "pk_6_data"),
            ],
        )

        outer = select(inner.alias("q_inner"))

        outer_result = connection.execute(outer).all()

        assert outer_result == [
            ("lessthan3", 1, "pk_1_data"),
            ("lessthan3", 2, "pk_2_data"),
            ("gt3", 3, "pk_3_data"),
            ("gt3", 4, "pk_4_data"),
            ("gt3", 5, "pk_5_data"),
            ("gt3", 6, "pk_6_data"),
        ]

        w_else = select(
            case(
                [info_table.c.pk < 3, cast(3, Integer)],
                [and_(info_table.c.pk >= 3, info_table.c.pk < 6), 6],
                else_=0,
            ).label("x"),
            info_table.c.pk,
            info_table.c.info,
        ).select_from(info_table)

        else_result = connection.execute(w_else).all()

        eq_(
            else_result,
            [
                (3, 1, "pk_1_data"),
                (3, 2, "pk_2_data"),
                (6, 3, "pk_3_data"),
                (6, 4, "pk_4_data"),
                (6, 5, "pk_5_data"),
                (0, 6, "pk_6_data"),
            ],
        )