def test_engine_param_stays(self):

        eng = testing_engine()
        isolation_level = eng.dialect.get_isolation_level(
                                eng.connect().connection)
        level = self._non_default_isolation_level()

        ne_(isolation_level, level)

        eng = testing_engine(options=dict(isolation_level=level))
        eq_(
            eng.dialect.get_isolation_level(
                                eng.connect().connection),
            level
        )

        # check that it stays
        conn = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(conn.connection),
            level
        )
        conn.close()

        conn = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(conn.connection),
            level
        )
        conn.close()
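
Throughout this listing, eq_(), ne_(), is_() and friends are SQLAlchemy's test-suite assertion helpers (sqlalchemy.testing.assertions). As a rough sketch of what they amount to, not the library's exact code:

# rough sketch of the assertion helpers used throughout these examples
def eq_(a, b, msg=None):
    """Assert a == b, with a repr-based message on failure."""
    assert a == b, msg or "%r != %r" % (a, b)

def ne_(a, b, msg=None):
    """Assert a != b."""
    assert a != b, msg or "%r == %r" % (a, b)

def is_(a, b, msg=None):
    """Assert a is b."""
    assert a is b, msg or "%r is not %r" % (a, b)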
Example #2
    def test_flush(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)

        mapper(User, users, properties={
            'addresses':dynamic_loader(mapper(Address, addresses))
        })
        sess = create_session()
        u1 = User(name='jack')
        u2 = User(name='ed')
        u2.addresses.append(Address(email_address='*****@*****.**'))
        u1.addresses.append(Address(email_address='*****@*****.**'))
        sess.add_all((u1, u2))
        sess.flush()

        from sqlalchemy.orm import attributes
        eq_(attributes.get_history(u1, 'addresses'), ([], [Address(email_address='*****@*****.**')], []))

        sess.expunge_all()

        # test the test fixture a little bit
        ne_(User(name='jack', addresses=[Address(email_address='wrong')]),
            sess.query(User).first())
        eq_(User(name='jack', addresses=[Address(email_address='*****@*****.**')]),
            sess.query(User).first())

        eq_([
            User(name='jack', addresses=[Address(email_address='*****@*****.**')]),
            User(name='ed', addresses=[Address(email_address='*****@*****.**')])
            ],
            sess.query(User).all())
    def test_transient_exception(self):
        """An object that goes from a pk value to transient/pending
        doesn't count as a "pk" switch.

        """

        users, Address, addresses, User = (
            self.tables.users,
            self.classes.Address,
            self.tables.addresses,
            self.classes.User,
        )

        mapper(User, users)
        mapper(Address, addresses, properties={"user": relationship(User)})

        sess = create_session()
        u1 = User(id=5, name="u1")
        ad1 = Address(email_address="e1", user=u1)
        sess.add_all([u1, ad1])
        sess.flush()

        make_transient(u1)
        u1.id = None
        u1.username = "******"
        sess.add(u1)
        sess.flush()

        eq_(ad1.user_id, 5)

        sess.expire_all()
        eq_(ad1.user_id, 5)
        ne_(u1.id, 5)
        ne_(u1.id, None)
        eq_(sess.query(User).count(), 2)
    def _assert_eq(self, cls, create, dc_arguments):
        a1 = create("d1", 5)
        a2 = create("d2", 10)
        a3 = create("d1", 5)

        eq_(a1, a3)
        ne_(a1, a2)
    def test_nativeext_interfaceexact(self):
        class A(object):
            __sa_instrumentation_manager__ = (
                instrumentation.InstrumentationManager)

        register_class(A)
        ne_(type(manager_of_class(A)), instrumentation.ClassManager)
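
The attributes.get_history() call in test_flush above returns a History value whose three members are the added, unchanged and deleted collections for an attribute. A self-contained sketch of reading it, assuming SQLAlchemy 1.4-style declarative imports (the Thing class is purely illustrative):

# self-contained sketch of attributes.get_history();
# the History value unpacks as (added, unchanged, deleted)
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import attributes, declarative_base

Base = declarative_base()

class Thing(Base):
    __tablename__ = "thing"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

t = Thing(name="before")
hist = attributes.get_history(t, "name")
print(hist.added, hist.unchanged, hist.deleted)   # the pending value shows up in .added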
Example #8
    def test_more_with_entities_sanity_checks(self):
        """test issue #6503"""
        User, Address, Keyword, Order, Item = self.classes(
            "User", "Address", "Keyword", "Order", "Item"
        )

        sess = fixture_session()

        q1 = (
            sess.query(Address, Order)
            .with_entities(Address.id)
            ._statement_20()
        )
        q2 = (
            sess.query(Address, User).with_entities(Address.id)._statement_20()
        )

        assert not q1._memoized_select_entities
        assert not q2._memoized_select_entities

        # no joins or options, so q1 and q2 have the same cache key as Order/
        # User are discarded.  Note Address is first so propagate_attrs->orm is
        # Address.
        eq_(q1._generate_cache_key(), q2._generate_cache_key())

        q3 = sess.query(Order).with_entities(Address.id)._statement_20()
        q4 = sess.query(User).with_entities(Address.id)._statement_20()

        # with Order/User as lead entity, this affects propagate_attrs->orm
        # so keys are different
        ne_(q3._generate_cache_key(), q4._generate_cache_key())

        # confirm by deleting propagate attrs and memoized key and
        # running again
        q3._propagate_attrs = None
        q4._propagate_attrs = None
        del q3.__dict__["_generate_cache_key"]
        del q4.__dict__["_generate_cache_key"]
        eq_(q3._generate_cache_key(), q4._generate_cache_key())

        # once there's a join() or options() prior to with_entities, now they
        # are not discarded from the key; Order and User are in the
        # _MemoizedSelectEntities
        q5 = (
            sess.query(Address, Order)
            .join(Address.dingaling)
            .with_entities(Address.id)
            ._statement_20()
        )
        q6 = (
            sess.query(Address, User)
            .join(Address.dingaling)
            .with_entities(Address.id)
            ._statement_20()
        )

        assert q5._memoized_select_entities
        assert q6._memoized_select_entities
        ne_(q5._generate_cache_key(), q6._generate_cache_key())
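
The comparisons above rely on _generate_cache_key(), an internal SQLAlchemy 1.4+ API. As a minimal sketch of the underlying rule, statements that differ only in their literal values produce equal keys, while a structural change does not:

# minimal sketch: cache keys ignore bound-parameter *values* (1.4+ assumed)
from sqlalchemy import column, select

s1 = select(column("x")).where(column("x") == 5)
s2 = select(column("x")).where(column("x") == 10)

# same structure -> equal cache keys; only the parameter values differ
assert s1._generate_cache_key() == s2._generate_cache_key()

# a structural change (different operator) produces a different key
s3 = select(column("x")).where(column("x") > 5)
assert s1._generate_cache_key() != s3._generate_cache_key()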
Example #9
    def test_nativeext_interfaceexact(self):
        class A(object):
            __sa_instrumentation_manager__ = (
                instrumentation.InstrumentationManager
            )

        register_class(A)
        ne_(type(manager_of_class(A)), instrumentation.ClassManager)
Example #10
    async def test_engine_eq_ne(self, async_engine):
        e2 = _async_engine.AsyncEngine(async_engine.sync_engine)
        e3 = engines.testing_engine(asyncio=True, transfer_staticpool=True)

        eq_(async_engine, e2)
        ne_(async_engine, e3)

        is_false(async_engine == None)
    def register(self, cls, canary):
        original_init = cls.__init__
        instrumentation.register_class(cls)
        ne_(cls.__init__, original_init)
        manager = instrumentation.manager_of_class(cls)
        def init(state, args, kwargs):
            canary.append((cls, 'init', state.class_))
        event.listen(manager, 'init', init, raw=True)
    async def test_engine_eq_ne(self, async_engine):
        e2 = _async_engine.AsyncEngine(async_engine.sync_engine)
        e3 = testing.engines.testing_engine(asyncio=True)

        eq_(async_engine, e2)
        ne_(async_engine, e3)

        is_false(async_engine == None)
Example #13
    def register(self, cls, canary):
        original_init = cls.__init__
        instrumentation.register_class(cls)
        ne_(cls.__init__, original_init)
        manager = instrumentation.manager_of_class(cls)

        def init(state, args, kwargs):
            canary.append((cls, 'init', state.class_))

        event.listen(manager, 'init', init, raw=True)
Example #14
    def test_nativeext_interfaceexact(self):
        class A:
            __sa_instrumentation_manager__ = (
                instrumentation.InstrumentationManager)

        register_class(A)
        ne_(
            type(attributes.opt_manager_of_class(A)),
            instrumentation.ClassManager,
        )
Example #15
    async def test_connection_eq_ne(self, async_engine):

        async with async_engine.connect() as conn:
            c2 = _async_engine.AsyncConnection(async_engine,
                                               conn.sync_connection)

            eq_(conn, c2)

            async with async_engine.connect() as c3:
                ne_(conn, c3)

            is_false(conn == None)
Example #16
    def test_cache_key_gather_bindparams(self):
        for fixture in self.fixtures:
            case_a = fixture()
            case_b = fixture()

            # in the "bindparams" case, the cache keys for bound parameters
            # with only different values will be the same, but the params
            # themselves are gathered into a collection.
            for a, b in itertools.combinations_with_replacement(
                range(len(case_a)), 2
            ):
                a_params = {"bindparams": []}
                b_params = {"bindparams": []}
                if a == b:
                    a_key = case_a[a]._cache_key(**a_params)
                    b_key = case_b[b]._cache_key(**b_params)
                    eq_(a_key, b_key)

                    if a_params["bindparams"]:
                        for a_param, b_param in zip(
                            a_params["bindparams"], b_params["bindparams"]
                        ):
                            assert a_param.compare(b_param)
                else:
                    a_key = case_a[a]._cache_key(**a_params)
                    b_key = case_b[b]._cache_key(**b_params)

                    if a_key == b_key:
                        for a_param, b_param in zip(
                            a_params["bindparams"], b_params["bindparams"]
                        ):
                            if not a_param.compare(b_param):
                                break
                        else:
                            assert False, "Bound parameters are all the same"
                    else:
                        ne_(a_key, b_key)

                assert_a_params = []
                assert_b_params = []
                visitors.traverse_depthfirst(
                    case_a[a], {}, {"bindparam": assert_a_params.append}
                )
                visitors.traverse_depthfirst(
                    case_b[b], {}, {"bindparam": assert_b_params.append}
                )

                # note we're asserting the order of the params as well as
                # if there are dupes or not.  ordering has to be deterministic
                # and matches what a traversal would provide.
                eq_(a_params["bindparams"], assert_a_params)
                eq_(b_params["bindparams"], assert_b_params)
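
As the comments above note, the bound parameters are gathered alongside the key rather than folded into it. In 1.4+ the CacheKey returned by _generate_cache_key() exposes them as a .bindparams sequence; a small sketch, assuming that API:

# small sketch: the gathered BindParameter objects ride along with the key
from sqlalchemy import column, select

stmt = select(column("x")).where(column("x") == 5)
ck = stmt._generate_cache_key()

# deterministic traversal order, one entry per bound parameter in the statement
for param in ck.bindparams:
    print(param.key, param.value)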
Example #18
    def test_compare_metadata_tables(self):
        # metadata Table objects cache on their own identity, not their
        # structure.   This is mainly to reduce the size of cache keys
        # as well as reduce computational overhead, as Table objects have
        # very large internal state and they are also generally global
        # objects.

        t1 = Table("a", MetaData(), Column("q", Integer), Column("p", Integer))
        t2 = Table("a", MetaData(), Column("q", Integer), Column("p", Integer))

        ne_(t1._generate_cache_key(), t2._generate_cache_key())

        eq_(t1._generate_cache_key().key, (t1,))
    def test_generative_cache_key_regen(self):
        t1 = table("t1", column("a"), column("b"))

        s1 = select([t1])

        ck1 = s1._generate_cache_key()

        s2 = s1.where(t1.c.a == 5)

        ck2 = s2._generate_cache_key()

        ne_(ck1, ck2)
        is_not_(ck1, None)
        is_not_(ck2, None)
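
Because metadata Table objects key on identity, statements only share a cache key when they are built against the very same Table instance. A minimal sketch of that consequence (SQLAlchemy 1.4+ assumed):

# minimal sketch: reuse the same Table object to get cache-key hits
from sqlalchemy import Column, Integer, MetaData, Table, select

md = MetaData()
t = Table("a", md, Column("q", Integer))

s1 = select(t).where(t.c.q == 5)
s2 = select(t).where(t.c.q == 7)
assert s1._generate_cache_key() == s2._generate_cache_key()

# an equivalent Table from a different MetaData is a different cache identity
other = Table("a", MetaData(), Column("q", Integer))
s3 = select(other).where(other.c.q == 5)
assert s1._generate_cache_key() != s3._generate_cache_key()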
Example #20
    def test_cache_key(self):
        def assert_params_append(assert_params):
            def append(param):
                if param._value_required_for_cache:
                    assert_params.append(param)
                else:
                    is_(param.value, None)

            return append

        for fixture in self.fixtures:
            case_a = fixture()
            case_b = fixture()

            for a, b in itertools.combinations_with_replacement(
                range(len(case_a)), 2
            ):

                assert_a_params = []
                assert_b_params = []

                visitors.traverse_depthfirst(
                    case_a[a],
                    {},
                    {"bindparam": assert_params_append(assert_a_params)},
                )
                visitors.traverse_depthfirst(
                    case_b[b],
                    {},
                    {"bindparam": assert_params_append(assert_b_params)},
                )
                if assert_a_params:
                    assert_raises_message(
                        NotImplementedError,
                        "bindparams collection argument required ",
                        case_a[a]._cache_key,
                    )
                if assert_b_params:
                    assert_raises_message(
                        NotImplementedError,
                        "bindparams collection argument required ",
                        case_b[b]._cache_key,
                    )

                if not assert_a_params and not assert_b_params:
                    if a == b:
                        eq_(case_a[a]._cache_key(), case_b[b]._cache_key())
                    else:
                        ne_(case_a[a]._cache_key(), case_b[b]._cache_key())
    def test_compare_metadata_tables_annotations_two(self):

        t1 = Table("a", MetaData(), Column("q", Integer), Column("p", Integer))
        t2 = Table("a", MetaData(), Column("q", Integer), Column("p", Integer))

        eq_(t2._generate_cache_key().key, (t2, ))

        t1 = t1._annotate({"orm": True})
        t2 = t2._annotate({"orm": True})

        ne_(t1._generate_cache_key(), t2._generate_cache_key())

        eq_(
            t1._generate_cache_key().key,
            (t1, "_annotations", (("orm", True), )),
        )
Example #23
    def test_memoized_instancemethod(self):
        val = [20]

        class Foo(object):
            @util.memoized_instancemethod
            def bar(self):
                v = val[0]
                val[0] += 1
                return v

        ne_(Foo.bar, None)
        f1 = Foo()
        assert 'bar' not in f1.__dict__
        eq_(f1.bar(), 20)
        eq_(f1.bar(), 20)
        eq_(val[0], 21)
Example #25
    def test_memoized_property(self):
        val = [20]

        class Foo(object):
            @util.memoized_property
            def bar(self):
                v = val[0]
                val[0] += 1
                return v

        ne_(Foo.bar, None)
        f1 = Foo()
        assert 'bar' not in f1.__dict__
        eq_(f1.bar, 20)
        eq_(f1.bar, 20)
        eq_(val[0], 21)
        eq_(f1.__dict__['bar'], 20)
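
The behavior asserted here (compute once, then serve later reads straight from __dict__) is the classic non-data-descriptor memoization pattern. A rough sketch of such a descriptor, not util.memoized_property's exact code:

# rough sketch of a compute-once property
class memoized_property(object):
    def __init__(self, fget):
        self.fget = fget
        self.__name__ = fget.__name__
        self.__doc__ = fget.__doc__

    def __get__(self, obj, cls):
        if obj is None:
            return self
        # store the computed value under the attribute name; because this is a
        # non-data descriptor (no __set__), later reads hit __dict__ directly
        obj.__dict__[self.__name__] = result = self.fget(obj)
        return result

class Widget(object):
    @memoized_property
    def expensive(self):
        print("computing...")
        return 42

w = Widget()
w.expensive      # prints "computing..." and stores 42 in w.__dict__
w.expensive      # served from __dict__, no recompute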
Example #26
    def test_row_comparison(self):
        users = self.tables.users

        users.insert().execute(user_id=7, user_name='jack')
        rp = users.select().execute().first()

        eq_(rp, rp)
        is_(not (rp != rp), True)

        equal = (7, 'jack')

        eq_(rp, equal)
        eq_(equal, rp)
        is_((not (rp != equal)), True)
        is_(not (equal != equal), True)

        def endless():
            while True:
                yield 1

        ne_(rp, endless())
        ne_(endless(), rp)

        # test that everything compares the same
        # as it would against a tuple
        for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
            for op in [
                    operator.eq, operator.ne, operator.gt, operator.lt,
                    operator.ge, operator.le
            ]:

                try:
                    control = op(equal, compare)
                except TypeError:
                    # Py3K raises TypeError for some invalid comparisons
                    assert_raises(TypeError, op, rp, compare)
                else:
                    eq_(control, op(rp, compare))

                try:
                    control = op(compare, equal)
                except TypeError:
                    # Py3K raises TypeError for some invalid comparisons
                    assert_raises(TypeError, op, compare, rp)
                else:
                    eq_(control, op(compare, rp))
Example #28
    def test_row_comparison(self):
        users = self.tables.users

        users.insert().execute(user_id=7, user_name='jack')
        rp = users.select().execute().first()

        eq_(rp, rp)
        is_(not(rp != rp), True)

        equal = (7, 'jack')

        eq_(rp, equal)
        eq_(equal, rp)
        is_((not (rp != equal)), True)
        is_(not (equal != equal), True)

        def endless():
            while True:
                yield 1
        ne_(rp, endless())
        ne_(endless(), rp)

        # test that everything compares the same
        # as it would against a tuple
        for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
            for op in [
                operator.eq, operator.ne, operator.gt,
                operator.lt, operator.ge, operator.le
            ]:

                try:
                    control = op(equal, compare)
                except TypeError:
                    # Py3K raises TypeError for some invalid comparisons
                    assert_raises(TypeError, op, rp, compare)
                else:
                    eq_(control, op(rp, compare))

                try:
                    control = op(compare, equal)
                except TypeError:
                    # Py3K raises TypeError for some invalid comparisons
                    assert_raises(TypeError, op, compare, rp)
                else:
                    eq_(control, op(compare, rp))
Example #29
    def test_query(self):
        Array = self.classes.Array
        s = Session(testing.db)

        s.add_all([
            Array(),
            Array(array=[1, 2, 3], array0=[1, 2, 3]),
            Array(array=[4, 5, 6], array0=[4, 5, 6])])
        s.commit()

        a1 = s.query(Array).filter(Array.array == [1, 2, 3]).one()
        a2 = s.query(Array).filter(Array.first == 1).one()
        eq_(a1.id, a2.id)
        a3 = s.query(Array).filter(Array.first == 4).one()
        ne_(a1.id, a3.id)
        a4 = s.query(Array).filter(Array.first0 == 1).one()
        eq_(a1.id, a4.id)
        a5 = s.query(Array).filter(Array.first0 == 4).one()
        ne_(a1.id, a5.id)
Example #30
    def test_stmt_lambda_w_additional_hashable_variants(self):
        # note a Python 2 old style class would fail here because it
        # isn't hashable.   right now we do a hard check for __hash__ which
        # will raise if the attr isn't present
        class Thing(object):
            def __init__(self, col_expr):
                self.col_expr = col_expr

        def go(thing, q):
            stmt = lambdas.lambda_stmt(lambda: select(thing.col_expr))
            stmt += lambda stmt: stmt.where(thing.col_expr == q)

            return stmt

        c1 = Thing(column("x"))
        c2 = Thing(column("y"))

        s1 = go(c1, 5)
        s2 = go(c2, 10)
        s3 = go(c1, 8)
        s4 = go(c2, 12)

        self.assert_compile(s1,
                            "SELECT x WHERE x = :q_1",
                            checkparams={"q_1": 5})
        self.assert_compile(s2,
                            "SELECT y WHERE y = :q_1",
                            checkparams={"q_1": 10})
        self.assert_compile(s3,
                            "SELECT x WHERE x = :q_1",
                            checkparams={"q_1": 8})
        self.assert_compile(s4,
                            "SELECT y WHERE y = :q_1",
                            checkparams={"q_1": 12})

        s1key = s1._generate_cache_key()
        s2key = s2._generate_cache_key()
        s3key = s3._generate_cache_key()
        s4key = s4._generate_cache_key()

        eq_(s1key[0], s3key[0])
        eq_(s2key[0], s4key[0])
        ne_(s1key[0], s2key[0])
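
These lambda statements use the 1.4+ "lambda SQL" feature: closure values such as q become bound parameters, while SQL elements like col_expr must be hashable so they can participate in the lambda's cache key. A brief sketch of the public-API spelling (sqlalchemy.lambda_stmt), offered as an illustration only:

# brief sketch of the public lambda_stmt API (SQLAlchemy 1.4+ assumed)
from sqlalchemy import column, lambda_stmt, select

x = column("x")

def make_stmt(value):
    stmt = lambda_stmt(lambda: select(x))
    # closure values like `value` are extracted as bound parameters,
    # so different values reuse the same cached SQL construction
    stmt += lambda s: s.where(x == value)
    return stmt

s1 = make_stmt(5)
s2 = make_stmt(10)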
Example #31
    def test_unique_identifiers_across_deletes(self):
        """Ensure unique integer values are used for the primary table.

        Checks whether the database assigns the same identifier twice
        within the span of a table.  SQLite will do this unless
        sqlite_autoincrement (i.e. SQLite's AUTOINCREMENT flag) is set.

        """

        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "sometable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))

        self.create_tables()
        sess = self.session
        sc = SomeClass(name="sc1")
        sess.add(sc)
        sess.commit()

        sess.delete(sc)
        sess.commit()

        sc2 = SomeClass(name="sc2")
        sess.add(sc2)
        sess.commit()

        SomeClassHistory = SomeClass.__history_mapper__.class_

        # only one entry should exist in the history table; one()
        # ensures that
        scdeleted = sess.query(SomeClassHistory).one()

        # If sc2 has the same id that deleted sc1 had,
        # it will fail when modified or deleted
        # because of the violation of the uniqueness of the primary key on
        # sometable_history
        ne_(sc2.id, scdeleted.id)

        # If previous assertion fails, this will also fail:
        sc2.name = "sc2 modified"
        sess.commit()
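
The docstring's caveat is about SQLite reusing rowids after deletes unless AUTOINCREMENT is requested. With SQLAlchemy that is opted into per table through the sqlite_autoincrement dialect argument; a short sketch:

# short sketch: opting into SQLite AUTOINCREMENT so rowids are never reused
from sqlalchemy import Column, Integer, MetaData, String, Table

metadata = MetaData()
sometable = Table(
    "sometable", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
    sqlite_autoincrement=True,   # emits AUTOINCREMENT on the SQLite primary key
)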
Example #32
    def test_default_driver(self):
        successes = 0
        for url_prefix, driver_name in [
            ("mariadb://", "mysqldb"),
            ("mssql://", "pyodbc"),
            ("mysql://", "mysqldb"),
            ("oracle://", "cx_oracle"),
            ("postgresql://", "psycopg2"),
            ("sqlite://", "pysqlite"),
        ]:
            try:
                en = create_engine(url_prefix)
                eq_(en.dialect.driver, driver_name)
                successes += 1
            except ModuleNotFoundError:
                # not all test environments will have every driver installed
                pass
        # but we should at least find one
        ne_(successes, 0, "No default drivers found.")
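
When the driver portion is omitted, as in the URLs above, each dialect falls back to its default DBAPI; naming one explicitly uses the "dialect+driver://" form (for example postgresql+psycopg2). A small sketch using SQLite, whose default driver ships with Python:

# small sketch: naming the DBAPI driver explicitly after a "+" in the URL
from sqlalchemy import create_engine

e1 = create_engine("sqlite://")            # default driver for the sqlite dialect
e2 = create_engine("sqlite+pysqlite://")   # same driver, spelled out explicitly

print(e1.dialect.driver, e2.dialect.driver)   # pysqlite pysqlite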
Example #33
    def test_stmt_lambda_w_list_of_opts(self):
        def go(opts):
            stmt = lambdas.lambda_stmt(lambda: select(column("x")))
            stmt += lambda stmt: stmt.options(*opts)

            return stmt

        s1 = go([column("a"), column("b")])

        s2 = go([column("a"), column("b")])

        s3 = go([column("q"), column("b")])

        s1key = s1._generate_cache_key()
        s2key = s2._generate_cache_key()
        s3key = s3._generate_cache_key()

        eq_(s1key.key, s2key.key)
        ne_(s1key.key, s3key.key)
Example #36
    def test_invalidate_conn_w_contextmanager_disconnect(self):
        # test [ticket:3803] change maintains old behavior

        pool = self.db.pool

        conn = self.db.connect()
        self.dbapi.shutdown("execute")

        def go():
            with conn.begin():
                conn.execute(select([1]))

        assert_raises(exc.DBAPIError, go)  # wraps a MockDisconnect

        assert conn.invalidated

        ne_(pool._invalidate_time, 0)  # pool is invalidated

        conn.execute(select([1]))
        assert not conn.invalidated
    def test_query(self):
        Json = self.classes.Json
        s = Session(testing.db)

        s.add_all([Json(), Json(json={'field': 10}), Json(json={'field': 20})])
        s.commit()

        a1 = s.query(Json).filter(Json.json['field'].astext.cast(Integer) == 10)\
            .one()
        a2 = s.query(Json).filter(Json.field.astext == '10').one()
        eq_(a1.id, a2.id)
        a3 = s.query(Json).filter(Json.field.astext == '20').one()
        ne_(a1.id, a3.id)

        a4 = s.query(Json).filter(Json.json_field.astext == '10').one()
        eq_(a2.id, a4.id)
        a5 = s.query(Json).filter(Json.int_field == 10).one()
        eq_(a2.id, a5.id)
        a6 = s.query(Json).filter(Json.text_field == '10').one()
        eq_(a2.id, a6.id)
Example #39
    def test_compare_metadata_tables_annotations_one(self):
        # test that cache keys from annotated version of tables refresh
        # properly

        t1 = Table("a", MetaData(), Column("q", Integer), Column("p", Integer))
        t2 = Table("a", MetaData(), Column("q", Integer), Column("p", Integer))

        ne_(t1._generate_cache_key(), t2._generate_cache_key())

        eq_(t1._generate_cache_key().key, (t1,))

        t2 = t1._annotate({"foo": "bar"})
        eq_(
            t2._generate_cache_key().key,
            (t1, "_annotations", (("foo", "bar"),)),
        )
        eq_(
            t2._annotate({"bat": "bar"})._generate_cache_key().key,
            (t1, "_annotations", (("bat", "bar"), ("foo", "bar"))),
        )
    def test_generative_cache_key_regen_w_del(self):
        t1 = table("t1", column("a"), column("b"))

        s1 = select([t1])

        ck1 = s1._generate_cache_key()

        s2 = s1.where(t1.c.a == 5)

        del s1

        # there is now a good chance that id(s3) == id(s1), make sure
        # cache key is regenerated

        s3 = s2.order_by(t1.c.b)

        ck3 = s3._generate_cache_key()

        ne_(ck1, ck3)
        is_not_(ck1, None)
        is_not_(ck3, None)
Example #41
    def test_query(self):
        Json = self.classes.Json
        s = Session(testing.db)

        s.add_all([
            Json(),
            Json(json={'field': 10}),
            Json(json={'field': 20})])
        s.commit()

        a1 = s.query(Json).filter(Json.json['field'].astext.cast(Integer) == 10)\
            .one()
        a2 = s.query(Json).filter(Json.field.astext == '10').one()
        eq_(a1.id, a2.id)
        a3 = s.query(Json).filter(Json.field.astext == '20').one()
        ne_(a1.id, a3.id)

        a4 = s.query(Json).filter(Json.json_field.astext == '10').one()
        eq_(a2.id, a4.id)
        a5 = s.query(Json).filter(Json.int_field == 10).one()
        eq_(a2.id, a5.id)
        a6 = s.query(Json).filter(Json.text_field == '10').one()
        eq_(a2.id, a6.id)
Example #42
    def test_cache_key_limit_offset_values(self):
        s1 = select([column("q")]).limit(10)
        s2 = select([column("q")]).limit(25)
        s3 = select([column("q")]).limit(25).offset(5)
        s4 = select([column("q")]).limit(25).offset(18)
        s5 = select([column("q")]).limit(7).offset(12)
        s6 = select([column("q")]).limit(literal_column("q")).offset(12)

        for should_eq_left, should_eq_right in [(s1, s2), (s3, s4), (s3, s5)]:
            eq_(
                should_eq_left._generate_cache_key().key,
                should_eq_right._generate_cache_key().key,
            )

        for shouldnt_eq_left, shouldnt_eq_right in [
            (s1, s3),
            (s5, s6),
            (s2, s3),
        ]:
            ne_(
                shouldnt_eq_left._generate_cache_key().key,
                shouldnt_eq_right._generate_cache_key().key,
            )
Example #43
    def test_stmt_lambda_w_additional_hascachekey_variants(self):
        def go(col_expr, q):
            stmt = lambdas.lambda_stmt(lambda: select(col_expr))
            stmt += lambda stmt: stmt.where(col_expr == q)

            return stmt

        c1 = column("x")
        c2 = column("y")

        s1 = go(c1, 5)
        s2 = go(c2, 10)
        s3 = go(c1, 8)
        s4 = go(c2, 12)

        self.assert_compile(s1,
                            "SELECT x WHERE x = :q_1",
                            checkparams={"q_1": 5})
        self.assert_compile(s2,
                            "SELECT y WHERE y = :q_1",
                            checkparams={"q_1": 10})
        self.assert_compile(s3,
                            "SELECT x WHERE x = :q_1",
                            checkparams={"q_1": 8})
        self.assert_compile(s4,
                            "SELECT y WHERE y = :q_1",
                            checkparams={"q_1": 12})

        s1key = s1._generate_cache_key()
        s2key = s2._generate_cache_key()
        s3key = s3._generate_cache_key()
        s4key = s4._generate_cache_key()

        eq_(s1key[0], s3key[0])
        eq_(s2key[0], s4key[0])
        ne_(s1key[0], s2key[0])
    def test_compare_adhoc_tables(self):
        # non-metadata tables compare on their structure.  these objects are
        # not commonly used.

        # note this test is a bit redundant as we have a similar test
        # via the fixtures also
        t1 = table("a", Column("q", Integer), Column("p", Integer))
        t2 = table("a", Column("q", Integer), Column("p", Integer))
        t3 = table("b", Column("q", Integer), Column("p", Integer))
        t4 = table("a", Column("q", Integer), Column("x", Integer))

        eq_(t1._generate_cache_key(), t2._generate_cache_key())

        ne_(t1._generate_cache_key(), t3._generate_cache_key())
        ne_(t1._generate_cache_key(), t4._generate_cache_key())
        ne_(t3._generate_cache_key(), t4._generate_cache_key())
Example #45
    def validate_name(self, key, name):
        ne_(name, "fred")
        return name + " modified"
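
This validator only makes sense attached to a mapped class via the @validates decorator; the surrounding fixture is not shown here, so the mapping below is a guess at its shape (a plain assert stands in for the ne_() helper):

# hedged guess at the surrounding fixture for the validator above
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base, validates

Base = declarative_base()

class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

    @validates("name")
    def validate_name(self, key, name):
        assert name != "fred"         # stand-in for ne_(name, "fred")
        return name + " modified"     # a validator may return a transformed value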
Example #46
    def test_basic_sanity(self):
        IdentitySet = util.IdentitySet

        o1, o2, o3 = object(), object(), object()
        ids = IdentitySet([o1])
        ids.discard(o1)
        ids.discard(o1)
        ids.add(o1)
        ids.remove(o1)
        assert_raises(KeyError, ids.remove, o1)

        eq_(ids.copy(), ids)

        # explicit __eq__ and __ne__ tests
        assert ids != None
        assert not(ids == None)

        ne_(ids, IdentitySet([o1, o2, o3]))
        ids.clear()
        assert o1 not in ids
        ids.add(o2)
        assert o2 in ids
        eq_(ids.pop(), o2)
        ids.add(o1)
        eq_(len(ids), 1)

        isuper = IdentitySet([o1, o2])
        assert ids < isuper
        assert ids.issubset(isuper)
        assert isuper.issuperset(ids)
        assert isuper > ids

        eq_(ids.union(isuper), isuper)
        eq_(ids | isuper, isuper)
        eq_(isuper - ids, IdentitySet([o2]))
        eq_(isuper.difference(ids), IdentitySet([o2]))
        eq_(ids.intersection(isuper), IdentitySet([o1]))
        eq_(ids & isuper, IdentitySet([o1]))
        eq_(ids.symmetric_difference(isuper), IdentitySet([o2]))
        eq_(ids ^ isuper, IdentitySet([o2]))

        ids.update(isuper)
        ids |= isuper
        ids.difference_update(isuper)
        ids -= isuper
        ids.intersection_update(isuper)
        ids &= isuper
        ids.symmetric_difference_update(isuper)
        ids ^= isuper

        ids.update('foobar')
        try:
            ids |= 'foobar'
            assert False
        except TypeError:
            assert True

        try:
            s = set([o1, o2])
            s |= ids
            assert False
        except TypeError:
            assert True

        assert_raises(TypeError, util.cmp, ids)
        assert_raises(TypeError, hash, ids)
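
IdentitySet (from sqlalchemy.util) keys membership on object identity rather than equality, which is what the explicit __eq__/__ne__ checks above exercise. A compact sketch of the distinction:

# compact sketch: identity-based membership vs. a value-based set()
from sqlalchemy.util import IdentitySet

class Point(object):
    def __init__(self, x):
        self.x = x
    def __eq__(self, other):
        return isinstance(other, Point) and other.x == self.x
    def __hash__(self):
        return hash(self.x)

p1, p2 = Point(1), Point(1)
assert p1 == p2                          # equal by value...

ids = IdentitySet([p1])
assert p1 in ids
assert p2 not in ids                     # ...but a distinct object is not a member

assert len(set([p1, p2])) == 1           # value-based set collapses them
assert len(IdentitySet([p1, p2])) == 2   # IdentitySet keeps both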
Example #47
    def validate_name(self, key, name):
        ne_(name, 'fred')
        return name + ' modified'