Пример #1
0
    def setup(self):
        """Create the ``tables`` test table and expose it, together with the
        custom ``GoofyType`` type, as module globals for subsequent tests.
        """
        meta = MetaData(testing.db)
        global table, GoofyType

        class GoofyType(TypeDecorator):
            # String wrapper that tags values on the way in ("FOO" prefix)
            # and on the way out ("BAR" suffix) so tests can verify that
            # bind/result processing actually ran.
            impl = String

            def process_bind_param(self, value, dialect):
                # NULLs pass through untouched.
                if value is None:
                    return None
                return "FOO" + value

            def process_result_value(self, value, dialect):
                # NULLs pass through untouched.
                if value is None:
                    return None
                return value + "BAR"

        table = Table(
            'tables', meta,
            Column(
                'id', Integer, primary_key=True,
                test_needs_autoincrement=True),
            Column('persons', Integer),
            Column('full', Boolean),
            Column('goofy', GoofyType(50)))
        table.create(checkfirst=True)
Пример #2
0
    def test_reflect_nvarchar(self):
        """Reflect NVARCHAR/NCHAR columns and round-trip unicode data."""
        metadata = self.metadata
        Table(
            "tnv",
            metadata,
            Column("nv_data", sqltypes.NVARCHAR(255)),
            Column("c_data", sqltypes.NCHAR(20)),
        )
        metadata.create_all()
        m2 = MetaData(testing.db)
        t2 = Table("tnv", m2, autoload=True)
        # reflection preserves the generic N-string types
        assert isinstance(t2.c.nv_data.type, sqltypes.NVARCHAR)
        assert isinstance(t2.c.c_data.type, sqltypes.NCHAR)

        if testing.against("oracle+cx_oracle"):
            # cx_oracle substitutes its own unicode NCHAR implementation type
            assert isinstance(
                t2.c.nv_data.type.dialect_impl(testing.db.dialect),
                cx_oracle._OracleUnicodeStringNCHAR,
            )

            assert isinstance(
                t2.c.c_data.type.dialect_impl(testing.db.dialect),
                cx_oracle._OracleUnicodeStringNCHAR,
            )

        data = u("m’a réveillé.")
        with testing.db.connect() as conn:
            conn.execute(t2.insert(), dict(nv_data=data, c_data=data))
            nv_data, c_data = conn.execute(t2.select()).first()
            eq_(nv_data, data)
            eq_(c_data, data + (" " * 7))  # char is space padded
            assert isinstance(nv_data, util.text_type)
            assert isinstance(c_data, util.text_type)
Пример #3
0
    def test_string_default_none_on_insert(self):
        """Without implicit RETURNING, ``inserted_primary_key`` reports
        ``None`` for a string server default.

        The server default is not pre-executed generically; a Python-side
        default should be used for such a case.  All backends are expected
        to behave identically here.
        """
        tbl = Table(
            'x',
            self.metadata,
            Column('y', String(10), server_default='key_one',
                   primary_key=True),
            Column('data', String(10)),
            implicit_returning=False,
        )
        self.metadata.create_all()

        result = tbl.insert().execute(data='data')
        eq_(result.inserted_primary_key, [None])

        # the server-side value is present in the stored row
        eq_(tbl.select().execute().fetchall(), [('key_one', 'data')])
Пример #4
0
    def test_limit_offset_for_update(self):
        """LIMIT with FOR UPDATE succeeds; adding OFFSET raises ORA-02014."""
        metadata = self.metadata
        # oracle can't actually do the ROWNUM thing with FOR UPDATE
        # very well.

        t = Table(
            "t1",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", Integer),
        )
        metadata.create_all()

        t.insert().execute(
            {"id": 1, "data": 1},
            {"id": 2, "data": 7},
            {"id": 3, "data": 12},
            {"id": 4, "data": 15},
            {"id": 5, "data": 32},
        )

        # here, we can't use ORDER BY.
        eq_(
            t.select().with_for_update().limit(2).execute().fetchall(),
            [(1, 1), (2, 7)],
        )

        # here, its impossible.  But we'd prefer it to raise ORA-02014
        # instead of issuing a syntax error.
        assert_raises_message(
            exc.DatabaseError,
            "ORA-02014",
            t.select().with_for_update().limit(2).offset(3).execute,
        )
Пример #5
0
    def test_select_doesnt_pollute_result(self):
        """A scalar subquery inside INSERT ... RETURNING must not invoke the
        selected column's result processor."""
        class MyType(TypeDecorator):
            impl = Integer

            def process_result_value(self, value, dialect):
                # would blow up the test if the subquery's column were
                # processed as a result row
                raise Exception("I have not been selected")

        source = Table('t1', self.metadata, Column('x', MyType()))
        target = Table('t2', self.metadata, Column('x', Integer))

        self.metadata.create_all(testing.db)
        with testing.db.connect() as conn:
            conn.execute(source.insert().values(x=5))

            stmt = target.insert().values(
                x=select([source.c.x]).as_scalar()
            ).returning(target.c.x)

            eq_(conn.execute(stmt).scalar(), 5)
Пример #6
0
    def test_default_exec(self):
        """Sequence-driven defaults still execute after the identifier
        preparer has cached a formatted sequence name as a native ``str``.
        """
        metadata = MetaData(testing.db)
        t1 = Table(
            "t1",
            metadata,
            Column("special_col", Integer, Sequence("special_col"), primary_key=True),
            Column("data", String(50)),  # to appease SQLite without DEFAULT VALUES
        )
        metadata.create_all()

        try:
            engine = metadata.bind

            # reset the identifier preparer, so that we can force it to cache
            # a unicode identifier
            engine.dialect.identifier_preparer = engine.dialect.preparer(engine.dialect)
            select([column("special_col")]).select_from(t1).execute().close()
            assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence("special_col")), str)

            # now execute, run the sequence.  it should run in u"Special_col.nextid" or similar as
            # a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru).
            # ensure that executioncontext._exec_default() is encoding.
            t1.insert().execute(data="foo")
        finally:
            metadata.drop_all()
Пример #7
0
    def test_misordered_lastrow(self):
        """``inserted_primary_key`` orders values by column position even
        when one PK value is supplied manually and the other is generated."""
        parent_table = Table(
            'related',
            self.metadata,
            Column('id', Integer, primary_key=True),
            mysql_engine='MyISAM',
        )
        child_table = Table(
            't6',
            self.metadata,
            Column('manual_id', Integer, ForeignKey('related.id'),
                   primary_key=True),
            Column('auto_id', Integer, primary_key=True,
                   test_needs_autoincrement=True),
            mysql_engine='MyISAM',
        )

        self.metadata.create_all()

        result = parent_table.insert().values(id=12).execute()
        manual_id = result.inserted_primary_key[0]
        eq_(manual_id, 12)

        result = child_table.insert().values(manual_id=manual_id).execute()
        # manual value first, generated value second, per column order
        eq_(result.inserted_primary_key, [12, 1])
Пример #8
0
    def test_column_label_overlap_fallback(self):
        """Row membership (``in_`` / ``not_in_``) falls back to string-label
        matching when a result label overlaps another table's column name.
        """
        content = Table(
            'content', self.metadata,
            Column('type', String(30)),
        )
        bar = Table(
            'bar', self.metadata,
            Column('content_type', String(30))
        )
        self.metadata.create_all(testing.db)
        testing.db.execute(content.insert().values(type="t1"))

        # labeled select: content.type is rendered as "content_type"
        row = testing.db.execute(content.select(use_labels=True)).first()
        in_(content.c.type, row)
        not_in_(bar.c.content_type, row)
        in_(sql.column('content_type'), row)

        # explicit .label("content_type") — same string-label fallback
        row = testing.db.execute(
            select([content.c.type.label("content_type")])).first()
        in_(content.c.type, row)

        not_in_(bar.c.content_type, row)

        in_(sql.column('content_type'), row)

        # a label on an unrelated expression matches neither table's column
        row = testing.db.execute(select([func.now().label("content_type")])). \
            first()
        not_in_(content.c.type, row)

        not_in_(bar.c.content_type, row)

        in_(sql.column('content_type'), row)
Пример #9
0
    def test_table_overrides_metadata_create(self):
        """A sequence attached to a Table column is created/dropped with the
        table, not independently by metadata.create_all()/drop_all()."""
        metadata = self.metadata
        Sequence("s1", metadata=metadata)
        s2 = Sequence("s2", metadata=metadata)
        s3 = Sequence("s3")
        t = Table('t', metadata,
                    Column('c', Integer, s3, primary_key=True))
        # attaching s3 to a column associates it with the table's metadata
        assert s3.metadata is metadata


        t.create(testing.db, checkfirst=True)
        s3.drop(testing.db)

        # 't' is created, and 's3' won't be
        # re-created since it's linked to 't'.
        # 's1' and 's2' are, however.
        metadata.create_all(testing.db)
        assert self._has_sequence('s1')
        assert self._has_sequence('s2')
        assert not self._has_sequence('s3')

        s2.drop(testing.db)
        assert self._has_sequence('s1')
        assert not self._has_sequence('s2')

        metadata.drop_all(testing.db)
        assert not self._has_sequence('s1')
        assert not self._has_sequence('s2')
Пример #10
0
    def test_long_type(self):
        """Oracle LONG round-trips a simple string value."""
        long_table = Table("t", self.metadata, Column("data", oracle.LONG))
        self.metadata.create_all(testing.db)

        testing.db.execute(long_table.insert(), data="xyz")
        eq_(testing.db.scalar(select([long_table.c.data])), "xyz")
Пример #11
0
    def define_tables(cls, metadata):
        """Create the ``foo`` table and register two filtered, aliased
        selects over it (``bar`` and ``baz``) as class-level "tables".
        """
        # use real booleans rather than 1/0 for the flag arguments
        foo = Table('foo', metadata,
                    Column('a', String(30), primary_key=True),
                    Column('b', String(30), nullable=False))

        # aliased selects act as read-only views filtered on column b
        cls.tables.bar = foo.select(foo.c.b == 'bar').alias('bar')
        cls.tables.baz = foo.select(foo.c.b == 'baz').alias('baz')
Пример #12
0
    def _run_test(self, *arg, **kw):
        """Build a single-PK-column table from ``*arg``/``**kw`` and verify
        ``inserted_primary_key`` plus the round-tripped row.

        ``implicit_returning`` may be passed in ``kw`` to set the Table-level
        flag; all remaining arguments go to the ``y`` Column.
        """
        metadata = self.metadata
        implicit_returning = kw.pop('implicit_returning', True)
        kw['primary_key'] = True
        if kw.get('autoincrement', True):
            kw['test_needs_autoincrement'] = True
        t = Table('x', metadata,
            Column('y', self.MyInteger, *arg, **kw),
            Column('data', Integer),
            implicit_returning=implicit_returning
        )

        t.create()
        r = t.insert().values(data=5).execute()

        # we don't pre-fetch 'server_default'.
        if 'server_default' in kw and (not
                    testing.db.dialect.implicit_returning or
                    not implicit_returning):
            eq_(r.inserted_primary_key, [None])
        else:
            eq_(r.inserted_primary_key, ['INT_1'])
        r.close()

        eq_(
            t.select().execute().first(),
            ('INT_1', 5)
        )
Пример #13
0
    def test_fixed_char(self):
        """CHAR columns compare and reflect correctly; values come back
        space-padded to the declared width (30 here)."""
        m = MetaData(testing.db)
        t = Table('t1', m,
                  Column('id', Integer, primary_key=True),
                  Column('data', CHAR(30), nullable=False))

        t.create()
        try:
            t.insert().execute(
                dict(id=1, data="value 1"),
                dict(id=2, data="value 2"),
                dict(id=3, data="value 3")
            )

            # comparison against an unpadded literal still matches
            eq_(
                t.select().where(t.c.data == 'value 2').execute().fetchall(),
                [(2, 'value 2                       ')]
            )

            # reflection preserves the CHAR type exactly
            m2 = MetaData(testing.db)
            t2 = Table('t1', m2, autoload=True)
            assert type(t2.c.data.type) is CHAR
            eq_(
                t2.select().where(t2.c.data == 'value 2').execute().fetchall(),
                [(2, 'value 2                       ')]
            )

        finally:
            t.drop()
Пример #14
0
    def test_numeric_infinity_float(self):
        """BINARY_DOUBLE with ``asdecimal=False`` round-trips +/-inf as
        Python floats, via both Core select and raw SQL."""
        m = self.metadata
        t1 = Table('t1', m,
                   Column("intcol", Integer),
                   Column("numericcol", oracle.BINARY_DOUBLE(asdecimal=False)))
        t1.create()
        t1.insert().execute([
            dict(
                intcol=1,
                numericcol=float("inf")
            ),
            dict(
                intcol=2,
                numericcol=float("-inf")
            ),
        ])

        eq_(
            select([t1.c.numericcol]).
            order_by(t1.c.intcol).execute().fetchall(),
            [(float('inf'), ), (float('-inf'), )]
        )

        # same expectation through a textual statement
        eq_(
            testing.db.execute(
                "select numericcol from t1 order by intcol").fetchall(),
            [(float('inf'), ), (float('-inf'), )]
        )
Пример #15
0
    def _dont_test_numeric_nan_decimal(self):
        """(disabled — leading underscore) NaN round-trip through
        BINARY_DOUBLE with ``asdecimal=True``, expecting Decimal NaN back.
        """
        m = self.metadata
        t1 = Table('t1', m,
                   Column("intcol", Integer),
                   Column("numericcol", oracle.BINARY_DOUBLE(asdecimal=True)))
        t1.create()
        t1.insert().execute([
            dict(
                intcol=1,
                numericcol=decimal.Decimal("NaN")
            ),
            dict(
                intcol=2,
                numericcol=decimal.Decimal("-NaN")
            ),
        ])

        # NOTE(review): Decimal("NaN") != Decimal("NaN"); presumably eq_
        # handles this via repr or custom comparison — confirm before
        # re-enabling this test.
        eq_(
            select([t1.c.numericcol]).
            order_by(t1.c.intcol).execute().fetchall(),
            [(decimal.Decimal("NaN"), ), (decimal.Decimal("NaN"), )]
        )

        eq_(
            testing.db.execute(
                "select numericcol from t1 order by intcol").fetchall(),
            [(decimal.Decimal("NaN"), ), (decimal.Decimal("NaN"), )]
        )
Пример #16
0
    def _test_get_unique_constraints(self, schema=None):
        """Create UNIQUE constraints (including on reserved identifiers)
        and verify the inspector reflects them, name and column order.
        """
        uniques = sorted(
            [
                {"name": "unique_a", "column_names": ["a"]},
                {"name": "unique_a_b_c", "column_names": ["a", "b", "c"]},
                {"name": "unique_c_a_b", "column_names": ["c", "a", "b"]},
                {"name": "unique_asc_key", "column_names": ["asc", "key"]},
            ],
            key=operator.itemgetter("name"),
        )
        orig_meta = self.metadata
        table = Table(
            "testtbl",
            orig_meta,
            Column("a", sa.String(20)),
            Column("b", sa.String(30)),
            Column("c", sa.Integer),
            # reserved identifiers
            Column("asc", sa.String(30)),
            Column("key", sa.String(30)),
            schema=schema,
        )
        for uc in uniques:
            table.append_constraint(sa.UniqueConstraint(*uc["column_names"], name=uc["name"]))
        orig_meta.create_all()

        inspector = inspect(orig_meta.bind)
        reflected = sorted(inspector.get_unique_constraints("testtbl", schema=schema), key=operator.itemgetter("name"))

        for orig, refl in zip(uniques, reflected):
            # Different dialects handle duplicate index and constraints
            # differently, so ignore this flag
            refl.pop("duplicates_index", None)
            eq_(orig, refl)
Пример #17
0
 def test_empty_insert(self):
     """An INSERT with no values falls back to the server default."""
     booleans = Table(
         't1',
         self.metadata,
         Column('is_true', Boolean, server_default='1'),
     )
     self.metadata.create_all()

     booleans.insert().execute()

     eq_(1, select([func.count(text('*'))], from_obj=booleans).scalar())
     eq_(True, booleans.select().scalar())
Пример #18
0
 def test_nullable_reflection(self):
     """Reflection reports each column's nullability accurately."""
     t = Table(
         "t",
         self.metadata,
         Column("a", Integer, nullable=True),
         Column("b", Integer, nullable=False),
     )
     t.create()
     reflected = {
         col["name"]: col["nullable"]
         for col in inspect(self.metadata.bind).get_columns("t")
     }
     eq_(reflected, {"a": True, "b": False})
Пример #19
0
    def test_int_default_none_on_insert_reflected(self):
        """A reflected integer server default is not pre-fetched when
        implicit RETURNING is off; the stored row shows the server value.
        """
        metadata = self.metadata
        Table('x', metadata,
                Column('y', Integer,
                        server_default='5', primary_key=True),
                Column('data', String(10)),
                implicit_returning=False
                )
        metadata.create_all()

        # reflect the table fresh, again without implicit returning
        m2 = MetaData(metadata.bind)
        t2 = Table('x', m2, autoload=True, implicit_returning=False)

        r = t2.insert().execute(data='data')
        eq_(r.inserted_primary_key, [None])
        if testing.against('sqlite'):
            # NOTE(review): sqlite appears to autoincrement the integer PK
            # instead of applying the server default — confirm
            eq_(
                t2.select().execute().fetchall(),
                [(1, 'data')]
            )
        else:
            eq_(
                t2.select().execute().fetchall(),
                [(5, 'data')]
            )
Пример #20
0
    def test_numeric_nan_float(self):
        """BINARY_DOUBLE with ``asdecimal=False`` round-trips NaN as float.

        Values are compared via ``str()`` since ``float('nan') != float('nan')``.
        """
        m = self.metadata
        t1 = Table(
            "t1",
            m,
            Column("intcol", Integer),
            Column("numericcol", oracle.BINARY_DOUBLE(asdecimal=False)),
        )
        t1.create()
        t1.insert().execute(
            [
                dict(intcol=1, numericcol=float("nan")),
                dict(intcol=2, numericcol=float("-nan")),
            ]
        )

        eq_(
            [
                tuple(str(col) for col in row)
                for row in select([t1.c.numericcol])
                .order_by(t1.c.intcol)
                .execute()
            ],
            [("nan",), ("nan",)],
        )

        # same expectation through a textual statement
        eq_(
            [
                tuple(str(col) for col in row)
                for row in testing.db.execute(
                    "select numericcol from t1 order by intcol"
                )
            ],
            [("nan",), ("nan",)],
        )
Пример #21
0
    def test_override_pkfk(self):
        """test that you can override columns which contain foreign keys
        to other reflected tables, where the foreign key column is also
        a primary key column"""

        meta = self.metadata
        users = Table("users", meta, Column("id", sa.Integer, primary_key=True), Column("name", sa.String(30)))
        addresses = Table(
            "addresses", meta, Column("id", sa.Integer, primary_key=True), Column("street", sa.String(30))
        )

        meta.create_all()
        # override before reflecting the referenced table
        meta2 = MetaData(testing.db)
        a2 = Table(
            "addresses", meta2, Column("id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True), autoload=True
        )
        u2 = Table("users", meta2, autoload=True)

        assert list(a2.primary_key) == [a2.c.id]
        assert list(u2.primary_key) == [u2.c.id]
        # the overridden FK drives the join condition
        assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id)

        # same thing with the reflection order reversed
        meta3 = MetaData(testing.db)
        u3 = Table("users", meta3, autoload=True)
        a3 = Table(
            "addresses", meta3, Column("id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True), autoload=True
        )

        assert list(a3.primary_key) == [a3.c.id]
        assert list(u3.primary_key) == [u3.c.id]
        assert u3.join(a3).onclause.compare(u3.c.id == a3.c.id)
Пример #22
0
    def test_column_accessor_shadow(self):
        """Row access works even when column names shadow internal Row
        attribute names (``row``, ``parent``, ``_row``, ``_parent``)."""
        shadowed = Table(
            'test_shadowed', self.metadata,
            Column('shadow_id', INT, primary_key=True),
            Column('shadow_name', VARCHAR(20)),
            Column('parent', VARCHAR(20)),
            Column('row', VARCHAR(40)),
            Column('_parent', VARCHAR(20)),
            Column('_row', VARCHAR(20)),
        )
        self.metadata.create_all()
        shadowed.insert().execute(
            shadow_id=1, shadow_name='The Shadow', parent='The Light',
            row='Without light there is no shadow',
            _parent='Hidden parent', _row='Hidden row')
        result_row = shadowed.select(
            shadowed.c.shadow_id == 1).execute().first()

        expected = [
            ('shadow_id', 1),
            ('shadow_name', 'The Shadow'),
            ('parent', 'The Light'),
            ('row', 'Without light there is no shadow'),
        ]
        # each column is reachable by attribute, string key, and Column key
        for key, value in expected:
            eq_(getattr(result_row, key), value)
            eq_(result_row[key], value)
            eq_(result_row[shadowed.c[key]], value)

        # underscore-prefixed names are reachable by string key
        eq_(result_row['_parent'], 'Hidden parent')
        eq_(result_row['_row'], 'Hidden row')
Пример #23
0
    def _test_get_unique_constraints(self, schema=None):
        """Create a table carrying several UNIQUE constraints (including
        ones on reserved identifiers) and verify the inspector reflects
        them with matching names and column order."""
        expected = sorted(
            [
                {'name': 'unique_a', 'column_names': ['a']},
                {'name': 'unique_a_b_c', 'column_names': ['a', 'b', 'c']},
                {'name': 'unique_c_a_b', 'column_names': ['c', 'a', 'b']},
                {'name': 'unique_asc_key', 'column_names': ['asc', 'key']},
            ],
            key=lambda uc: uc['name'],
        )
        meta = self.metadata
        tbl = Table(
            'testtbl', meta,
            Column('a', sa.String(20)),
            Column('b', sa.String(30)),
            Column('c', sa.Integer),
            # 'asc' and 'key' are reserved words on some backends
            Column('asc', sa.String(30)),
            Column('key', sa.String(30)),
            schema=schema
        )
        for spec in expected:
            tbl.append_constraint(
                sa.UniqueConstraint(*spec['column_names'], name=spec['name'])
            )
        meta.create_all()

        reflected = sorted(
            inspect(meta.bind).get_unique_constraints(
                'testtbl', schema=schema),
            key=lambda uc: uc['name'],
        )

        for spec, refl in zip(expected, reflected):
            eq_(spec, refl)
Пример #24
0
 def test_reflect_remote_synonyms(self):
     """Reflect parent/child tables through synonyms in a remote schema,
     verify the join SQL, and execute the join."""
     meta = MetaData(testing.db)
     parent = Table(
         "ptable",
         meta,
         autoload=True,
         schema=testing.config.test_schema,
         oracle_resolve_synonyms=True,
     )
     child = Table(
         "ctable",
         meta,
         autoload=True,
         schema=testing.config.test_schema,
         oracle_resolve_synonyms=True,
     )
     # the join condition comes from the reflected FK, schema-qualified
     self.assert_compile(
         parent.join(child),
         "%(test_schema)s.ptable JOIN "
         "%(test_schema)s.ctable "
         "ON %(test_schema)s.ptable.id = "
         "%(test_schema)s.ctable.parent_id"
         % {"test_schema": testing.config.test_schema},
     )
     select([parent, child]).select_from(
         parent.join(child)
     ).execute().fetchall()
Пример #25
0
    def test_insert_from_select_fn_defaults(self):
        """INSERT ... FROM SELECT evaluates a Python column default exactly
        once, applying the same value to every inserted row."""
        data = self._fixture()

        counter = itertools.count(1)

        def next_value(ctx):
            return next(counter)

        target = Table('sometable', self.metadata,
                       Column('x', Integer),
                       Column('foo', Integer, default=next_value),
                       Column('y', Integer))
        target.create()

        ins = target.insert().from_select(
            ["x", "y"], select([data.c.x, data.c.y]))
        testing.db.execute(ins)

        # counter is only called once!  both rows carry foo == 1
        eq_(
            testing.db.execute(
                target.select().order_by(target.c.x)).fetchall(),
            [(2, 1, 5), (7, 1, 12)]
        )
Пример #26
0
    def _run_test(self, *arg, **kw):
        """Build a single-PK-column table from ``*arg``/``**kw`` and verify
        ``inserted_primary_key`` plus the round-tripped row.

        ``implicit_returning`` may be passed in ``kw`` to set the Table-level
        flag; all remaining arguments go to the ``y`` Column.
        """
        metadata = self.metadata
        implicit_returning = kw.pop("implicit_returning", True)
        kw["primary_key"] = True
        if kw.get("autoincrement", True):
            kw["test_needs_autoincrement"] = True
        t = Table(
            "x",
            metadata,
            Column("y", self.MyInteger, *arg, **kw),
            Column("data", Integer),
            implicit_returning=implicit_returning,
        )

        t.create()
        r = t.insert().values(data=5).execute()

        # we don't pre-fetch 'server_default'.
        if "server_default" in kw and (not testing.db.dialect.implicit_returning or not implicit_returning):
            eq_(r.inserted_primary_key, [None])
        else:
            eq_(r.inserted_primary_key, ["INT_1"])
        r.close()

        eq_(t.select().execute().first(), ("INT_1", 5))
Пример #27
0
 def _dont_test_reflect_all_types_schema(self):
     """(disabled) Reflect the Oracle ALL_TYPES view via synonyms and
     touch every column of every row to smoke-test result handling."""
     all_types = Table(
         'all_types',
         MetaData(testing.db),
         Column('owner', String(30), primary_key=True),
         Column('type_name', String(30), primary_key=True),
         autoload=True,
         oracle_resolve_synonyms=True,
     )
     for result_row in all_types.select().execute().fetchall():
         for key in result_row.keys():
             result_row[key]
Пример #28
0
    def test_int_default_on_insert_with_returning(self):
        """With implicit RETURNING available, an integer server default is
        reported back as the inserted primary key."""
        tbl = Table(
            "x",
            self.metadata,
            Column("y", Integer, server_default="5", primary_key=True),
            Column("data", String(10)),
        )
        self.metadata.create_all()

        result = tbl.insert().execute(data="data")
        eq_(result.inserted_primary_key, [5])
        eq_(tbl.select().execute().fetchall(), [(5, "data")])
Пример #29
0
    def test_autoincrement_single_col(self):
        """A lone integer primary-key column autoincrements starting at 1."""
        single = Table(
            "single", self.metadata, Column("id", Integer, primary_key=True)
        )
        single.create()

        result = single.insert().execute()
        eq_(result.inserted_primary_key[0], 1)
        eq_(1, sa.select([func.count(sa.text("*"))], from_obj=single).scalar())
Пример #30
0
 def setup_class(cls):
     """Create the shared ``forupdate_counters`` table, exposing it and
     its MetaData as module globals for the tests in this class."""
     global counters, metadata
     metadata = MetaData()
     counters = Table('forupdate_counters', metadata,
                      Column('counter_id', INT, primary_key=True),
                      Column('counter_value', INT),
                      test_needs_acid=True)
     counters.create(testing.db)
Пример #31
0
    def test_long_type(self, metadata, connection):
        """Oracle LONG round-trips a simple string value."""
        long_table = Table("t", metadata, Column("data", oracle.LONG))
        metadata.create_all(connection)

        connection.execute(long_table.insert(), dict(data="xyz"))
        eq_(connection.scalar(select(long_table.c.data)), "xyz")
 def define_tables(cls, metadata):
     """Create the minimal ``users`` table used by these tests."""
     Table(
         'users',
         metadata,
         Column('id', Integer, primary_key=True),
         Column('name', String(64)),
     )
Пример #33
0
    def define_tables(cls, metadata):
        """Create the ``dt`` table plus per-dialect INSERT/UPDATE triggers.

        ``col2``/``col3``/``col4`` are marked with FetchedValue so their
        trigger-assigned values are fetched after INSERT/UPDATE; the DDL
        below installs equivalent triggers for sqlite, mssql, oracle and a
        generic (MySQL-style) fallback for every other backend.
        """
        dt = Table(
            "dt",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("col1", String(20)),
            Column("col2", String(20),
                   server_default=sa.schema.FetchedValue()),
            Column("col3", String(20),
                   sa.schema.FetchedValue(for_update=True)),
            Column(
                "col4",
                String(20),
                sa.schema.FetchedValue(),
                sa.schema.FetchedValue(for_update=True),
            ),
        )
        # INSERT triggers: each variant sets col2/col4 to 'ins'; only the
        # variant whose execute_if condition matches the bind will run
        for ins in (
                sa.DDL(
                    "CREATE TRIGGER dt_ins AFTER INSERT ON dt "
                    "FOR EACH ROW BEGIN "
                    "UPDATE dt SET col2='ins', col4='ins' "
                    "WHERE dt.id = NEW.id; END").execute_if(dialect="sqlite"),
                sa.DDL("CREATE TRIGGER dt_ins ON dt AFTER INSERT AS "
                       "UPDATE dt SET col2='ins', col4='ins' "
                       "WHERE dt.id IN (SELECT id FROM inserted);").execute_if(
                           dialect="mssql"),
                sa.DDL(
                    "CREATE TRIGGER dt_ins BEFORE INSERT "
                    "ON dt "
                    "FOR EACH ROW "
                    "BEGIN "
                    ":NEW.col2 := 'ins'; :NEW.col4 := 'ins'; END;").execute_if(
                        dialect="oracle"),
                sa.DDL(
                    "CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                    "FOR EACH ROW BEGIN "
                    "SET NEW.col2='ins'; SET NEW.col4='ins'; END").execute_if(
                        callable_=lambda ddl, target, bind, **kw: bind.engine.
                        name not in ("oracle", "mssql", "sqlite")),
        ):
            event.listen(dt, "after_create", ins)

        event.listen(dt, "before_drop", sa.DDL("DROP TRIGGER dt_ins"))

        # UPDATE triggers: same pattern, setting col3/col4 to 'up'
        for up in (
                sa.DDL(
                    "CREATE TRIGGER dt_up AFTER UPDATE ON dt "
                    "FOR EACH ROW BEGIN "
                    "UPDATE dt SET col3='up', col4='up' "
                    "WHERE dt.id = OLD.id; END").execute_if(dialect="sqlite"),
                sa.DDL("CREATE TRIGGER dt_up ON dt AFTER UPDATE AS "
                       "UPDATE dt SET col3='up', col4='up' "
                       "WHERE dt.id IN (SELECT id FROM deleted);").execute_if(
                           dialect="mssql"),
                sa.DDL(
                    "CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                    "FOR EACH ROW BEGIN "
                    ":NEW.col3 := 'up'; :NEW.col4 := 'up'; END;").execute_if(
                        dialect="oracle"),
                sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                       "FOR EACH ROW BEGIN "
                       "SET NEW.col3='up'; SET NEW.col4='up'; END").execute_if(
                           callable_=lambda ddl, target, bind, **kw: bind.
                           engine.name not in ("oracle", "mssql", "sqlite")),
        ):
            event.listen(dt, "after_create", up)

        event.listen(dt, "before_drop", sa.DDL("DROP TRIGGER dt_up"))
Пример #34
0
    def define_tables(cls, metadata):
        """Create the canonical users/addresses/orders/items fixture schema,
        including association tables, a self-referential ``nodes`` table and
        a composite-primary-key table.
        """
        Table(
            "users",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("name", String(30), nullable=False),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        Table(
            "addresses",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("user_id", None, ForeignKey("users.id")),
            Column("email_address", String(50), nullable=False),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        # no autoincrement PK of its own; keyed to addresses
        Table(
            "email_bounces",
            metadata,
            Column("id", Integer, ForeignKey("addresses.id")),
            Column("bounces", Integer),
        )

        Table(
            "orders",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("user_id", None, ForeignKey("users.id")),
            Column("address_id", None, ForeignKey("addresses.id")),
            Column("description", String(30)),
            Column("isopen", Integer),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        Table(
            "dingalings",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("address_id", None, ForeignKey("addresses.id")),
            Column("data", String(30)),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        Table(
            "items",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("description", String(30), nullable=False),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        # association table: orders <-> items
        Table(
            "order_items",
            metadata,
            Column("item_id", None, ForeignKey("items.id")),
            Column("order_id", None, ForeignKey("orders.id")),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        Table(
            "keywords",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("name", String(30), nullable=False),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        # association table: items <-> keywords
        Table(
            "item_keywords",
            metadata,
            Column("item_id", None, ForeignKey("items.id")),
            Column("keyword_id", None, ForeignKey("keywords.id")),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        # self-referential adjacency list
        Table(
            "nodes",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("parent_id", Integer, ForeignKey("nodes.id")),
            Column("data", String(30)),
            test_needs_acid=True,
            test_needs_fk=True,
        )

        # composite (i, j) primary key
        Table(
            "composite_pk_table",
            metadata,
            Column("i", Integer, primary_key=True),
            Column("j", Integer, primary_key=True),
            Column("k", Integer, nullable=False),
        )
# Example #35 ("Пример #35", score: 0)
        def define_tables(cls, metadata):
            """Create tables ``a``, ``b``, ``c`` (b FKs a, c FKs b).

            Depending on the enclosing ``parent`` / ``child`` /
            ``direction`` settings, exactly one of the tables also gets
            an extra ``use_alter`` foreign-key column pointing at its
            counterpart.  Results are published via the global names
            ``ta``, ``tb``, ``tc``.
            """
            global ta, tb, tc

            def _extra_fk_column(table_name):
                # FK column this table needs for the configured
                # parent/child relationship, or None when it needs none.
                if table_name == parent and direction == MANYTOONE:
                    return Column(
                        "child_id",
                        Integer,
                        ForeignKey(
                            "%s.id" % child, use_alter=True, name="foo"
                        ),
                    )
                if table_name == child and direction == ONETOMANY:
                    return Column(
                        "parent_id",
                        Integer,
                        ForeignKey(
                            "%s.id" % parent, use_alter=True, name="foo"
                        ),
                    )
                return None

            def _build(table_name, *columns):
                # Assemble the Table, appending the optional FK column
                # last, matching the original column ordering.
                args = list(columns)
                fk = _extra_fk_column(table_name)
                if fk is not None:
                    args.append(fk)
                return Table(table_name, metadata, *args)

            ta = _build(
                "a",
                Column(
                    "id",
                    Integer,
                    primary_key=True,
                    test_needs_autoincrement=True,
                ),
                Column("a_data", String(30)),
            )

            tb = _build(
                "b",
                Column("id", Integer, ForeignKey("a.id"), primary_key=True),
                Column("b_data", String(30)),
            )

            tc = _build(
                "c",
                Column("id", Integer, ForeignKey("b.id"), primary_key=True),
                Column("c_data", String(30)),
            )
# Example #36 ("Пример #36", score: 0)
    def test_with_manytomany(self):
        """Many-to-many mappers built inside ``go()`` leave no residue."""
        metadata = MetaData(testing.db)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table3 = Table(
            "t1tot2",
            metadata,
            Column("t1", Integer, ForeignKey("mytable.col1")),
            Column("t2", Integer, ForeignKey("mytable2.col1")),
        )

        @profile_memory()
        def go():
            class A(fixtures.ComparableEntity):
                pass

            class B(fixtures.ComparableEntity):
                pass

            mapper(
                A,
                table1,
                properties={
                    "bs": relationship(
                        B,
                        secondary=table3,
                        backref="as",
                        order_by=table3.c.t1,
                    )
                },
            )
            mapper(B, table2)

            sess = create_session()
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            b1 = B(col2="b1")
            b2 = B(col2="b2")
            a1.bs.append(b1)
            a2.bs.append(b2)
            sess.add(a1)
            sess.add(a2)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(bs=[B(col2="b1")]), A(bs=[B(col2="b2")])], alist)

            for obj in alist:
                sess.delete(obj)
            sess.flush()

            # mappers go away with the classes; no clear_mappers() needed
            del B
            del A

        metadata.create_all()
        try:
            go()
        finally:
            metadata.drop_all()
        assert_no_mappers()
# Example #37 ("Пример #37", score: 0)
    def test_orm_many_engines(self):
        """Run ORM persist/query/delete cycles, each on a brand-new engine.

        ``go()`` builds and disposes its own engine on every call; the
        ``@profile_memory`` harness invokes it repeatedly so that any
        per-engine state (pools, loggers, compiled caches) that is not
        released by ``engine.dispose()`` shows up as growth.
        """
        metadata = MetaData(self.engine)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        metadata.create_all()

        # bounded compiled-statement caches, so the cache itself cannot
        # grow without limit during the profiled loop
        m1 = mapper(
            A,
            table1,
            properties={
                "bs":
                relationship(B, cascade="all, delete", order_by=table2.c.col1)
            },
            _compiled_cache_size=50,
        )
        m2 = mapper(B, table2, _compiled_cache_size=50)

        @profile_memory()
        def go():
            # fresh engine per iteration; distinctive logger names make
            # leaked logging state attributable to this engine
            engine = engines.testing_engine(
                options={
                    "logging_name": "FOO",
                    "pool_logging_name": "BAR",
                    "use_reaper": False,
                })
            with Session(engine) as sess:
                a1 = A(col2="a1")
                a2 = A(col2="a2")
                a3 = A(col2="a3")
                a1.bs.append(B(col2="b1"))
                a1.bs.append(B(col2="b2"))
                a3.bs.append(B(col2="b3"))
                for x in [a1, a2, a3]:
                    sess.add(x)
                sess.commit()

                alist = sess.query(A).order_by(A.col1).all()
                eq_(
                    [
                        A(col2="a1", bs=[B(col2="b1"),
                                         B(col2="b2")]),
                        A(col2="a2", bs=[]),
                        A(col2="a3", bs=[B(col2="b3")]),
                    ],
                    alist,
                )

                for a in alist:
                    sess.delete(a)
                sess.commit()

            engine.dispose()

        go()

        metadata.drop_all()
        del m1, m2
        assert_no_mappers()
# Example #38 ("Пример #38", score: 0)
class DDLEventTest(fixtures.TestBase):
    """Tests for before/after create/drop DDL events on Table and MetaData.

    Each test attaches mock listeners, runs create/drop against a mock
    engine, and asserts exactly the expected listener invocations.  The
    expected ``mock.call`` structures were previously duplicated inline
    in every test; they are built by the ``_table_event_call`` /
    ``_metadata_event_call`` helpers instead.
    """

    def setup(self):
        self.bind = engines.mock_engine()
        self.metadata = MetaData()
        self.table = Table("t", self.metadata, Column("id", Integer))

    def _table_event_call(self, event_name):
        # Expected mock call for a table-level DDL event.  checkfirst is
        # False because of the MockConnection used in the current
        # testing strategy.
        return getattr(mock.call, event_name)(
            self.table,
            self.bind,
            checkfirst=False,
            _ddl_runner=mock.ANY,
            _is_metadata_operation=mock.ANY,
        )

    def _metadata_event_call(self, event_name):
        # Expected mock call for a metadata-level DDL event; these carry
        # a ``tables`` keyword instead of ``_is_metadata_operation``.
        return getattr(mock.call, event_name)(
            self.metadata,
            self.bind,
            checkfirst=False,
            tables=list(self.metadata.tables.values()),
            _ddl_runner=mock.ANY,
        )

    def _run_table_ddl(self):
        # one create/drop round trip on the table fixture
        self.table.create(self.bind)
        self.table.drop(self.bind)

    def _run_metadata_ddl(self):
        # one create_all/drop_all round trip on the metadata fixture
        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)

    def test_table_create_before(self):
        canary = mock.Mock()
        event.listen(self.table, "before_create", canary.before_create)

        self._run_table_ddl()
        eq_(canary.mock_calls, [self._table_event_call("before_create")])

    def test_table_create_after(self):
        canary = mock.Mock()
        event.listen(self.table, "after_create", canary.after_create)

        self._run_table_ddl()
        eq_(canary.mock_calls, [self._table_event_call("after_create")])

    def test_table_create_both(self):
        canary = mock.Mock()
        event.listen(self.table, "before_create", canary.before_create)
        event.listen(self.table, "after_create", canary.after_create)

        self._run_table_ddl()
        eq_(
            canary.mock_calls,
            [
                self._table_event_call("before_create"),
                self._table_event_call("after_create"),
            ],
        )

    def test_table_drop_before(self):
        canary = mock.Mock()
        event.listen(self.table, "before_drop", canary.before_drop)

        self._run_table_ddl()
        eq_(canary.mock_calls, [self._table_event_call("before_drop")])

    def test_table_drop_after(self):
        canary = mock.Mock()
        event.listen(self.table, "after_drop", canary.after_drop)

        self.table.create(self.bind)
        # plain attribute assignment on a Mock is not recorded in
        # mock_calls, so only the after_drop call is expected below
        canary.state = "skipped"
        self.table.drop(self.bind)
        eq_(canary.mock_calls, [self._table_event_call("after_drop")])

    def test_table_drop_both(self):
        canary = mock.Mock()

        event.listen(self.table, "before_drop", canary.before_drop)
        event.listen(self.table, "after_drop", canary.after_drop)

        self._run_table_ddl()
        eq_(
            canary.mock_calls,
            [
                self._table_event_call("before_drop"),
                self._table_event_call("after_drop"),
            ],
        )

    def test_table_all(self):
        canary = mock.Mock()

        event.listen(self.table, "before_create", canary.before_create)
        event.listen(self.table, "after_create", canary.after_create)
        event.listen(self.table, "before_drop", canary.before_drop)
        event.listen(self.table, "after_drop", canary.after_drop)

        self._run_table_ddl()
        eq_(
            canary.mock_calls,
            [
                self._table_event_call("before_create"),
                self._table_event_call("after_create"),
                self._table_event_call("before_drop"),
                self._table_event_call("after_drop"),
            ],
        )

    def test_metadata_create_before(self):
        canary = mock.Mock()
        event.listen(self.metadata, "before_create", canary.before_create)

        self._run_metadata_ddl()
        eq_(
            canary.mock_calls,
            [self._metadata_event_call("before_create")],
        )

    def test_metadata_create_after(self):
        canary = mock.Mock()
        event.listen(self.metadata, "after_create", canary.after_create)

        self._run_metadata_ddl()
        eq_(
            canary.mock_calls,
            [self._metadata_event_call("after_create")],
        )

    def test_metadata_create_both(self):
        canary = mock.Mock()

        event.listen(self.metadata, "before_create", canary.before_create)
        event.listen(self.metadata, "after_create", canary.after_create)

        self._run_metadata_ddl()
        eq_(
            canary.mock_calls,
            [
                self._metadata_event_call("before_create"),
                self._metadata_event_call("after_create"),
            ],
        )

    def test_metadata_drop_before(self):
        canary = mock.Mock()
        event.listen(self.metadata, "before_drop", canary.before_drop)

        self._run_metadata_ddl()
        eq_(
            canary.mock_calls,
            [self._metadata_event_call("before_drop")],
        )

    def test_metadata_drop_after(self):
        canary = mock.Mock()
        event.listen(self.metadata, "after_drop", canary.after_drop)

        self._run_metadata_ddl()
        eq_(
            canary.mock_calls,
            [self._metadata_event_call("after_drop")],
        )

    def test_metadata_drop_both(self):
        canary = mock.Mock()

        event.listen(self.metadata, "before_drop", canary.before_drop)
        event.listen(self.metadata, "after_drop", canary.after_drop)

        self._run_metadata_ddl()
        eq_(
            canary.mock_calls,
            [
                self._metadata_event_call("before_drop"),
                self._metadata_event_call("after_drop"),
            ],
        )

    def test_metadata_table_isolation(self):
        # a listener on a Table must not fire for MetaData operations,
        # and vice versa
        table_canary = mock.Mock()
        metadata_canary = mock.Mock()

        event.listen(self.table, "before_create", table_canary.before_create)

        event.listen(
            self.metadata, "before_create", metadata_canary.before_create
        )
        self.table.create(self.bind)
        eq_(
            table_canary.mock_calls,
            [self._table_event_call("before_create")],
        )
        eq_(metadata_canary.mock_calls, [])
# Example #39 ("Пример #39", score: 0)
 def setup(self):
     """Create a mock engine and a one-column table fixture for each test."""
     self.bind = engines.mock_engine()
     self.metadata = MetaData()
     self.table = Table("t", self.metadata, Column("id", Integer))
# Example #40 ("Пример #40", score: 0)
    def test_numeric_bind_in_crud(self):
        """A column name that begins with digits round-trips via CRUD."""
        tbl = Table("asfd", self.metadata, Column("100K", Integer))
        tbl.create()

        testing.db.execute(tbl.insert(), {"100K": 10})
        eq_(testing.db.scalar(tbl.select()), 10)
# Example #41 ("Пример #41", score: 0)
    def define_tables(cls, metadata):
        """Define the prj/task/task_status/task_type/msg/msg_type schema.

        ``task`` references ``task_status``, ``task_type`` and ``prj``;
        ``msg`` references ``msg_type`` and ``task``.
        """
        Table(
            "prj", metadata,
            Column("id", Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column("created", sa.DateTime),
            Column("title", sa.String(100)),
        )

        Table(
            "task", metadata,
            Column("id", Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column("status_id", Integer, ForeignKey("task_status.id"),
                   nullable=False),
            Column("title", sa.String(100)),
            Column("task_type_id", Integer, ForeignKey("task_type.id"),
                   nullable=False),
            Column("prj_id", Integer, ForeignKey("prj.id"), nullable=False),
        )

        Table(
            "task_status", metadata,
            Column("id", Integer, primary_key=True,
                   test_needs_autoincrement=True),
        )

        Table(
            "task_type", metadata,
            Column("id", Integer, primary_key=True,
                   test_needs_autoincrement=True),
        )

        Table(
            "msg", metadata,
            Column("id", Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column("posted", sa.DateTime, index=True),
            Column("type_id", Integer, ForeignKey("msg_type.id")),
            Column("task_id", Integer, ForeignKey("task.id")),
        )

        Table(
            "msg_type", metadata,
            Column("id", Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column("name", sa.String(20)),
            Column("display_name", sa.String(20)),
        )
# Example #42 ("Пример #42", score: 0)
 def test_literal_binds_w_quotes(self):
     """A single quote in a server default is doubled in rendered DDL."""
     metadata = MetaData()
     tbl = Table(
         "t",
         metadata,
         Column("x", Integer, server_default=literal("5 ' 8")),
     )
     self.assert_compile(
         CreateTable(tbl),
         """CREATE TABLE t (x INTEGER DEFAULT '5 '' 8')""",
     )
# Example #43 ("Пример #43", score: 0)
    def test_setinputsizes(self, metadata, datatype, value, sis_value_text,
                           set_nchar_flag):
        """Verify the ``setinputsizes()`` call cx_Oracle receives for *datatype*.

        The type is exercised three ways — directly, via
        ``with_variant(..., "oracle")``, and via a ``TypeDecorator`` —
        and each INSERT's recorded ``setinputsizes`` call is compared
        against the expectation derived from *sis_value_text*.
        """
        if isinstance(sis_value_text, str):
            # a string expectation names an attribute of the DBAPI
            # module (e.g. a cx_Oracle type constant)
            sis_value = getattr(testing.db.dialect.dbapi, sis_value_text)
        else:
            sis_value = sis_value_text

        class TestTypeDec(TypeDecorator):
            impl = NullType()
            cache_ok = True

            def load_dialect_impl(self, dialect):
                # only Oracle gets the real datatype; every other
                # dialect keeps the NullType impl
                if dialect.name == "oracle":
                    return dialect.type_descriptor(datatype)
                else:
                    return self.impl

        m = metadata
        # Oracle can have only one column of type LONG so we make three
        # tables rather than one table w/ three columns
        t1 = Table("t1", m, Column("foo", datatype))
        t2 = Table("t2", m,
                   Column("foo",
                          NullType().with_variant(datatype, "oracle")))
        t3 = Table("t3", m, Column("foo", TestTypeDec()))

        class CursorWrapper:
            # cx_oracle cursor can't be modified so we have to
            # invent a whole wrapping scheme

            def __init__(self, connection_fairy):
                self.cursor = connection_fairy.connection.cursor()
                # a Mock stored in the connection's info dict records
                # the calls for inspection by the test body below
                self.mock = mock.Mock()
                connection_fairy.info["mock"] = self.mock

            def setinputsizes(self, *arg, **kw):
                # record the call, then delegate to the real cursor
                self.mock.setinputsizes(*arg, **kw)
                self.cursor.setinputsizes(*arg, **kw)

            def __getattr__(self, key):
                # everything else passes straight through
                return getattr(self.cursor, key)

        if set_nchar_flag:
            engine = testing_engine(options={"use_nchar_for_unicode": True})
        else:
            engine = testing.db

        with engine.connect() as conn:
            conn.begin()

            m.create_all(conn, checkfirst=False)

            connection_fairy = conn.connection
            for tab in [t1, t2, t3]:
                # patch cursor creation so the INSERT runs through the
                # recording wrapper
                with mock.patch.object(
                        connection_fairy,
                        "cursor",
                        lambda: CursorWrapper(connection_fairy),
                ):
                    conn.execute(tab.insert(), {"foo": value})

                if sis_value:
                    eq_(
                        conn.info["mock"].mock_calls,
                        [mock.call.setinputsizes(foo=sis_value)],
                    )
                else:
                    # a falsy expectation means setinputsizes is called
                    # with no arguments at all
                    eq_(
                        conn.info["mock"].mock_calls,
                        [mock.call.setinputsizes()],
                    )
# Example #44 ("Пример #44", score: 0)
    def test_with_inheritance(self):
        """Joined-table inheritance mappers leave no memory behind."""
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                ForeignKey("mytable.col1"),
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col3", String(30)),
        )

        @profile_memory()
        def go():
            class A(fixtures.ComparableEntity):
                pass

            class B(A):
                pass

            mapper(
                A,
                table1,
                polymorphic_on=table1.c.col2,
                polymorphic_identity="a",
            )
            mapper(B, table2, inherits=A, polymorphic_identity="b")

            sess = create_session(self.engine)
            for obj in [A(), A(), B(col3="b1"), B(col3="b2")]:
                sess.add(obj)
            sess.flush()
            sess.expunge_all()

            loaded = sess.query(A).order_by(A.col1).all()
            eq_([A(), A(), B(col3="b1"), B(col3="b2")], loaded)

            for obj in loaded:
                sess.delete(obj)
            sess.flush()

            # mappers are collected along with the classes, so no
            # clear_mappers() is required here
            del B
            del A

        metadata.create_all(self.engine)
        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        assert_no_mappers()
# Example #45 ("Пример #45", score: 0)
    def test_mapper_reset(self):
        """Repeated mapper setup plus ``clear_mappers()`` must not leak.

        Each ``go()`` iteration maps A/B, performs a flush/query/delete
        cycle and tears the mappers down again; ``@profile_memory``
        runs it repeatedly to detect growth across iterations.
        """
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        @profile_memory()
        def go():
            # mappers are re-created from scratch on every iteration
            mapper(
                A,
                table1,
                properties={"bs": relationship(B, order_by=table2.c.col1)},
            )
            mapper(B, table2)

            sess = create_session(self.engine)
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")]),
                ],
                alist,
            )

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            # full mapper teardown: the next iteration starts clean
            clear_mappers()

        metadata.create_all(self.engine)
        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        assert_no_mappers()
# Example #46 ("Пример #46", score: 0)
    def test_numeric_bind_in_crud(self, metadata, connection):
        """A digit-leading column name round-trips through insert/select."""
        tbl = Table("asfd", metadata, Column("100K", Integer))
        tbl.create(connection)

        connection.execute(tbl.insert(), {"100K": 10})
        eq_(connection.scalar(tbl.select()), 10)
# Example #47 ("Пример #47", score: 0)
    def test_session(self):
        """Session persist/query/delete cycles must not accumulate memory.

        Mappers are configured once up front; ``go()`` only opens a
        Session, runs a commit/query/delete round trip, and is executed
        repeatedly by ``@profile_memory`` to detect growth.
        """
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        metadata.create_all(self.engine)

        m1 = mapper(
            A,
            table1,
            properties={
                "bs":
                relationship(B, cascade="all, delete", order_by=table2.c.col1)
            },
        )
        m2 = mapper(B, table2)

        @profile_memory()
        def go():
            # context manager closes the Session at the end of each pass
            with Session(self.engine) as sess:
                a1 = A(col2="a1")
                a2 = A(col2="a2")
                a3 = A(col2="a3")
                a1.bs.append(B(col2="b1"))
                a1.bs.append(B(col2="b2"))
                a3.bs.append(B(col2="b3"))
                for x in [a1, a2, a3]:
                    sess.add(x)
                sess.commit()

                alist = sess.query(A).order_by(A.col1).all()
                eq_(
                    [
                        A(col2="a1", bs=[B(col2="b1"),
                                         B(col2="b2")]),
                        A(col2="a2", bs=[]),
                        A(col2="a3", bs=[B(col2="b3")]),
                    ],
                    alist,
                )

                for a in alist:
                    sess.delete(a)
                sess.commit()

        go()

        metadata.drop_all(self.engine)
        del m1, m2
        assert_no_mappers()
# Example #48 ("Пример #48", score: 0)
    def test_explicit(self):
        """Concrete inheritance mapped explicitly over a polymorphic_union.

        Three standalone tables are combined into the ``punion``
        selectable; ``Employee`` maps the union, while each subclass
        maps its own table with ``concrete=True``.
        """
        engineers = Table(
            "engineers",
            Base.metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("name", String(50)),
            Column("primary_language", String(50)),
        )
        managers = Table(
            "managers",
            Base.metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("name", String(50)),
            Column("golf_swing", String(50)),
        )
        boss = Table(
            "boss",
            Base.metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("name", String(50)),
            Column("golf_swing", String(50)),
        )
        # UNION of the three tables, discriminated by a "type" column
        punion = polymorphic_union(
            {
                "engineer": engineers,
                "manager": managers,
                "boss": boss
            },
            "type",
            "punion",
        )

        class Employee(Base, fixtures.ComparableEntity):

            __table__ = punion
            __mapper_args__ = {"polymorphic_on": punion.c.type}

        class Engineer(Employee):

            __table__ = engineers
            __mapper_args__ = {
                "polymorphic_identity": "engineer",
                "concrete": True,
            }

        class Manager(Employee):

            __table__ = managers
            __mapper_args__ = {
                "polymorphic_identity": "manager",
                "concrete": True,
            }

        class Boss(Manager):
            __table__ = boss
            __mapper_args__ = {
                "polymorphic_identity": "boss",
                "concrete": True,
            }

        self._roundtrip(Employee, Manager, Engineer, Boss)
# Example #49 ("Пример #49", score: 0)
    def test_orm_many_engines(self):
        """Per-iteration engines plus a non-primary mapper must not leak.

        ``go()`` creates and disposes a fresh engine on each pass;
        ``@profile_memory`` runs it repeatedly to detect growth.
        """
        metadata = MetaData(testing.db)

        table1 = Table("mytable", metadata,
            Column('col1', Integer, primary_key=True,
                            test_needs_autoincrement=True),
            Column('col2', String(30)))

        table2 = Table("mytable2", metadata,
            Column('col1', Integer, primary_key=True,
                            test_needs_autoincrement=True),
            Column('col2', String(30)),
            Column('col3', Integer, ForeignKey("mytable.col1")))

        metadata.create_all()

        # small bounded compiled caches so cache size itself is not a
        # source of apparent growth
        m1 = mapper(A, table1, properties={
            "bs":relationship(B, cascade="all, delete",
                                    order_by=table2.c.col1)},
            order_by=table1.c.col1,
            _compiled_cache_size=10
            )
        m2 = mapper(B, table2,
            _compiled_cache_size=10
        )

        # additional non-primary mapper over the same table/class
        m3 = mapper(A, table1, non_primary=True)

        @profile_memory()
        def go():
            # fresh engine per iteration; distinctive logger names make
            # leaked logging state attributable to this engine
            engine = engines.testing_engine(
                                options={'logging_name':'FOO',
                                        'pool_logging_name':'BAR',
                                        'use_reaper':False}
                                    )
            sess = create_session(bind=engine)

            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1,a2,a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")])
                ],
                alist)

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            engine.dispose()
        go()

        metadata.drop_all()
        del m1, m2, m3
        assert_no_mappers()
Пример #50
0
    def define_tables(cls, metadata):
        """Build the products/specification/documents tables and the
        plain-Python class hierarchy (Product/Detail/Assembly, SpecLine,
        Document/RasterDocument) used by the inheritance fixtures.

        Tables and classes are published as module globals so sibling
        setup/test methods can reference them.
        """
        global products_table, specification_table, documents_table
        global Product, Detail, Assembly, SpecLine, Document, RasterDocument

        # base table of the hierarchy; 'product_type' is presumably the
        # polymorphic discriminator -- confirm against the mapper setup
        products_table = Table(
            "products",
            metadata,
            Column(
                "product_id",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("product_type", String(128)),
            Column("name", String(128)),
            Column("mark", String(128)),
        )

        # self-referential association between products: each spec line
        # links a "leader" product to a "follower" product
        specification_table = Table(
            "specification",
            metadata,
            Column(
                "spec_line_id",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column(
                "leader_id",
                Integer,
                ForeignKey("products.product_id"),
                nullable=True,
            ),
            Column(
                "follower_id",
                Integer,
                ForeignKey("products.product_id"),
                nullable=True,
            ),
            Column("quantity", Float, default=1.0),
        )

        # documents attached to a product; create/update timestamps are
        # client-side defaults evaluated at statement time
        documents_table = Table(
            "documents",
            metadata,
            Column(
                "document_id",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("document_type", String(128)),
            Column("product_id", Integer, ForeignKey("products.product_id")),
            Column("create_date", DateTime, default=lambda: datetime.now()),
            Column(
                "last_updated",
                DateTime,
                default=lambda: datetime.now(),
                onupdate=lambda: datetime.now(),
            ),
            Column("name", String(128)),
            Column("data", LargeBinary),
            Column("size", Integer, default=0),
        )

        class Product(object):
            def __init__(self, name, mark=""):
                self.name = name
                self.mark = mark

            def __repr__(self):
                return "<%s %s>" % (self.__class__.__name__, self.name)

        class Detail(Product):
            # overrides Product.__init__; note it does NOT set ``mark``
            def __init__(self, name):
                self.name = name

        class Assembly(Product):
            def __repr__(self):
                # extend the Product repr with the 'specification' and
                # 'documents' collections (None when not yet loaded/set)
                return (
                    Product.__repr__(self)
                    + " "
                    + " ".join(
                        [
                            x + "=" + repr(getattr(self, x, None))
                            for x in ["specification", "documents"]
                        ]
                    )
                )

        class SpecLine(object):
            def __init__(self, leader=None, follower=None, quantity=1):
                self.leader = leader
                self.follower = follower
                self.quantity = quantity

            def __repr__(self):
                # quantity rendered with one decimal place; None -> 0.0
                return "<%s %.01f %s>" % (
                    self.__class__.__name__,
                    self.quantity or 0.0,
                    repr(self.follower),
                )

        class Document(object):
            def __init__(self, name, data=None):
                self.name = name
                self.data = data

            def __repr__(self):
                return "<%s %s>" % (self.__class__.__name__, self.name)

        class RasterDocument(Document):
            # marker subclass; no additional state or behavior
            pass
Пример #51
0
    def define_tables(cls, metadata):
        """Create the ``dt`` table plus per-dialect INSERT and UPDATE
        triggers that populate its ``FetchedValue`` columns:

        * ``col2`` -- set to 'ins' by the insert trigger only
        * ``col3`` -- set to 'up' by the update trigger only
        * ``col4`` -- set by both triggers

        On PostgreSQL the triggers delegate to plpgsql functions, which
        must exist before the triggers are created; matching DROP
        TRIGGER DDL is registered for teardown.
        """
        dt = Table(
            "dt",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("col1", String(20)),
            Column(
                "col2", String(20), server_default=sa.schema.FetchedValue()
            ),
            Column(
                "col3", String(20), sa.schema.FetchedValue(for_update=True)
            ),
            Column(
                "col4",
                String(20),
                sa.schema.FetchedValue(),
                sa.schema.FetchedValue(for_update=True),
            ),
        )

        dialect_name = testing.db.dialect.name

        # Create the PostgreSQL trigger functions exactly once, ahead of
        # the trigger DDL that references them.  Previously these were
        # rebuilt and re-registered on every iteration of the loops
        # below, executing the (idempotent) CREATE OR REPLACE five times.
        my_func_ins = sa.DDL(
            "CREATE OR REPLACE FUNCTION my_func_ins() "
            "RETURNS TRIGGER AS $$ "
            "BEGIN "
            "NEW.col2 := 'ins'; NEW.col4 := 'ins'; "
            "RETURN NEW; "
            "END; $$ LANGUAGE PLPGSQL"
        ).execute_if(dialect="postgresql")
        event.listen(dt, "after_create", my_func_ins)

        my_func_up = sa.DDL(
            "CREATE OR REPLACE FUNCTION my_func_up() "
            "RETURNS TRIGGER AS $$ "
            "BEGIN "
            "NEW.col3 := 'up'; NEW.col4 := 'up'; "
            "RETURN NEW; "
            "END; $$ LANGUAGE PLPGSQL"
        ).execute_if(dialect="postgresql")
        event.listen(dt, "after_create", my_func_up)

        # insert trigger -- one variant per dialect family; the final,
        # MySQL-style form applies to any dialect not listed explicitly
        for ins in (
            sa.DDL(
                "CREATE TRIGGER dt_ins AFTER INSERT ON dt "
                "FOR EACH ROW BEGIN "
                "UPDATE dt SET col2='ins', col4='ins' "
                "WHERE dt.id = NEW.id; END"
            ).execute_if(dialect="sqlite"),
            sa.DDL(
                "CREATE TRIGGER dt_ins ON dt AFTER INSERT AS "
                "UPDATE dt SET col2='ins', col4='ins' "
                "WHERE dt.id IN (SELECT id FROM inserted);"
            ).execute_if(dialect="mssql"),
            sa.DDL(
                "CREATE TRIGGER dt_ins BEFORE INSERT "
                "ON dt "
                "FOR EACH ROW "
                "BEGIN "
                ":NEW.col2 := 'ins'; :NEW.col4 := 'ins'; END;"
            ).execute_if(dialect="oracle"),
            sa.DDL(
                "CREATE TRIGGER dt_ins BEFORE INSERT "
                "ON dt "
                "FOR EACH ROW "
                "EXECUTE PROCEDURE my_func_ins();"
            ).execute_if(dialect="postgresql"),
            sa.DDL(
                "CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                "FOR EACH ROW BEGIN "
                "SET NEW.col2='ins'; SET NEW.col4='ins'; END"
            ).execute_if(
                callable_=lambda ddl, target, bind, **kw: bind.engine.name
                not in ("oracle", "mssql", "sqlite", "postgresql")
            ),
        ):
            event.listen(dt, "after_create", ins)

        # PostgreSQL requires the table name in DROP TRIGGER
        if dialect_name == "postgresql":
            event.listen(
                dt, "before_drop", sa.DDL("DROP TRIGGER dt_ins ON dt")
            )
        else:
            event.listen(dt, "before_drop", sa.DDL("DROP TRIGGER dt_ins"))

        # update trigger -- same per-dialect pattern as the insert trigger
        for up in (
            sa.DDL(
                "CREATE TRIGGER dt_up AFTER UPDATE ON dt "
                "FOR EACH ROW BEGIN "
                "UPDATE dt SET col3='up', col4='up' "
                "WHERE dt.id = OLD.id; END"
            ).execute_if(dialect="sqlite"),
            sa.DDL(
                "CREATE TRIGGER dt_up ON dt AFTER UPDATE AS "
                "UPDATE dt SET col3='up', col4='up' "
                "WHERE dt.id IN (SELECT id FROM deleted);"
            ).execute_if(dialect="mssql"),
            sa.DDL(
                "CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                "FOR EACH ROW BEGIN "
                ":NEW.col3 := 'up'; :NEW.col4 := 'up'; END;"
            ).execute_if(dialect="oracle"),
            sa.DDL(
                "CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                "FOR EACH ROW "
                "EXECUTE PROCEDURE my_func_up();"
            ).execute_if(dialect="postgresql"),
            sa.DDL(
                "CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                "FOR EACH ROW BEGIN "
                "SET NEW.col3='up'; SET NEW.col4='up'; END"
            ).execute_if(
                callable_=lambda ddl, target, bind, **kw: bind.engine.name
                not in ("oracle", "mssql", "sqlite", "postgresql")
            ),
        ):
            event.listen(dt, "after_create", up)

        if dialect_name == "postgresql":
            event.listen(dt, "before_drop", sa.DDL("DROP TRIGGER dt_up ON dt"))
        else:
            event.listen(dt, "before_drop", sa.DDL("DROP TRIGGER dt_up"))
Пример #52
0
 def define_tables(cls, metadata):
     """Define the companies table plus a concrete-inheritance family of
     employee tables (employees/managers/engineers/hackers), each with
     its own primary key and FK to companies.

     All tables are published as module globals for sibling fixtures
     and tests.
     """
     global managers_table, engineers_table, hackers_table
     global companies, employees_table
     companies = Table(
         "companies",
         metadata,
         Column("id",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True),
         Column("name", String(50)),
     )
     # base employee table of the concrete hierarchy
     employees_table = Table(
         "employees",
         metadata,
         Column(
             "employee_id",
             Integer,
             primary_key=True,
             test_needs_autoincrement=True,
         ),
         Column("name", String(50)),
         Column("company_id", Integer, ForeignKey("companies.id")),
     )
     managers_table = Table(
         "managers",
         metadata,
         Column(
             "employee_id",
             Integer,
             primary_key=True,
             test_needs_autoincrement=True,
         ),
         Column("name", String(50)),
         Column("manager_data", String(50)),
         Column("company_id", Integer, ForeignKey("companies.id")),
     )
     engineers_table = Table(
         "engineers",
         metadata,
         Column(
             "employee_id",
             Integer,
             primary_key=True,
             test_needs_autoincrement=True,
         ),
         Column("name", String(50)),
         Column("engineer_info", String(50)),
         Column("company_id", Integer, ForeignKey("companies.id")),
     )
     # same shape as engineers, plus a 'nickname' column
     hackers_table = Table(
         "hackers",
         metadata,
         Column(
             "employee_id",
             Integer,
             primary_key=True,
             test_needs_autoincrement=True,
         ),
         Column("name", String(50)),
         Column("engineer_info", String(50)),
         Column("company_id", Integer, ForeignKey("companies.id")),
         Column("nickname", String(50)),
     )
Пример #53
0
 def setup(self):
     """Provide a mock engine plus a minimal one-column table fixture."""
     self.metadata = MetaData()
     self.table = Table('t', self.metadata, Column('id', Integer))
     self.bind = engines.mock_engine()
Пример #54
0
class DDLEventTest(fixtures.TestBase):
    """Verify that table- and metadata-level DDL events
    (``before_create``, ``after_create``, ``before_drop``,
    ``after_drop``) fire with the expected argument signature.

    A mock engine swallows the actual DDL; a ``mock.Mock`` canary
    records the event invocations, which are compared against the
    expected-call helpers below (previously the long ``mock.call``
    literal was duplicated in every test).
    """

    def setup(self):
        self.bind = engines.mock_engine()
        self.metadata = MetaData()
        self.table = Table('t', self.metadata, Column('id', Integer))

    def _table_event_call(self, name):
        """Expected mock call for table-level DDL event ``name``."""
        # checkfirst is False because of the MockConnection
        # used in the current testing strategy.
        return getattr(mock.call, name)(
            self.table,
            self.bind,
            checkfirst=False,
            _ddl_runner=mock.ANY,
            _is_metadata_operation=mock.ANY)

    def _metadata_event_call(self, name):
        """Expected mock call for metadata-level DDL event ``name``."""
        # checkfirst is False because of the MockConnection
        # used in the current testing strategy.
        return getattr(mock.call, name)(
            self.metadata,
            self.bind,
            checkfirst=False,
            tables=list(self.metadata.tables.values()),
            _ddl_runner=mock.ANY)

    def test_table_create_before(self):
        canary = mock.Mock()
        event.listen(self.table, 'before_create', canary.before_create)

        self.table.create(self.bind)
        self.table.drop(self.bind)
        eq_(canary.mock_calls,
            [self._table_event_call('before_create')])

    def test_table_create_after(self):
        canary = mock.Mock()
        event.listen(self.table, 'after_create', canary.after_create)

        self.table.create(self.bind)
        self.table.drop(self.bind)
        eq_(canary.mock_calls,
            [self._table_event_call('after_create')])

    def test_table_create_both(self):
        canary = mock.Mock()
        event.listen(self.table, 'before_create', canary.before_create)
        event.listen(self.table, 'after_create', canary.after_create)

        self.table.create(self.bind)
        self.table.drop(self.bind)
        eq_(canary.mock_calls, [
            self._table_event_call('before_create'),
            self._table_event_call('after_create'),
        ])

    def test_table_drop_before(self):
        canary = mock.Mock()
        event.listen(self.table, 'before_drop', canary.before_drop)

        self.table.create(self.bind)
        self.table.drop(self.bind)
        eq_(canary.mock_calls,
            [self._table_event_call('before_drop')])

    def test_table_drop_after(self):
        canary = mock.Mock()
        event.listen(self.table, 'after_drop', canary.after_drop)

        self.table.create(self.bind)
        # plain attribute assignment on a Mock is not recorded in
        # mock_calls and must not disturb the assertion below
        canary.state = 'skipped'
        self.table.drop(self.bind)
        eq_(canary.mock_calls,
            [self._table_event_call('after_drop')])

    def test_table_drop_both(self):
        canary = mock.Mock()

        event.listen(self.table, 'before_drop', canary.before_drop)
        event.listen(self.table, 'after_drop', canary.after_drop)

        self.table.create(self.bind)
        self.table.drop(self.bind)
        eq_(canary.mock_calls, [
            self._table_event_call('before_drop'),
            self._table_event_call('after_drop'),
        ])

    def test_table_all(self):
        canary = mock.Mock()

        for name in ('before_create', 'after_create',
                     'before_drop', 'after_drop'):
            event.listen(self.table, name, getattr(canary, name))

        self.table.create(self.bind)
        self.table.drop(self.bind)
        eq_(canary.mock_calls, [
            self._table_event_call('before_create'),
            self._table_event_call('after_create'),
            self._table_event_call('before_drop'),
            self._table_event_call('after_drop'),
        ])

    def test_metadata_create_before(self):
        canary = mock.Mock()
        event.listen(self.metadata, 'before_create', canary.before_create)

        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)
        eq_(canary.mock_calls,
            [self._metadata_event_call('before_create')])

    def test_metadata_create_after(self):
        canary = mock.Mock()
        event.listen(self.metadata, 'after_create', canary.after_create)

        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)
        eq_(canary.mock_calls,
            [self._metadata_event_call('after_create')])

    def test_metadata_create_both(self):
        canary = mock.Mock()

        event.listen(self.metadata, 'before_create', canary.before_create)
        event.listen(self.metadata, 'after_create', canary.after_create)

        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)
        eq_(canary.mock_calls, [
            self._metadata_event_call('before_create'),
            self._metadata_event_call('after_create'),
        ])

    def test_metadata_drop_before(self):
        canary = mock.Mock()
        event.listen(self.metadata, 'before_drop', canary.before_drop)

        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)
        eq_(canary.mock_calls,
            [self._metadata_event_call('before_drop')])

    def test_metadata_drop_after(self):
        canary = mock.Mock()
        event.listen(self.metadata, 'after_drop', canary.after_drop)

        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)
        eq_(canary.mock_calls,
            [self._metadata_event_call('after_drop')])

    def test_metadata_drop_both(self):
        canary = mock.Mock()

        event.listen(self.metadata, 'before_drop', canary.before_drop)
        event.listen(self.metadata, 'after_drop', canary.after_drop)

        self.metadata.create_all(self.bind)
        self.metadata.drop_all(self.bind)
        eq_(canary.mock_calls, [
            self._metadata_event_call('before_drop'),
            self._metadata_event_call('after_drop'),
        ])

    def test_metadata_table_isolation(self):
        # creating a single Table directly must fire table-level
        # listeners only, never the MetaData-level ones
        table_canary = mock.Mock()
        metadata_canary = mock.Mock()

        event.listen(self.table, 'before_create', table_canary.before_create)
        event.listen(self.metadata, 'before_create',
                     metadata_canary.before_create)
        self.table.create(self.bind)
        eq_(table_canary.mock_calls,
            [self._table_event_call('before_create')])
        eq_(metadata_canary.mock_calls, [])

    def test_append_listener(self):
        # legacy append_ddl_listener API: known event names are
        # accepted, unknown names raise InvalidRequestError
        fn = lambda *a: None

        self.table.append_ddl_listener('before-create', fn)
        assert_raises(exc.InvalidRequestError,
                      self.table.append_ddl_listener, 'blah', fn)

        self.metadata.append_ddl_listener('before-create', fn)
        assert_raises(exc.InvalidRequestError,
                      self.metadata.append_ddl_listener, 'blah', fn)
Пример #55
0
class ReconnectRecipeTest(fixtures.TestBase):
    """Test for the reconnect recipe given at doc/build/faq/connections.rst.

    Make sure the above document is updated if changes are made here.

    """

    # this recipe works on PostgreSQL also but only if the connection
    # is cut off from the server side, otherwise the connection.cursor()
    # method rightly fails because we explicitly closed the connection.
    # since we don't have a fixture
    # that can do this we currently rely on the MySQL drivers that allow
    # us to call cursor() even when the connection were closed.   In order
    # to get a real "cut the server off" kind of fixture we'd need to do
    # something in provisioning that seeks out the TCP connection at the
    # OS level and kills it.
    __only_on__ = ("mysql+mysqldb", "mysql+pymysql")

    # when True, roll back via the future-style Connection.rollback();
    # otherwise via the legacy transaction object (see _run_with_retries
    # and test_fail_on_executemany_txn)
    future = False

    def make_engine(self, engine):
        """Return *engine* re-wrapped so single-statement executions are
        retried (with reconnect) after disconnect errors.

        Only ``do_execute`` and ``do_execute_no_params`` are hooked, so
        executemany-style calls are not retried.
        """
        num_retries = 3
        retry_interval = 0.5

        def _run_with_retries(fn, context, cursor, statement, *arg, **kw):
            # attempt the raw DBAPI call up to num_retries + 1 times
            for retry in range(num_retries + 1):
                try:
                    fn(cursor, statement, context=context, *arg)
                except engine.dialect.dbapi.Error as raw_dbapi_err:
                    connection = context.root_connection
                    if engine.dialect.is_disconnect(
                        raw_dbapi_err, connection, cursor
                    ):
                        # NOTE(review): retry ranges over 0..num_retries,
                        # so this condition never holds; a disconnect on
                        # the final attempt exits the loop without
                        # re-raising.  This mirrors the published recipe
                        # -- confirm whether that is intended.
                        if retry > num_retries:
                            raise
                        engine.logger.error(
                            "disconnection error, retrying operation",
                            exc_info=True,
                        )
                        connection.invalidate()

                        # clear any transaction state so the connection
                        # can be re-established and reused
                        if self.future:
                            connection.rollback()
                        else:
                            trans = connection.get_transaction()
                            if trans:
                                trans.rollback()

                        time.sleep(retry_interval)
                        # refresh the cursor from the (reconnected)
                        # DBAPI connection for the next attempt
                        context.cursor = (
                            cursor
                        ) = connection.connection.cursor()
                    else:
                        # non-disconnect DBAPI error: propagate as-is
                        raise
                else:
                    return True

        e = engine.execution_options(isolation_level="AUTOCOMMIT")

        @event.listens_for(e, "do_execute_no_params")
        def do_execute_no_params(cursor, statement, context):
            # returning True signals that the execution was handled here
            return _run_with_retries(
                context.dialect.do_execute_no_params,
                context,
                cursor,
                statement,
            )

        @event.listens_for(e, "do_execute")
        def do_execute(cursor, statement, parameters, context):
            return _run_with_retries(
                context.dialect.do_execute,
                context,
                cursor,
                statement,
                parameters,
            )

        return e

    __backend__ = True

    def setup_test(self):
        """Create a restartable test engine plus a small table."""
        self.engine = engines.reconnecting_engine(
            options=dict(future=self.future)
        )
        self.meta = MetaData()
        self.table = Table(
            "sometable",
            self.meta,
            Column("id", Integer, primary_key=True),
            Column("name", String(50)),
        )
        self.meta.create_all(self.engine)

    def teardown_test(self):
        self.meta.drop_all(self.engine)
        self.engine.dispose()

    def test_restart_on_execute_no_txn(self):
        """SELECT succeeds across a DB restart outside a transaction."""
        engine = self.make_engine(self.engine)

        with engine.connect() as conn:
            eq_(conn.execute(select(1)).scalar(), 1)

            self.engine.test_shutdown()
            self.engine.test_restart()

            eq_(conn.execute(select(1)).scalar(), 1)

    def test_restart_on_execute_txn(self):
        """SELECT succeeds across a DB restart inside engine.begin()."""
        engine = self.make_engine(self.engine)

        with engine.begin() as conn:
            eq_(conn.execute(select(1)).scalar(), 1)

            self.engine.test_shutdown()
            self.engine.test_restart()

            eq_(conn.execute(select(1)).scalar(), 1)

    def test_autocommits_txn(self):
        """Rows inserted before the restart remain visible afterwards;
        the AUTOCOMMIT isolation set in make_engine commits them
        immediately despite the engine.begin() block."""
        engine = self.make_engine(self.engine)

        with engine.begin() as conn:
            conn.execute(
                self.table.insert(),
                [
                    {"id": 1, "name": "some name 1"},
                    {"id": 2, "name": "some name 2"},
                    {"id": 3, "name": "some name 3"},
                ],
            )

            self.engine.test_shutdown()
            self.engine.test_restart()

            eq_(
                conn.execute(
                    select(self.table).order_by(self.table.c.id)
                ).fetchall(),
                [(1, "some name 1"), (2, "some name 2"), (3, "some name 3")],
            )

    def test_fail_on_executemany_txn(self):
        """executemany is not covered by the retry hooks, so after a
        restart it raises DBAPIError and the transaction must be
        rolled back explicitly."""
        engine = self.make_engine(self.engine)

        with engine.begin() as conn:
            conn.execute(
                self.table.insert(),
                [
                    {"id": 1, "name": "some name 1"},
                    {"id": 2, "name": "some name 2"},
                    {"id": 3, "name": "some name 3"},
                ],
            )

            self.engine.test_shutdown()
            self.engine.test_restart()

            assert_raises(
                exc.DBAPIError,
                conn.execute,
                self.table.insert(),
                [
                    {"id": 4, "name": "some name 4"},
                    {"id": 5, "name": "some name 5"},
                    {"id": 6, "name": "some name 6"},
                ],
            )
            if self.future:
                conn.rollback()
            else:
                trans = conn.get_transaction()
                trans.rollback()
Пример #56
0
    def test_with_manytomany(self):
        """Repeatedly define, map, exercise and tear down a
        many-to-many A<->B model under the ``profile_memory`` decorator
        (assumed to assert stable memory use across iterations),
        ensuring mappers and classes are fully reclaimable.
        """
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        # association table for the many-to-many relationship
        table3 = Table(
            "t1tot2",
            metadata,
            Column("t1", Integer, ForeignKey("mytable.col1")),
            Column("t2", Integer, ForeignKey("mytable2.col1")),
        )

        @profile_memory()
        def go():
            # classes are re-created every cycle so that each iteration
            # leaves nothing behind once the refs are deleted below
            class A(fixtures.ComparableEntity):
                pass

            class B(fixtures.ComparableEntity):
                pass

            mapper(
                A,
                table1,
                properties={
                    "bs":
                    relationship(B,
                                 secondary=table3,
                                 backref="as",
                                 order_by=table3.c.t1)
                },
            )
            mapper(B, table2)

            sess = create_session(self.engine)
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            b1 = B(col2="b1")
            b2 = B(col2="b2")
            a1.bs.append(b1)
            a2.bs.append(b2)
            for x in [a1, a2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(bs=[B(col2="b1")]), A(bs=[B(col2="b2")])], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # mappers necessarily find themselves in the compiled cache,
            # so to allow them to be GC'ed clear out the cache
            self.engine.clear_compiled_cache()
            # drop the only strong local references to the mapped
            # classes so they can be garbage collected
            del B
            del A

        metadata.create_all(self.engine)
        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        assert_no_mappers()
Пример #57
0
 def test_string_w_quotes(self):
     """A single quote inside a server_default string literal must be
     doubled in the rendered CREATE TABLE DDL."""
     metadata = MetaData()
     tbl = Table("t", metadata, Column("x", Integer, server_default="5'6"))
     self.assert_compile(
         CreateTable(tbl),
         "CREATE TABLE t (x INTEGER DEFAULT '5''6')",
     )
Пример #58
0
    def test_numerics_broken_inspection(self, metadata, connection):
        """Numeric scenarios where Oracle type info is 'broken',
        returning us precision, scale of the form (0, 0) or (0, -127).
        We convert to Decimal and let int()/float() processors take over.

        The same data is fetched through four paths: raw SQL over the
        real table, raw SQL through nested scalar subqueries (where
        Oracle loses the precision/scale information), and the nested
        statement with explicit ``text().columns()`` typing — both with
        the default ``asdecimal`` behavior and with overrides.
        """

        # this test requires cx_oracle 5

        # Fixture table covering the interesting numeric kinds:
        # plain Integer, Numeric with scale, Numeric with zero scale,
        # and Float.
        foo = Table(
            "foo",
            metadata,
            Column("idata", Integer),
            Column("ndata", Numeric(20, 2)),
            Column("ndata2", Numeric(20, 2)),
            Column("nidata", Numeric(5, 0)),
            Column("fdata", Float()),
        )
        foo.create(connection)

        connection.execute(
            foo.insert(),
            {
                "idata": 5,
                "ndata": decimal.Decimal("45.6"),
                "ndata2": decimal.Decimal("45.0"),
                "nidata": decimal.Decimal("53"),
                "fdata": 45.68392,
            },
        )

        # Path 1: raw SQL against the real table — full cursor type info
        # is available, so scaled NUMERICs come back as Decimal and the
        # zero-scale / integer columns as int.
        stmt = "SELECT idata, ndata, ndata2, nidata, fdata FROM foo"

        row = exec_sql(connection, stmt).fetchall()[0]
        eq_(
            [type(x) for x in row],
            [int, decimal.Decimal, decimal.Decimal, int, float],
        )
        eq_(
            row,
            (
                5,
                decimal.Decimal("45.6"),
                decimal.Decimal("45"),
                53,
                45.683920000000001,
            ),
        )

        # with a nested subquery,
        # both Numeric values that don't have decimal places, regardless
        # of their originating type, come back as ints with no useful
        # typing information beyond "numeric".  So native handler
        # must convert to int.
        # this means our Decimal converters need to run no matter what.
        # totally sucks.

        stmt = """
        SELECT
            (SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
            (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
            AS ndata,
            (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
            AS ndata2,
            (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0)) FROM DUAL)
            AS nidata,
            (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata
        FROM dual
        """
        # Path 2: nested-subquery statement executed raw — note ndata2
        # and fdata lose their types (45.0 -> int 45, float -> Decimal).
        row = exec_sql(connection, stmt).fetchall()[0]
        eq_(
            [type(x) for x in row],
            [int, decimal.Decimal, int, int, decimal.Decimal],
        )
        eq_(
            row,
            (5, decimal.Decimal("45.6"), 45, 53, decimal.Decimal("45.68392")),
        )

        # Path 3: same statement, but with explicit result-column types
        # via text().columns() — the SQLAlchemy type objects restore the
        # expected int/Decimal/float coercions.
        row = connection.execute(
            text(stmt).columns(
                idata=Integer(),
                ndata=Numeric(20, 2),
                ndata2=Numeric(20, 2),
                nidata=Numeric(5, 0),
                fdata=Float(),
            )).fetchall()[0]
        eq_(
            [type(x) for x in row],
            [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float],
        )
        eq_(
            row,
            (
                5,
                decimal.Decimal("45.6"),
                decimal.Decimal("45"),
                decimal.Decimal("53"),
                45.683920000000001,
            ),
        )

        # Path 4: the nested statement wrapped in a labeled derived table
        # (anon_1), the shape a Core select().subquery() would emit.
        stmt = """
        SELECT
                anon_1.idata AS anon_1_idata,
                anon_1.ndata AS anon_1_ndata,
                anon_1.ndata2 AS anon_1_ndata2,
                anon_1.nidata AS anon_1_nidata,
                anon_1.fdata AS anon_1_fdata
        FROM (SELECT idata, ndata, ndata2, nidata, fdata
        FROM (
            SELECT
                (SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
                (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2))
                FROM DUAL) AS ndata,
                (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2))
                FROM DUAL) AS ndata2,
                (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0))
                FROM DUAL) AS nidata,
                (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL)
                AS fdata
            FROM dual
        )
        WHERE ROWNUM >= 0) anon_1
        """
        # Executed raw, the labeled wrapper shows the same broken typing
        # as the bare nested statement.
        row = exec_sql(connection, stmt).fetchall()[0]
        eq_(
            [type(x) for x in row],
            [int, decimal.Decimal, int, int, decimal.Decimal],
        )
        eq_(
            row,
            (5, decimal.Decimal("45.6"), 45, 53, decimal.Decimal("45.68392")),
        )

        # ...and explicit typing on the labeled columns restores the
        # correct coercions, as in Path 3.
        row = connection.execute(
            text(stmt).columns(
                anon_1_idata=Integer(),
                anon_1_ndata=Numeric(20, 2),
                anon_1_ndata2=Numeric(20, 2),
                anon_1_nidata=Numeric(5, 0),
                anon_1_fdata=Float(),
            )).fetchall()[0]
        eq_(
            [type(x) for x in row],
            [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float],
        )
        eq_(
            row,
            (
                5,
                decimal.Decimal("45.6"),
                decimal.Decimal("45"),
                decimal.Decimal("53"),
                45.683920000000001,
            ),
        )

        # asdecimal overrides: Numeric(asdecimal=False) yields floats,
        # Float(asdecimal=True) yields Decimal.
        row = connection.execute(
            text(stmt).columns(
                anon_1_idata=Integer(),
                anon_1_ndata=Numeric(20, 2, asdecimal=False),
                anon_1_ndata2=Numeric(20, 2, asdecimal=False),
                anon_1_nidata=Numeric(5, 0, asdecimal=False),
                anon_1_fdata=Float(asdecimal=True),
            )).fetchall()[0]
        eq_([type(x) for x in row],
            [int, float, float, float, decimal.Decimal])
        eq_(row, (5, 45.6, 45, 53, decimal.Decimal("45.68392")))
Пример #59
0
 def define_tables(cls, metadata):
     """Define the single fixture table, ``test_table``, used by this
     suite: an integer primary key plus a short string column."""
     Table(
         'test_table',
         metadata,
         Column('id', Integer, primary_key=True),
         Column('data', String(50)),
     )
Пример #60
0
 def test_text_w_quotes(self):
     """A text() server default is rendered verbatim in the DDL; any
     embedded quote characters are passed through untouched."""
     metadata = MetaData()
     tbl = Table(
         "t", metadata,
         Column("x", Integer, server_default=text("5 ' 8")))
     self.assert_compile(
         CreateTable(tbl),
         "CREATE TABLE t (x INTEGER DEFAULT 5 ' 8)")