def test_use_alter(self):
    """FKs marked use_alter=True are emitted via ALTER TABLE, not inline."""
    meta = MetaData()
    Table("t", meta, Column("a", Integer))

    # "fk_ta" is deferred to ALTER; "fk_tb" stays inline in CREATE TABLE.
    Table(
        "t2",
        meta,
        Column("a", Integer, ForeignKey("t.a", use_alter=True, name="fk_ta")),
        Column("b", Integer, ForeignKey("t.a", name="fk_tb")),
    )

    mock_eng = engines.mock_engine(dialect_name="postgresql")
    meta.create_all(mock_eng)
    meta.drop_all(mock_eng)

    # exact DDL stream: inline fk_tb, then ALTER-added/dropped fk_ta
    mock_eng.assert_sql([
        "CREATE TABLE t (a INTEGER)",
        "CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb "
        "FOREIGN KEY(b) REFERENCES t (a))",
        "ALTER TABLE t2 "
        "ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)",
        "ALTER TABLE t2 DROP CONSTRAINT fk_ta",
        "DROP TABLE t2",
        "DROP TABLE t",
    ])
# Esempio n. 2 (example separator from scrape — commented out so the file parses)
    def test_unicode_warnings(self):
        """Executing with non-unicode values against a Unicode column emits
        warnings; repeated executions must not grow memory (the emitted
        warnings must not accumulate).
        """
        metadata = MetaData(self.engine)
        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", Unicode(30)),
        )
        metadata.create_all()
        # mutable counter so the closure below can vary the bound value
        i = [1]

        # the times here is cranked way up so that we can see
        # pysqlite clearing out its internal buffer and allow
        # the test to pass
        @testing.emits_warning()
        @profile_memory()
        def go():

            # execute with a non-unicode object. a warning is emitted,
            # this warning shouldn't clog up memory.

            self.engine.execute(
                table1.select().where(table1.c.col2 == "foo%d" % i[0]))
            i[0] += 1

        try:
            go()
        finally:
            metadata.drop_all()
# Esempio n. 3 (example separator from scrape — commented out so the file parses)
    def test_clauseelement(self):
        """Executable clause elements honor an explicit ``bind`` argument and
        raise UnboundExecutionError when constructed without one.
        """
        metadata = MetaData()
        table = Table("test_table", metadata, Column("foo", Integer))
        metadata.create_all(bind=testing.db)
        try:
            # each factory builds a different kind of executable element
            for elem in [
                table.select,
                lambda **kwargs: sa.func.current_timestamp(**kwargs).select(),
                # func.current_timestamp().select,
                lambda **kwargs: text("select * from test_table", **kwargs),
            ]:
                for bind in (testing.db, testing.db.connect()):
                    try:
                        e = elem(bind=bind)
                        assert e.bind is bind
                        e.execute().close()
                    finally:
                        if isinstance(bind, engine.Connection):
                            bind.close()

                # without a bind, execution must fail
                e = elem()
                assert e.bind is None
                assert_raises(exc.UnboundExecutionError, e.execute)
        finally:
            # NOTE(review): ``bind`` here is the leaked loop variable from the
            # inner loop above — it was already closed there; presumably this
            # re-close is a harmless safety net. Verify before relying on it.
            if isinstance(bind, engine.Connection):
                bind.close()
            metadata.drop_all(bind=testing.db)
# Esempio n. 4 (example separator from scrape — commented out so the file parses)
 def test_implicit_execution(self):
     """Assigning ``metadata.bind`` to a Connection propagates to statements
     built from its tables; implicit inserts inside a rolled-back transaction
     leave the table empty.
     """
     metadata = MetaData()
     table = Table(
         "test_table",
         metadata,
         Column("foo", Integer),
         test_needs_acid=True,
     )
     conn = testing.db.connect()
     metadata.create_all(bind=conn)
     try:
         trans = conn.begin()
         metadata.bind = conn
         t = table.insert()
         # statements inherit the bind from the table's MetaData
         assert t.bind is conn
         table.insert().execute(foo=5)
         table.insert().execute(foo=6)
         table.insert().execute(foo=7)
         trans.rollback()
         metadata.bind = None
         # all three inserts were rolled back
         assert (
             conn.execute("select count(*) from test_table").scalar() == 0
         )
     finally:
         metadata.drop_all(bind=conn)
class ReflectHugeViewTest(fixtures.TestBase):
    """Reflect the definition of a view whose source text exceeds 4000
    characters (MSSQL-only).
    """

    __only_on__ = "mssql"
    __backend__ = True

    # crashes on freetds 0.91, not worth it
    __skip_if__ = (lambda: testing.requires.mssql_freetds.enabled, )

    def setup(self):
        # enough columns that the generated view text exceeds 4000 chars
        self.col_num = 150

        self.metadata = MetaData(testing.db)
        t = Table(
            "base_table", self.metadata, *[
                Column("long_named_column_number_%d" % i, Integer)
                for i in range(self.col_num)
            ])
        self.view_str = (
            view_str
        ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
            ",".join("long_named_column_number_%d" % i
                     for i in range(self.col_num)))
        assert len(view_str) > 4000

        # create/drop the view alongside the base table via DDL events
        event.listen(t, "after_create", DDL(view_str))
        event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))

        self.metadata.create_all()

    def teardown(self):
        self.metadata.drop_all()

    def test_inspect_view_definition(self):
        # the reflected definition must round-trip exactly
        inspector = Inspector.from_engine(testing.db)
        view_def = inspector.get_view_definition("huge_named_view")
        eq_(view_def, self.view_str)
    def test_cycle_named_fks(self):
        """In a two-table FK cycle where one FK is named with use_alter=True,
        CREATE adds that FK via ALTER and DROP removes it via ALTER first.
        """
        metadata = MetaData(testing.db)

        Table(
            "a",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("bid", Integer, ForeignKey("b.id")),
        )

        Table(
            "b",
            metadata,
            Column("id", Integer, primary_key=True),
            Column(
                "aid",
                Integer,
                ForeignKey("a.id", use_alter=True, name="aidfk"),
            ),
        )

        # a's FK is rendered inline; b's named FK is added afterwards by ALTER
        assertions = [
            AllOf(
                CompiledSQL("CREATE TABLE b ("
                            "id INTEGER NOT NULL, "
                            "aid INTEGER, "
                            "PRIMARY KEY (id)"
                            ")"),
                CompiledSQL("CREATE TABLE a ("
                            "id INTEGER NOT NULL, "
                            "bid INTEGER, "
                            "PRIMARY KEY (id), "
                            "FOREIGN KEY(bid) REFERENCES b (id)"
                            ")"),
            ),
            CompiledSQL("ALTER TABLE b ADD CONSTRAINT aidfk "
                        "FOREIGN KEY(aid) REFERENCES a (id)"),
        ]
        with self.sql_execution_asserter() as asserter:
            metadata.create_all(checkfirst=False)

        if testing.db.dialect.supports_alter:
            asserter.assert_(*assertions)

            with self.sql_execution_asserter() as asserter:
                metadata.drop_all(checkfirst=False)

            # the named constraint is dropped first, breaking the cycle
            asserter.assert_(
                CompiledSQL("ALTER TABLE b DROP CONSTRAINT aidfk"),
                AllOf(CompiledSQL("DROP TABLE b"),
                      CompiledSQL("DROP TABLE a")),
            )
        else:
            # dialects without ALTER just drop the tables
            with self.sql_execution_asserter() as asserter:
                metadata.drop_all(checkfirst=False)

            asserter.assert_(
                AllOf(CompiledSQL("DROP TABLE b"),
                      CompiledSQL("DROP TABLE a")))
 def test_checkfirst_metadata(self, connection):
     """MetaData.create_all/drop_all honor ``checkfirst`` for sequences."""
     meta = MetaData()
     Sequence("my_sequence", metadata=meta)
     # unconditional create, then a checkfirst create against the
     # already-present sequence
     meta.create_all(connection, checkfirst=False)
     assert self._has_sequence(connection, "my_sequence")
     meta.create_all(connection, checkfirst=True)
     # unconditional drop, then a checkfirst drop against the
     # now-absent sequence
     meta.drop_all(connection, checkfirst=False)
     assert not self._has_sequence(connection, "my_sequence")
     meta.drop_all(connection, checkfirst=True)
# Esempio n. 8 (example separator from scrape — commented out so the file parses)
    def test_alias_pathing(self):
        """Repeated subqueryload queries through an of_type() aliased path
        must not grow memory.
        """
        metadata = MetaData(self.engine)

        a = Table(
            "a",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("bid", Integer, ForeignKey("b.id")),
            Column("type", String(30)),
        )

        asub = Table(
            "asub",
            metadata,
            Column("id", Integer, ForeignKey("a.id"), primary_key=True),
            Column("data", String(30)),
        )

        b = Table(
            "b",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
        )
        # joined-table inheritance: ASub extends A; B holds a collection of A
        mapper(A, a, polymorphic_identity="a", polymorphic_on=a.c.type)
        mapper(ASub, asub, inherits=A, polymorphic_identity="asub")
        mapper(B, b, properties={"as_": relationship(A)})

        metadata.create_all()
        sess = Session()
        a1 = ASub(data="a1")
        a2 = ASub(data="a2")
        a3 = ASub(data="a3")
        b1 = B(as_=[a1, a2, a3])
        sess.add(b1)
        sess.commit()
        del sess

        # sqlite has a slow enough growth here
        # that we have to run it more times to see the
        # "dip" again
        @profile_memory(maxtimes=120)
        def go():
            sess = Session()
            sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
            sess.close()

        try:
            go()
        finally:
            metadata.drop_all()
        clear_mappers()
# Esempio n. 9 (example separator from scrape — commented out so the file parses)
 def test_create_drop_explicit(self):
     """create/drop accept a bind either positionally or via ``bind=``."""
     metadata = MetaData()
     table = Table("test_table", metadata, Column("foo", Integer))
     for bind in (testing.db, testing.db.connect()):
         # exercise both calling conventions against both bind kinds
         for posargs, kwargs in [([], {"bind": bind}), ([bind], {})]:
             metadata.create_all(*posargs, **kwargs)
             assert table.exists(*posargs, **kwargs)
             metadata.drop_all(*posargs, **kwargs)
             table.create(*posargs, **kwargs)
             table.drop(*posargs, **kwargs)
             assert not table.exists(*posargs, **kwargs)
 def _make_tables(self, e):
     """Build 15 tables on engine *e*; tables 5..14 each FK-reference their
     predecessor, forming a dependency chain.
     """
     m = MetaData()
     for i in range(15):
         cols = [
             Column("id", Integer, primary_key=True),
             Column("data", String(50)),
         ]
         if i > 4:
             # chain this table to the previous one
             cols.append(
                 Column("t_%d_id" % (i - 1),
                        ForeignKey("table_%d.id" % (i - 1))))
         Table("table_%d" % i, m, *cols)
     m.drop_all(e)
     m.create_all(e)
# Esempio n. 11 (example separator from scrape — commented out so the file parses)
    def test_misc_one(self):
        """A one-to-many backref collection is [] when no related rows exist:
        insert a single ``node`` row and verify ``Node.names`` is empty.
        """
        metadata = MetaData(testing.db)
        node_table = Table(
            "node",
            metadata,
            Column("node_id", Integer, primary_key=True),
            Column("name_index", Integer, nullable=True),
        )
        node_name_table = Table(
            "node_name",
            metadata,
            Column("node_name_id", Integer, primary_key=True),
            Column("node_id", Integer, ForeignKey("node.node_id")),
            Column("host_id", Integer, ForeignKey("host.host_id")),
            Column("name", String(64), nullable=False),
        )
        host_table = Table(
            "host",
            metadata,
            Column("host_id", Integer, primary_key=True),
            Column("hostname", String(64), nullable=False, unique=True),
        )
        metadata.create_all()
        try:
            # fixed: the column is "name_index" (see the table definition
            # above); the original passed the nonexistent key "node_index",
            # which fails at execute time with "Unconsumed column names".
            node_table.insert().execute(node_id=1, name_index=5)

            class Node(object):
                pass

            class NodeName(object):
                pass

            class Host(object):
                pass

            mapper(Node, node_table)
            mapper(Host, host_table)
            mapper(
                NodeName,
                node_name_table,
                properties={
                    "node": relationship(Node, backref=backref("names")),
                    "host": relationship(Host),
                },
            )
            sess = create_session()
            # the single Node row has no related NodeName rows
            assert sess.query(Node).get(1).names == []
        finally:
            metadata.drop_all()
# Esempio n. 12 (example separator from scrape — commented out so the file parses)
    def test_many_updates(self):
        """Flushing many differing UPDATE column combinations through a
        small compiled cache must not grow memory.
        """
        metadata = MetaData(self.engine)

        wide_table = Table(
            "t", metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            *[Column("col%d" % i, Integer) for i in range(10)])

        class Wide(object):
            pass

        # small cache (10) while the loop below generates many distinct
        # UPDATE statements — presumably exercises cache turnover
        mapper(Wide, wide_table, _compiled_cache_size=10)

        metadata.create_all()
        session = create_session()
        w1 = Wide()
        session.add(w1)
        session.flush()
        session.close()
        del session
        counter = [1]

        @profile_memory()
        def go():
            session = create_session()
            w1 = session.query(Wide).first()
            x = counter[0]
            dec = 10
            while dec > 0:
                # trying to count in binary here,
                # works enough to trip the test case
                if pow(2, dec) < x:
                    setattr(w1, "col%d" % dec, counter[0])
                    x -= pow(2, dec)
                dec -= 1
            session.flush()
            session.close()
            counter[0] += 1

        try:
            go()
        finally:
            metadata.drop_all()
# Esempio n. 13 (example separator from scrape — commented out so the file parses)
    def test_join_cache(self):
        """Repeated Query.join() against a freshly built selectable must not
        grow memory.
        """
        metadata = MetaData(self.engine)
        table1 = Table(
            "table1",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("data", String(30)),
        )
        table2 = Table(
            "table2",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("data", String(30)),
            Column("t1id", Integer, ForeignKey("table1.id")),
        )

        class Foo(object):
            pass

        class Bar(object):
            pass

        mapper(Foo,
               table1,
               properties={"bars": relationship(mapper(Bar, table2))})
        metadata.create_all()
        session = sessionmaker()

        @profile_memory()
        def go():
            # a new select() each iteration; memory must stay flat
            s = table2.select()
            sess = session()
            sess.query(Foo).join((s, Foo.bars)).all()
            sess.rollback()

        try:
            go()
        finally:
            metadata.drop_all()
# Esempio n. 14 (example separator from scrape — commented out so the file parses)
 def test_create_drop_constructor_bound(self):
     """MetaData accepts its bind positionally or by keyword at construction;
     tables then share that bind for create/drop.
     """
     for bind in (testing.db, testing.db.connect()):
         try:
             # both constructor conventions: positional and bind= keyword
             for args in (([bind], {}), ([], {"bind": bind})):
                 metadata = MetaData(*args[0], **args[1])
                 table = Table(
                     "test_table", metadata, Column("foo", Integer)
                 )
                 # the table inherits the MetaData's bind
                 assert metadata.bind is table.bind is bind
                 metadata.create_all()
                 assert table.exists()
                 metadata.drop_all()
                 table.create()
                 table.drop()
                 assert not table.exists()
         finally:
             if isinstance(bind, engine.Connection):
                 bind.close()
 def test_extract_expression(self):
     """extract() pulls date parts from DateTime and Date columns."""
     meta = MetaData(testing.db)
     table = Table("test", meta, Column("dt", DateTime), Column("d", Date))
     meta.create_all()
     try:
         table.insert().execute(
             {
                 "dt": datetime.datetime(2010, 5, 1, 12, 11, 10),
                 "d": datetime.date(2010, 5, 1),
             }
         )
         rs = select(
             [extract("year", table.c.dt), extract("month", table.c.d)]
         ).execute()
         row = rs.first()
         # year from the datetime column, month from the date column
         assert row[0] == 2010
         assert row[1] == 5
         rs.close()
     finally:
         meta.drop_all()
# Esempio n. 16 (example separator from scrape — commented out so the file parses)
class InvalidateDuringResultTest(fixtures.TestBase):
    """Shutting the database down mid-resultset invalidates the connection."""

    __backend__ = True

    def setup(self):
        # reconnecting_engine supports test_shutdown() to simulate a dead DB
        self.engine = engines.reconnecting_engine()
        self.meta = MetaData(self.engine)
        table = Table(
            "sometable",
            self.meta,
            Column("id", Integer, primary_key=True),
            Column("name", String(50)),
        )
        self.meta.create_all()
        # 99 rows so the SELECT below is only partially consumed
        table.insert().execute([{
            "id": i,
            "name": "row %d" % i
        } for i in range(1, 100)])

    def teardown(self):
        self.meta.drop_all()
        self.engine.dispose()

    @testing.crashes(
        "oracle",
        "cx_oracle 6 doesn't allow a close like this due to open cursors",
    )
    @testing.fails_if(
        ["+mysqlconnector", "+mysqldb", "+cymysql", "+pymysql", "+pg8000"],
        "Buffers the result set and doesn't check for connection close",
    )
    def test_invalidate_on_results(self):
        conn = self.engine.connect()
        result = conn.execute("select * from sometable")
        # consume part of the result, then kill the backend
        for x in range(20):
            result.fetchone()
        self.engine.test_shutdown()
        # further fetches must report the invalidated connection
        _assert_invalidated(result.fetchone)
        assert conn.invalidated
    def test_fk_cant_drop_cycled_unnamed(self):
        """An unnamed FK cycle cannot be sorted for DROP: ALTER-capable
        dialects raise CircularDependencyError; others warn and drop unsorted.
        """
        metadata = MetaData()

        Table(
            "a",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("bid", Integer),
            ForeignKeyConstraint(["bid"], ["b.id"]),
        )
        Table(
            "b",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("aid", Integer),
            ForeignKeyConstraint(["aid"], ["a.id"]),
        )
        metadata.create_all(testing.db)
        if testing.db.dialect.supports_alter:
            # unnamed constraints can't be removed via DROP CONSTRAINT,
            # so the cycle is unresolvable
            assert_raises_message(
                exc.CircularDependencyError,
                "Can't sort tables for DROP; an unresolvable foreign key "
                "dependency exists between tables: a, b.  Please ensure "
                "that the ForeignKey and ForeignKeyConstraint objects "
                "involved in the cycle have names so that they can be "
                "dropped using DROP CONSTRAINT.",
                metadata.drop_all,
                testing.db,
            )
        else:
            # non-ALTER dialects emit a warning and drop in arbitrary order
            with expect_warnings("Can't sort tables for DROP; an unresolvable "
                                 "foreign key dependency "):
                with self.sql_execution_asserter() as asserter:
                    metadata.drop_all(testing.db, checkfirst=False)

            asserter.assert_(
                AllOf(CompiledSQL("DROP TABLE a"),
                      CompiledSQL("DROP TABLE b")))
# Esempio n. 18 (example separator from scrape — commented out so the file parses)
    def test_with_manytomany(self):
        """Repeatedly declaring, mapping, using, and discarding a
        many-to-many class pair must not leak mappers or memory.
        """
        metadata = MetaData(self.engine)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        # association table linking mytable <-> mytable2
        table3 = Table(
            "t1tot2",
            metadata,
            Column("t1", Integer, ForeignKey("mytable.col1")),
            Column("t2", Integer, ForeignKey("mytable2.col1")),
        )

        @profile_memory()
        def go():
            # classes are re-declared each iteration and dropped at the end
            class A(fixtures.ComparableEntity):
                pass

            class B(fixtures.ComparableEntity):
                pass

            mapper(
                A,
                table1,
                properties={
                    "bs":
                    relationship(B,
                                 secondary=table3,
                                 backref="as",
                                 order_by=table3.c.t1)
                },
            )
            mapper(B, table2)

            sess = create_session()
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            b1 = B(col2="b1")
            b2 = B(col2="b2")
            a1.bs.append(b1)
            a2.bs.append(b2)
            for x in [a1, a2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(bs=[B(col2="b1")]), A(bs=[B(col2="b2")])], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # don't need to clear_mappers()
            del B
            del A

        metadata.create_all()
        try:
            go()
        finally:
            metadata.drop_all()
        assert_no_mappers()
# Esempio n. 19 (example separator from scrape — commented out so the file parses)
    def test_orm_many_engines(self):
        """Creating and disposing a fresh engine per iteration, with full ORM
        activity on each, must not grow memory.
        """
        metadata = MetaData(self.engine)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        metadata.create_all()

        m1 = mapper(
            A,
            table1,
            properties={
                "bs":
                relationship(B, cascade="all, delete", order_by=table2.c.col1)
            },
            _compiled_cache_size=50,
        )
        m2 = mapper(B, table2, _compiled_cache_size=50)

        @profile_memory()
        def go():
            # brand-new engine per iteration; disposed at the end
            engine = engines.testing_engine(
                options={
                    "logging_name": "FOO",
                    "pool_logging_name": "BAR",
                    "use_reaper": False,
                })
            sess = create_session(bind=engine)

            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")]),
                ],
                alist,
            )

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            engine.dispose()

        go()

        metadata.drop_all()
        del m1, m2
        assert_no_mappers()
    def test_cycle_unnamed_fks(self):
        """A fully unnamed FK cycle: both FKs are added via ALTER on CREATE;
        DROP raises CircularDependencyError (ALTER dialects) or warns.
        """
        metadata = MetaData(testing.db)

        Table(
            "a",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("bid", Integer, ForeignKey("b.id")),
        )

        Table(
            "b",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("aid", Integer, ForeignKey("a.id")),
        )

        # both tables are created without FKs; both FKs arrive via ALTER
        assertions = [
            AllOf(
                CompiledSQL("CREATE TABLE b ("
                            "id INTEGER NOT NULL, "
                            "aid INTEGER, "
                            "PRIMARY KEY (id)"
                            ")"),
                CompiledSQL("CREATE TABLE a ("
                            "id INTEGER NOT NULL, "
                            "bid INTEGER, "
                            "PRIMARY KEY (id)"
                            ")"),
            ),
            AllOf(
                CompiledSQL("ALTER TABLE b ADD "
                            "FOREIGN KEY(aid) REFERENCES a (id)"),
                CompiledSQL("ALTER TABLE a ADD "
                            "FOREIGN KEY(bid) REFERENCES b (id)"),
            ),
        ]
        with self.sql_execution_asserter() as asserter:
            metadata.create_all(checkfirst=False)

        if testing.db.dialect.supports_alter:
            asserter.assert_(*assertions)

            # unnamed constraints can't be dropped, so the cycle is fatal
            assert_raises_message(
                exc.CircularDependencyError,
                "Can't sort tables for DROP; an unresolvable foreign key "
                "dependency exists between tables: a, b.  "
                "Please ensure that the "
                "ForeignKey and ForeignKeyConstraint objects involved in the "
                "cycle have names so that they can be dropped using "
                "DROP CONSTRAINT.",
                metadata.drop_all,
                checkfirst=False,
            )
        else:
            # non-ALTER dialects warn and drop in arbitrary order
            with expect_warnings(
                    "Can't sort tables for DROP; an unresolvable "
                    "foreign key dependency exists between tables"):
                with self.sql_execution_asserter() as asserter:
                    metadata.drop_all(checkfirst=False)

            asserter.assert_(
                AllOf(CompiledSQL("DROP TABLE b"),
                      CompiledSQL("DROP TABLE a")))
# Esempio n. 21 (example separator from scrape — commented out so the file parses)
    def test_fetchid_trigger(self):
        """
        Verify identity return value on inserting to a trigger table.

        MSSQL's OUTPUT INSERTED clause does not work for the
        case of a table having an identity (autoincrement)
        primary key column, and which also has a trigger configured
        to fire upon each insert and subsequently perform an
        insert into a different table.

        SQLALchemy's MSSQL dialect by default will attempt to
        use an OUTPUT_INSERTED clause, which in this case will
        raise the following error:

        ProgrammingError: (ProgrammingError) ('42000', 334,
        "[Microsoft][SQL Server Native Client 10.0][SQL Server]The
        target table 't1' of the DML statement cannot have any enabled
        triggers if the statement contains an OUTPUT clause without
        INTO clause.", 7748) 'INSERT INTO t1 (descr) OUTPUT inserted.id
        VALUES (?)' ('hello',)

        This test verifies a workaround, which is to rely on the
        older SCOPE_IDENTITY() call, which still works for this scenario.
        To enable the workaround, the Table must be instantiated
        with the init parameter 'implicit_returning = False'.
        """

        # todo: this same test needs to be tried in a multithreaded context
        #      with multiple threads inserting to the same table.
        # todo: check whether this error also occurs with clients other
        #      than the SQL Server Native Client. Maybe an assert_raises
        #      test should be written.
        meta = MetaData(testing.db)
        t1 = Table(
            "t1",
            meta,
            Column("id", Integer, mssql_identity_start=100, primary_key=True),
            Column("descr", String(200)),
            # the following flag will prevent the
            # MSSQLCompiler.returning_clause from getting called,
            # though the ExecutionContext will still have a
            # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
            # hopefully be called instead.
            implicit_returning=False,
        )
        t2 = Table(
            "t2",
            meta,
            Column("id", Integer, mssql_identity_start=200, primary_key=True),
            Column("descr", String(200)),
        )
        meta.create_all()
        con = testing.db.connect()
        # trigger on t1 copies each inserted descr into t2
        con.execute(
            """create trigger paj on t1 for insert as
            insert into t2 (descr) select descr from inserted"""
        )

        try:
            tr = con.begin()
            # direct insert into t2: identity starts at 200
            r = con.execute(t2.insert(), descr="hello")
            self.assert_(r.inserted_primary_key == [200])
            # insert into the trigger table t1: identity starts at 100,
            # retrieved via SCOPE_IDENTITY() (implicit_returning=False)
            r = con.execute(t1.insert(), descr="hello")
            self.assert_(r.inserted_primary_key == [100])

        finally:
            tr.commit()
            con.execute("""drop trigger paj""")
            meta.drop_all()
# Esempio n. 22 (example separator from scrape — commented out so the file parses)
    def test_autoincrement(self):
        """Reflection reports autoincrement=True for the "int_y" columns and
        False for "int_n" columns across varied composite-PK layouts
        (MyISAM tables); inserting yields a value of 1 exactly when an
        "int_y" column is present.
        """
        meta = MetaData(testing.db)
        try:
            Table(
                "ai_1",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column("int_n", Integer, DefaultClause("0"), primary_key=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_2",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column("int_n", Integer, DefaultClause("0"), primary_key=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_3",
                meta,
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_4",
                meta,
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                Column(
                    "int_n2",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_5",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_6",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_7",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("o2", String(1), DefaultClause("x"), primary_key=True),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_8",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("o2", String(1), DefaultClause("x"), primary_key=True),
                mysql_engine="MyISAM",
            )
            meta.create_all()

            table_names = [
                "ai_1",
                "ai_2",
                "ai_3",
                "ai_4",
                "ai_5",
                "ai_6",
                "ai_7",
                "ai_8",
            ]
            # reflect everything back with a fresh MetaData
            mr = MetaData(testing.db)
            mr.reflect(only=table_names)

            for tbl in [mr.tables[name] for name in table_names]:
                # the column naming convention encodes the expectation
                for c in tbl.c:
                    if c.name.startswith("int_y"):
                        assert c.autoincrement
                    elif c.name.startswith("int_n"):
                        assert not c.autoincrement
                tbl.insert().execute()
                if "int_y" in tbl.c:
                    # exactly one generated value of 1 per row
                    assert select([tbl.c.int_y]).scalar() == 1
                    assert list(tbl.select().execute().first()).count(1) == 1
                else:
                    assert 1 not in list(tbl.select().execute().first())
        finally:
            meta.drop_all()
# Esempio n. 23 (example separator from scrape — commented out so the file parses)
    def test_with_inheritance(self):
        """Repeatedly declaring, mapping, using, and discarding a
        single-hierarchy inheritance pair must not leak mappers or memory.
        """
        metadata = MetaData(self.engine)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        # joined-table subclass of mytable
        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                ForeignKey("mytable.col1"),
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col3", String(30)),
        )

        @profile_memory()
        def go():
            # classes are re-declared each iteration and dropped at the end
            class A(fixtures.ComparableEntity):
                pass

            class B(A):
                pass

            mapper(
                A,
                table1,
                polymorphic_on=table1.c.col2,
                polymorphic_identity="a",
            )
            mapper(B, table2, inherits=A, polymorphic_identity="b")

            sess = create_session()
            a1 = A()
            a2 = A()
            b1 = B(col3="b1")
            b2 = B(col3="b2")
            for x in [a1, a2, b1, b2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(), A(), B(col3="b1"), B(col3="b2")], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # don't need to clear_mappers()
            del B
            del A

        metadata.create_all()
        try:
            go()
        finally:
            metadata.drop_all()
        assert_no_mappers()
# Esempio n. 24 (example separator from scrape — commented out so the file parses)
    def test_mapper_reset(self):
        """Repeated mapper() setup followed by clear_mappers() must not leak
        mappers or memory.
        """
        metadata = MetaData(self.engine)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        @profile_memory()
        def go():
            # map, exercise, then tear down the mappers each iteration
            mapper(
                A,
                table1,
                properties={"bs": relationship(B, order_by=table2.c.col1)},
            )
            mapper(B, table2)

            sess = create_session()
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")]),
                ],
                alist,
            )

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            clear_mappers()

        metadata.create_all()
        try:
            go()
        finally:
            metadata.drop_all()
        assert_no_mappers()