    def test_insert_values_col_expression(self):
        with testing.db.connect() as conn:
            conn.execute(cattable.insert().values({cattable.c.id: literal(5)}))
            eq_(conn.scalar(select([cattable.c.id])), 5)
    def test_threelevel_selectin_to_inline_awkward_alias_options(self):
        self._fixture_from_geometry(
            {
                "a": {
                    "subclasses": {
                        "b": {},
                        "c": {"subclasses": {"d": {}, "e": {}}},
                    }
                }
            }
        )

        a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
        sess = Session()
        sess.add_all([d(d_data="d1"), e(e_data="e1")])
        sess.commit()

        from sqlalchemy_1_3 import select

        a_table, c_table, d_table, e_table = self.tables("a", "c", "d", "e")

        poly = (
            select([a_table.c.id, a_table.c.type, c_table, d_table, e_table])
            .select_from(
                a_table.join(c_table).outerjoin(d_table).outerjoin(e_table)
            )
            .apply_labels()
            .alias("poly")
        )

        c_alias = with_polymorphic(c, (d, e), poly)
        q = (
            sess.query(a)
            .options(selectin_polymorphic(a, [b, c_alias]))
            .order_by(a.id)
        )

        result = self.assert_sql_execution(
            testing.db,
            q.all,
            CompiledSQL(
                "SELECT a.type AS a_type, a.id AS a_id, "
                "a.a_data AS a_a_data FROM a ORDER BY a.id",
                {},
            ),
            Or(
                # here, the test is that the adaptation of "a" takes place
                CompiledSQL(
                    "SELECT poly.a_type AS poly_a_type, "
                    "poly.c_id AS poly_c_id, "
                    "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
                    "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data, "
                    "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data "
                    "FROM (SELECT a.id AS a_id, a.type AS a_type, "
                    "c.id AS c_id, "
                    "c.c_data AS c_c_data, d.id AS d_id, "
                    "d.d_data AS d_d_data, "
                    "e.id AS e_id, e.e_data AS e_e_data FROM a JOIN c "
                    "ON a.id = c.id LEFT OUTER JOIN d ON c.id = d.id "
                    "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
                    "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
                    "ORDER BY poly.a_id",
                    [{"primary_keys": [1, 2]}],
                ),
                CompiledSQL(
                    "SELECT poly.a_type AS poly_a_type, "
                    "poly.c_id AS poly_c_id, "
                    "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
                    "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data, "
                    "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data "
                    "FROM (SELECT a.id AS a_id, a.type AS a_type, "
                    "c.id AS c_id, c.c_data AS c_c_data, d.id AS d_id, "
                    "d.d_data AS d_d_data, e.id AS e_id, "
                    "e.e_data AS e_e_data FROM a JOIN c ON a.id = c.id "
                    "LEFT OUTER JOIN d ON c.id = d.id "
                    "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
                    "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
                    "ORDER BY poly.a_id",
                    [{"primary_keys": [1, 2]}],
                ),
            ),
        )
        with self.assert_statement_count(testing.db, 0):
            eq_(result, [d(d_data="d1"), e(e_data="e1")])
    def test_insert_plain_param(self):
        with testing.db.connect() as conn:
            conn.execute(cattable.insert(), id=5)
            eq_(conn.scalar(select([cattable.c.id])), 5)

    def test_insert_values_col_plain(self):
        with testing.db.connect() as conn:
            conn.execute(cattable.insert().values({cattable.c.id: 5}))
            eq_(conn.scalar(select([cattable.c.id])), 5)
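The three `cattable` insert tests in this section differ only in how the value for `cattable.c.id` is supplied. A self-contained sketch of the same idea, assuming a one-column table (the engine and table here are illustrative, not from the original suite):

# Hedged sketch: the same INSERT expressed three ways against an assumed table.
from sqlalchemy import (
    Column, Integer, MetaData, Table, create_engine, literal, select,
)

engine = create_engine("sqlite://")
meta = MetaData()
cattable = Table("cattable", meta, Column("id", Integer, primary_key=True))
meta.create_all(engine)

with engine.connect() as conn:
    conn.execute(cattable.insert(), id=1)                                # plain parameter
    conn.execute(cattable.insert().values({cattable.c.id: 2}))           # Column-keyed value
    conn.execute(cattable.insert().values({cattable.c.id: literal(3)}))  # SQL expression
    print(conn.execute(select([cattable.c.id])).fetchall())              # [(1,), (2,), (3,)]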
Example #5
    def test_case(self):
        inner = select(
            [
                case([
                    [info_table.c.pk < 3, "lessthan3"],
                    [
                        and_(info_table.c.pk >= 3, info_table.c.pk < 7),
                        "gt3",
                    ],
                ]).label("x"),
                info_table.c.pk,
                info_table.c.info,
            ],
            from_obj=[info_table],
        )

        inner_result = inner.execute().fetchall()

        # Outputs:
        # lessthan3 1 pk_1_data
        # lessthan3 2 pk_2_data
        # gt3 3 pk_3_data
        # gt3 4 pk_4_data
        # gt3 5 pk_5_data
        # gt3 6 pk_6_data
        assert inner_result == [
            ("lessthan3", 1, "pk_1_data"),
            ("lessthan3", 2, "pk_2_data"),
            ("gt3", 3, "pk_3_data"),
            ("gt3", 4, "pk_4_data"),
            ("gt3", 5, "pk_5_data"),
            ("gt3", 6, "pk_6_data"),
        ]

        outer = select([inner.alias("q_inner")])

        outer_result = outer.execute().fetchall()

        assert outer_result == [
            ("lessthan3", 1, "pk_1_data"),
            ("lessthan3", 2, "pk_2_data"),
            ("gt3", 3, "pk_3_data"),
            ("gt3", 4, "pk_4_data"),
            ("gt3", 5, "pk_5_data"),
            ("gt3", 6, "pk_6_data"),
        ]

        w_else = select(
            [
                case(
                    [
                        [info_table.c.pk < 3,
                         cast(3, Integer)],
                        [and_(info_table.c.pk >= 3, info_table.c.pk < 6), 6],
                    ],
                    else_=0,
                ).label("x"),
                info_table.c.pk,
                info_table.c.info,
            ],
            from_obj=[info_table],
        )

        else_result = w_else.execute().fetchall()

        assert else_result == [
            (3, 1, "pk_1_data"),
            (3, 2, "pk_2_data"),
            (6, 3, "pk_3_data"),
            (6, 4, "pk_4_data"),
            (6, 5, "pk_5_data"),
            (0, 6, "pk_6_data"),
        ]
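
For reference, a self-contained sketch of the `case()` construct the test exercises, using an assumed `nums` table instead of the fixture's `info_table`: each WHEN pair maps a condition to a result, and `else_` supplies the fallback.

# Hedged sketch of case() with an else_ branch, SQLAlchemy 1.3-style API.
from sqlalchemy import Column, Integer, MetaData, Table, case, create_engine, select

engine = create_engine("sqlite://")
meta = MetaData()
nums = Table("nums", meta, Column("pk", Integer, primary_key=True))
meta.create_all(engine)

with engine.connect() as conn:
    conn.execute(nums.insert(), [{"pk": n} for n in range(1, 7)])
    expr = case(
        [(nums.c.pk < 3, "small"), (nums.c.pk < 6, "medium")],
        else_="large",
    ).label("bucket")
    # rows 1-2 hit the first branch, 3-5 the second, 6 falls through to else_
    print(conn.execute(select([expr, nums.c.pk])).fetchall())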
Example #6
    def test_autoincrement(self):
        meta = MetaData(testing.db)
        try:
            Table(
                "ai_1",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column("int_n", Integer, DefaultClause("0"), primary_key=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_2",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column("int_n", Integer, DefaultClause("0"), primary_key=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_3",
                meta,
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_4",
                meta,
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                Column(
                    "int_n2",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_5",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_6",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_7",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("o2", String(1), DefaultClause("x"), primary_key=True),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_8",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("o2", String(1), DefaultClause("x"), primary_key=True),
                mysql_engine="MyISAM",
            )
            meta.create_all()

            table_names = [
                "ai_1",
                "ai_2",
                "ai_3",
                "ai_4",
                "ai_5",
                "ai_6",
                "ai_7",
                "ai_8",
            ]
            mr = MetaData(testing.db)
            mr.reflect(only=table_names)

            for tbl in [mr.tables[name] for name in table_names]:
                for c in tbl.c:
                    if c.name.startswith("int_y"):
                        assert c.autoincrement
                    elif c.name.startswith("int_n"):
                        assert not c.autoincrement
                tbl.insert().execute()
                if "int_y" in tbl.c:
                    assert select([tbl.c.int_y]).scalar() == 1
                    assert list(tbl.select().execute().first()).count(1) == 1
                else:
                    assert 1 not in list(tbl.select().execute().first())
        finally:
            meta.drop_all()
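
The declaration pattern the test checks can be condensed into a short sketch (table name assumed): in a composite integer primary key, only the column explicitly marked `autoincrement=True` gets AUTO_INCREMENT, and that flag is what reflection is expected to report back.

# Hedged sketch: declaring autoincrement on one column of a composite PK.
from sqlalchemy import Column, DefaultClause, Integer, MetaData, Table

meta = MetaData()
ai = Table(
    "ai_example",
    meta,
    Column("int_y", Integer, primary_key=True, autoincrement=True),
    Column("int_n", Integer, DefaultClause("0"), primary_key=True,
           autoincrement=False),
    mysql_engine="MyISAM",
)
# After meta.create_all(engine) and MetaData(engine).reflect(only=["ai_example"]),
# the reflected "int_y" column reports autoincrement=True and "int_n" does not.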
Example #7
    def go():
        s = select([users]).select_from(users.join(addresses))
        s._froms
    def test_nesting_with_functions(self):
        Stat, Foo, stats, foo, Data, datas = (
            self.classes.Stat,
            self.classes.Foo,
            self.tables.stats,
            self.tables.foo,
            self.classes.Data,
            self.tables.datas,
        )

        mapper(Data, datas)
        mapper(
            Foo,
            foo,
            properties={
                "data": relationship(Data,
                                     backref=backref("foo", uselist=False))
            },
        )

        mapper(Stat, stats, properties={"data": relationship(Data)})

        session = create_session()

        data = [Data(a=x) for x in range(5)]
        session.add_all(data)

        session.add_all((
            Stat(data=data[0], somedata=1),
            Stat(data=data[1], somedata=2),
            Stat(data=data[2], somedata=3),
            Stat(data=data[3], somedata=4),
            Stat(data=data[4], somedata=5),
            Stat(data=data[0], somedata=6),
            Stat(data=data[1], somedata=7),
            Stat(data=data[2], somedata=8),
            Stat(data=data[3], somedata=9),
            Stat(data=data[4], somedata=10),
        ))
        session.flush()

        arb_data = sa.select(
            [stats.c.data_id,
             sa.func.max(stats.c.somedata).label("max")],
            stats.c.data_id <= 5,
            group_by=[stats.c.data_id],
        )

        arb_result = arb_data.execute().fetchall()

        # order the result list descending based on 'max'
        arb_result.sort(key=lambda a: a["max"], reverse=True)

        # extract just the "data_id" from it
        arb_result = [row["data_id"] for row in arb_result]

        arb_data = arb_data.alias("arb")

        # now query for Data objects using that above select, adding the
        # "order by max desc" separately
        q = (
            session.query(Data)
            .options(sa.orm.joinedload("foo"))
            .select_from(
                datas.join(arb_data, arb_data.c.data_id == datas.c.id)
            )
            .order_by(sa.desc(arb_data.c.max))
            .limit(10)
        )

        # extract "data_id" from the list of result objects
        verify_result = [d.id for d in q]

        eq_(verify_result, arb_result)
    def test_anon_scalar_subqueries(self):
        s1 = select([1]).as_scalar()
        s2 = select([2]).as_scalar()

        s = select([s1, s2]).apply_labels()
        self._test(s, self._anon_scalar_subqueries)
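
A minimal sketch, assuming the same 1.3-style API, of what `as_scalar()` plus `apply_labels()` renders (the exact anonymous label names may vary):

# Hedged sketch: scalar subqueries receive anonymous labels when selected.
from sqlalchemy import literal_column, select

s1 = select([literal_column("1")]).as_scalar()
s2 = select([literal_column("2")]).as_scalar()
stmt = select([s1, s2]).apply_labels()
print(stmt)
# roughly: SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2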
Example #10
    def test_insert(self, connection):
        t = self.tables.default_test

        r = connection.execute(t.insert())
        assert r.lastrow_has_defaults()
        eq_(
            set(r.context.postfetch_cols),
            set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]),
        )

        r = connection.execute(t.insert(inline=True))
        assert r.lastrow_has_defaults()
        eq_(
            set(r.context.postfetch_cols),
            set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]),
        )

        connection.execute(t.insert())

        ctexec = connection.execute(
            sa.select([self.currenttime.label("now")])
        ).scalar()
        result = connection.execute(t.select().order_by(t.c.col1))
        today = datetime.date.today()
        eq_(
            result.fetchall(),
            [(
                x,
                "imthedefault",
                self.f,
                self.ts,
                self.ts,
                ctexec,
                True,
                False,
                12,
                today,
                "py",
                "hi",
                "BINDfoo",
            ) for x in range(51, 54)],
        )

        connection.execute(t.insert(), dict(col9=None))

        # TODO: why are we looking at 'r' when we just executed something
        # else ?
        assert r.lastrow_has_defaults()

        eq_(
            set(r.context.postfetch_cols),
            set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]),
        )

        eq_(
            list(connection.execute(t.select().where(t.c.col1 == 54))),
            [(
                54,
                "imthedefault",
                self.f,
                self.ts,
                self.ts,
                ctexec,
                True,
                False,
                12,
                today,
                None,
                "hi",
                "BINDfoo",
            )],
        )
Example #11
def mydefault_using_connection(ctx):
    conn = ctx.connection
    return conn.execute(sa.select([sa.text("12")])).scalar()
Example #12
def myupdate_with_ctx(ctx):
    conn = ctx.connection
    return conn.execute(sa.select([sa.text("13")])).scalar()
Example #13
    def define_tables(cls, metadata):
        default_generator = cls.default_generator = {"x": 50}

        def mydefault():
            default_generator["x"] += 1
            return default_generator["x"]

        def myupdate_with_ctx(ctx):
            conn = ctx.connection
            return conn.execute(sa.select([sa.text("13")])).scalar()

        def mydefault_using_connection(ctx):
            conn = ctx.connection
            return conn.execute(sa.select([sa.text("12")])).scalar()

        use_function_defaults = testing.against("postgresql", "mssql")
        is_oracle = testing.against("oracle")

        class MyClass(object):
            @classmethod
            def gen_default(cls, ctx):
                return "hi"

        class MyType(TypeDecorator):
            impl = String(50)

            def process_bind_param(self, value, dialect):
                if value is not None:
                    value = "BIND" + value
                return value

        cls.f = 6
        cls.f2 = 11
        with testing.db.connect() as conn:
            currenttime = cls.currenttime = func.current_date(type_=sa.Date)
            if is_oracle:
                ts = conn.scalar(
                    sa.select([
                        func.trunc(
                            func.current_timestamp(),
                            sa.literal_column("'DAY'"),
                            type_=sa.Date,
                        )
                    ]))
                currenttime = cls.currenttime = func.trunc(
                    currenttime, sa.literal_column("'DAY'"), type_=sa.Date)
                def1 = currenttime
                def2 = func.trunc(
                    sa.text("current_timestamp"),
                    sa.literal_column("'DAY'"),
                    type_=sa.Date,
                )

                deftype = sa.Date
            elif use_function_defaults:
                def1 = currenttime
                deftype = sa.Date
                if testing.against("mssql"):
                    def2 = sa.text("getdate()")
                else:
                    def2 = sa.text("current_date")
                ts = conn.scalar(func.current_date())
            else:
                def1 = def2 = "3"
                ts = 3
                deftype = Integer

            cls.ts = ts

        Table(
            "default_test",
            metadata,
            # python function
            Column("col1", Integer, primary_key=True, default=mydefault),
            # python literal
            Column(
                "col2",
                String(20),
                default="imthedefault",
                onupdate="im the update",
            ),
            # preexecute expression
            Column(
                "col3",
                Integer,
                default=func.length("abcdef"),
                onupdate=func.length("abcdefghijk"),
            ),
            # SQL-side default from sql expression
            Column("col4", deftype, server_default=def1),
            # SQL-side default from literal expression
            Column("col5", deftype, server_default=def2),
            # preexecute + update timestamp
            Column("col6", sa.Date, default=currenttime, onupdate=currenttime),
            Column("boolcol1", sa.Boolean, default=True),
            Column("boolcol2", sa.Boolean, default=False),
            # python function which uses ExecutionContext
            Column(
                "col7",
                Integer,
                default=mydefault_using_connection,
                onupdate=myupdate_with_ctx,
            ),
            # python builtin
            Column(
                "col8",
                sa.Date,
                default=datetime.date.today,
                onupdate=datetime.date.today,
            ),
            # combo
            Column("col9", String(20), default="py", server_default="ddl"),
            # python method w/ context
            Column("col10", String(20), default=MyClass.gen_default),
            # fixed default w/ type that has bound processor
            Column("col11", MyType(), default="foo"),
        )
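
A hedged usage sketch against the `default_test` table defined above, assuming its MetaData has been created on `testing.db`: an INSERT that supplies no values lets the Python-side defaults fire in-process while the server-side default columns are listed for post-fetch.

# Hedged sketch; "metadata" refers to the fixture MetaData from define_tables.
t = metadata.tables["default_test"]
with testing.db.connect() as conn:
    r = conn.execute(t.insert())
    print(r.inserted_primary_key)                       # e.g. [51] from mydefault()
    print([c.name for c in r.context.postfetch_cols])   # server-side defaults to re-fetch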