def _exotic_targets_fixture(self, conn):
    """Seed users_xtra with two rows, then verify row id=1 round-trips."""
    tbl = self.tables.users_xtra
    # one row via the standalone insert() construct...
    conn.execute(
        insert(tbl),
        {
            "id": 1,
            "name": "name1",
            "login_email": "*****@*****.**",
            "lets_index_this": "not",
        },
    )
    # ...and one via the table-bound insert(), exercising both forms
    conn.execute(
        tbl.insert(),
        {
            "id": 2,
            "name": "name2",
            "login_email": "*****@*****.**",
            "lets_index_this": "not",
        },
    )
    eq_(
        conn.execute(tbl.select().where(tbl.c.id == 1)).fetchall(),
        [(1, "name1", "*****@*****.**", "not")],
    )
def test_on_conflict_do_nothing_target(self):
    """ON CONFLICT DO NOTHING with explicit index_elements: a second,
    conflicting insert is silently skipped."""
    tbl = self.tables.users
    with testing.db.connect() as conn:
        res = conn.execute(
            insert(tbl).on_conflict_do_nothing(
                index_elements=tbl.primary_key.columns
            ),
            {"id": 1, "name": "name1"},
        )
        eq_(res.inserted_primary_key, [1])
        eq_(res.returned_defaults, None)
        # same PK again: the conflict target matches, row is untouched
        res = conn.execute(
            insert(tbl).on_conflict_do_nothing(
                index_elements=tbl.primary_key.columns
            ),
            {"id": 1, "name": "name2"},
        )
        eq_(res.inserted_primary_key, [1])
        eq_(res.returned_defaults, None)
        eq_(
            conn.execute(tbl.select().where(tbl.c.id == 1)).fetchall(),
            [(1, "name1")],
        )
def test_psycopg2_nonempty_connection_string_w_query_two(self):
    """A single ?host= query entry is passed through to the connect args."""
    dialect = psycopg2_dialect.dialect()
    u = url.make_url("postgresql://*****:*****@/DB?host=hostA")
    cargs, cparams = dialect.create_connect_args(u)
    eq_(cargs, [])
    eq_(cparams["host"], "hostA")
def test_on_conflict_do_nothing_connectionless(self):
    """DO NOTHING via a named constraint, executed first on an explicit
    Connection and then connectionless on the Engine."""
    tbl = self.tables.users_xtra
    with testing.db.connect() as conn:
        # first insert succeeds and reports the generated primary key
        result = conn.execute(
            insert(tbl).on_conflict_do_nothing(constraint="uq_login_email"),
            {"name": "name1", "login_email": "email1"},
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, (1,))
    # conflicting insert, run connectionless: skipped, nothing returned
    result = testing.db.execute(
        insert(tbl).on_conflict_do_nothing(constraint="uq_login_email"),
        {"name": "name2", "login_email": "email1"},
    )
    eq_(result.inserted_primary_key, None)
    eq_(result.returned_defaults, None)
    eq_(
        testing.db.execute(
            tbl.select().where(tbl.c.id == 1)
        ).fetchall(),
        [(1, "name1", "email1", None)],
    )
def test_on_duplicate_key_update_preserve_order(self):
    """MySQL ON DUPLICATE KEY UPDATE: the column ordering given to
    on_duplicate_key_update() is preserved in the rendered SET clause,
    which is observable because later assignments see earlier ones."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(
            insert(
                foos,
                [
                    dict(id=1, bar="b", baz="bz"),
                    dict(id=2, bar="b", baz="bz2"),
                ],
            )
        )
        stmt = insert(foos)
        # SQL expression, not a Python bool test: renders "updated_once = false"
        update_condition = foos.c.updated_once == False
        # The following statements show importance of the columns update
        # ordering as old values being referenced in UPDATE clause are
        # getting replaced one by one from left to right with their new
        # values.
        stmt1 = stmt.on_duplicate_key_update(
            [
                (
                    "bar",
                    func.if_(
                        update_condition,
                        func.values(foos.c.bar),
                        foos.c.bar,
                    ),
                ),
                (
                    "updated_once",
                    func.if_(update_condition, True, foos.c.updated_once),
                ),
            ]
        )
        stmt2 = stmt.on_duplicate_key_update(
            [
                (
                    "updated_once",
                    func.if_(update_condition, True, foos.c.updated_once),
                ),
                (
                    "bar",
                    func.if_(
                        update_condition,
                        func.values(foos.c.bar),
                        foos.c.bar,
                    ),
                ),
            ]
        )
        # First statement should succeed updating column bar
        conn.execute(stmt1, dict(id=1, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "ab", "bz", True)],
        )
        # Second statement will do noop update of column bar
        # (updated_once is flipped first, so the bar condition is False)
        conn.execute(stmt2, dict(id=2, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 2)).fetchall(),
            [(2, "b", "bz2", True)],
        )
def test_psycopg2_nonempty_connection_string_w_query_three(self):
    """Multiple ?host= entries collapse into one comma-separated host
    parameter, keeping any per-host port suffix."""
    dialect = psycopg2_dialect.dialect()
    u = url.make_url(
        "postgresql://*****:*****@/DB"
        "?host=hostA:portA&host=hostB&host=hostC"
    )
    cargs, cparams = dialect.create_connect_args(u)
    eq_(cargs, [])
    eq_(cparams["host"], "hostA:portA,hostB,hostC")
def test_serial_integer(self):
    """Column DDL for integer primary keys: SERIAL / BIGSERIAL /
    SMALLSERIAL selection across plain types, PG-specific types,
    with_variant() combinations, TypeDecorators, and server versions."""

    class BITD(TypeDecorator):
        # TypeDecorator whose PostgreSQL-side impl is BigInteger,
        # so it should render BIGSERIAL
        impl = Integer

        def load_dialect_impl(self, dialect):
            if dialect.name == "postgresql":
                return BigInteger()
            else:
                return Integer()

    for version, type_, expected in [
        (None, Integer, "SERIAL"),
        (None, BigInteger, "BIGSERIAL"),
        ((9, 1), SmallInteger, "SMALLINT"),
        ((9, 2), SmallInteger, "SMALLSERIAL"),
        (None, postgresql.INTEGER, "SERIAL"),
        (None, postgresql.BIGINT, "BIGSERIAL"),
        (
            None,
            Integer().with_variant(BigInteger(), "postgresql"),
            "BIGSERIAL",
        ),
        (
            None,
            Integer().with_variant(postgresql.BIGINT, "postgresql"),
            "BIGSERIAL",
        ),
        (
            (9, 2),
            Integer().with_variant(SmallInteger, "postgresql"),
            "SMALLSERIAL",
        ),
        (None, BITD(), "BIGSERIAL"),
    ]:
        m = MetaData()
        t = Table("t", m, Column("c", type_, primary_key=True))
        if version:
            # fake the reported server version so version-dependent
            # rendering (e.g. SMALLSERIAL only on 9.2+) is exercised
            dialect = postgresql.dialect()
            dialect._get_server_version_info = mock.Mock(
                return_value=version
            )
            dialect.initialize(testing.db.connect())
        else:
            dialect = testing.db.dialect
        ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        eq_(
            ddl_compiler.get_column_specification(t.c.c),
            "c %s NOT NULL" % expected,
        )
def test_on_duplicate_key_update_null(self):
    """on_duplicate_key_update with a literal None sets the column to
    NULL on the conflicting row."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(insert(foos, dict(id=1, bar="b", baz="bz")))
        stmt = insert(foos).values(
            [dict(id=1, bar="ab"), dict(id=2, bar="b")]
        ).on_duplicate_key_update(updated_once=None)
        result = conn.execute(stmt)
        # the reported PK reflects the newly inserted row (id=2)
        eq_(result.inserted_primary_key, [2])
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "b", "bz", None)],
        )
def test_last_inserted_id(self):
    """inserted_primary_key after ON DUPLICATE KEY UPDATE, both for a
    fresh insert and for an update of the existing row."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        # fresh row: autoincrement produces id 1
        ins = insert(foos).values({"bar": "b", "baz": "bz"})
        res = conn.execute(
            ins.on_duplicate_key_update(bar=ins.inserted.bar, baz="newbz")
        )
        eq_(res.inserted_primary_key, [1])
        # explicit id=1 conflicts; the updated row's PK is still reported
        ins = insert(foos).values({"id": 1, "bar": "b", "baz": "bz"})
        res = conn.execute(
            ins.on_duplicate_key_update(bar=ins.inserted.bar, baz="newbz")
        )
        eq_(res.inserted_primary_key, [1])
def test_extract(self):
    """extract() of year/month/day from now() - 5 days agrees with the
    same arithmetic done on the Python side."""
    five_days_ago = testing.db.scalar(
        select([func.now()])
    ) - datetime.timedelta(days=5)
    for field, expected in (
        ("year", five_days_ago.year),
        ("month", five_days_ago.month),
        ("day", five_days_ago.day),
    ):
        got = testing.db.execute(
            select(
                [extract(field, func.now() + datetime.timedelta(days=-5))]
            )
        ).scalar()
        eq_(got, expected)
def test_insert(self):
    """executemany INSERT is routed to psycopg2's execute_batch or
    execute_values depending on the engine's executemany_mode, with the
    expected statement and keyword arguments."""
    from psycopg2 import extras

    if self.engine.dialect.executemany_mode is EXECUTEMANY_BATCH:
        meth = extras.execute_batch
        stmt = "INSERT INTO data (x, y) VALUES (%(x)s, %(y)s)"
        expected_kwargs = {}
    else:
        meth = extras.execute_values
        stmt = "INSERT INTO data (x, y) VALUES %s"
        expected_kwargs = {"template": "(%(x)s, %(y)s)"}
    # wrap (not replace) the extras function so the real insert still runs
    with mock.patch.object(
        extras, meth.__name__, side_effect=meth
    ) as mock_exec:
        with self.engine.connect() as conn:
            conn.execute(
                self.tables.data.insert(),
                [
                    {"x": "x1", "y": "y1"},
                    {"x": "x2", "y": "y2"},
                    {"x": "x3", "y": "y3"},
                ],
            )
            # rows actually landed (4th/last column is a server default)
            eq_(
                conn.execute(select([self.tables.data])).fetchall(),
                [
                    (1, "x1", "y1", 5),
                    (2, "x2", "y2", 5),
                    (3, "x3", "y3", 5),
                ],
            )
    # exactly one call to the chosen extras helper, with all parameters
    eq_(
        mock_exec.mock_calls,
        [
            mock.call(
                mock.ANY,
                stmt,
                (
                    {"x": "x1", "y": "y1"},
                    {"x": "x2", "y": "y2"},
                    {"x": "x3", "y": "y3"},
                ),
                **expected_kwargs
            )
        ],
    )
def test_pg_dialect_use_native_unicode_from_config(self):
    """engine_from_config coerces the use_native_unicode string values
    "false"/"true" into real booleans on the dialect."""
    for str_value, expected in [("false", False), ("true", True)]:
        config = {
            "sqlalchemy_1_3.url": testing.db.url,
            "sqlalchemy_1_3.use_native_unicode": str_value,
        }
        e = engine_from_config(config, _initialize=False)
        eq_(e.dialect.use_native_unicode, expected)
def test_client_encoding(self):
    """The client_encoding engine option changes the session's
    client_encoding as reported by the server.

    Fix: the second connection was previously opened with ``e.connect()``
    and never closed, leaking a connection for the life of the test; both
    connections now use context managers.
    """
    with testing.db.connect() as c:
        current_encoding = c.execute("show client_encoding").fetchone()[0]

    # attempt to use an encoding that's not
    # already set
    if current_encoding == "UTF8":
        test_encoding = "LATIN1"
    else:
        test_encoding = "UTF8"

    e = engines.testing_engine(options={"client_encoding": test_encoding})
    with e.connect() as c:
        new_encoding = c.execute("show client_encoding").fetchone()[0]
    eq_(new_encoding, test_encoding)
def test_insert_w_newlines(self):
    """execute_values template extraction must work even when the
    compiled INSERT's single-values expression contains a newline
    (here, from an embedded scalar SELECT)."""
    from psycopg2 import extras

    t = self.tables.data
    ins = t.insert(inline=True).values(
        id=bindparam("id"),
        x=select([literal_column("5")]).select_from(self.tables.data),
        y=bindparam("y"),
        z=bindparam("z"),
    )
    # compiled SQL has a newline in it
    eq_(
        str(ins.compile(testing.db)),
        "INSERT INTO data (id, x, y, z) VALUES (%(id)s, "
        "(SELECT 5 \nFROM data), %(y)s, %(z)s)",
    )
    meth = extras.execute_values
    with mock.patch.object(
        extras, "execute_values", side_effect=meth
    ) as mock_exec:
        with self.engine.connect() as conn:
            conn.execute(
                ins,
                [
                    {"id": 1, "y": "y1", "z": 1},
                    {"id": 2, "y": "y2", "z": 2},
                    {"id": 3, "y": "y3", "z": 3},
                ],
            )
    # the VALUES template handed to execute_values keeps the newline
    eq_(
        mock_exec.mock_calls,
        [
            mock.call(
                mock.ANY,
                "INSERT INTO data (id, x, y, z) VALUES %s",
                (
                    {"id": 1, "y": "y1", "z": 1},
                    {"id": 2, "y": "y2", "z": 2},
                    {"id": 3, "y": "y3", "z": 3},
                ),
                template="(%(id)s, (SELECT 5 \nFROM data), %(y)s, %(z)s)",
            )
        ],
    )
def test_insert_page_size(self):
    """The deprecated executemany_*_page_size engine options are
    forwarded as page_size to execute_batch / execute_values."""
    from psycopg2 import extras

    opts = self.options.copy()
    opts["executemany_batch_page_size"] = 500
    opts["executemany_values_page_size"] = 1000
    # options are deprecated; building the engine is expected to warn
    with self.expect_deprecated_opts():
        eng = engines.testing_engine(options=opts)
    if eng.dialect.executemany_mode is EXECUTEMANY_BATCH:
        meth = extras.execute_batch
        stmt = "INSERT INTO data (x, y) VALUES (%(x)s, %(y)s)"
        expected_kwargs = {"page_size": 500}
    else:
        meth = extras.execute_values
        stmt = "INSERT INTO data (x, y) VALUES %s"
        expected_kwargs = {"page_size": 1000, "template": "(%(x)s, %(y)s)"}
    # wrap (not replace) the extras function so the insert still runs
    with mock.patch.object(
        extras, meth.__name__, side_effect=meth
    ) as mock_exec:
        with eng.connect() as conn:
            conn.execute(
                self.tables.data.insert(),
                [
                    {"x": "x1", "y": "y1"},
                    {"x": "x2", "y": "y2"},
                    {"x": "x3", "y": "y3"},
                ],
            )
    eq_(
        mock_exec.mock_calls,
        [
            mock.call(
                mock.ANY,
                stmt,
                (
                    {"x": "x1", "y": "y1"},
                    {"x": "x2", "y": "y2"},
                    {"x": "x3", "y": "y3"},
                ),
                **expected_kwargs
            )
        ],
    )
def test_quoted_name_bindparam_ok(self):
    """A quoted_name with quote=False survives the bind/result round
    trip as a plain string."""
    from sqlalchemy_1_3.sql.elements import quoted_name

    with testing.db.connect() as conn:
        stmt = select(
            [cast(literal(quoted_name("some_name", False)), String)]
        )
        eq_(conn.scalar(stmt), "some_name")
def test_version_parsing(self):
    """_get_server_version_info parses version tuples out of assorted
    real-world server banners (devel/beta suffixes, EnterpriseDB,
    VMware vFabric)."""

    def mock_conn(res):
        # Connection stub whose execute().scalar() returns ``res``
        return mock.Mock(
            execute=mock.Mock(
                return_value=mock.Mock(scalar=mock.Mock(return_value=res))
            )
        )

    dialect = postgresql.dialect()
    for string, version in [
        (
            "PostgreSQL 8.3.8 on i686-redhat-linux-gnu, compiled by "
            "GCC gcc (GCC) 4.1.2 20070925 (Red Hat 4.1.2-33)",
            (8, 3, 8),
        ),
        (
            "PostgreSQL 8.5devel on x86_64-unknown-linux-gnu, "
            "compiled by GCC gcc (GCC) 4.4.2, 64-bit",
            (8, 5),
        ),
        (
            "EnterpriseDB 9.1.2.2 on x86_64-unknown-linux-gnu, "
            "compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-50), "
            "64-bit",
            (9, 1, 2),
        ),
        (
            "[PostgreSQL 9.2.4 ] VMware vFabric Postgres 9.2.4.0 "
            "release build 1080137",
            (9, 2, 4),
        ),
        (
            # NOTE: no separator between the two fragments below; the
            # banner string is deliberately malformed in this case
            "PostgreSQL 10devel on x86_64-pc-linux-gnu"
            "compiled by gcc (GCC) 6.3.1 20170306, 64-bit",
            (10,),
        ),
        (
            "PostgreSQL 10beta1 on x86_64-pc-linux-gnu, "
            "compiled by gcc (GCC) 4.8.5 20150623 "
            "(Red Hat 4.8.5-11), 64-bit",
            (10,),
        ),
    ]:
        eq_(dialect._get_server_version_info(mock_conn(string)), version)
def test_on_conflict_do_update_exotic_targets_four_no_pk(self):
    """Upsert targeting the login_email column (no PK in the supplied
    parameters): the existing row id=1 is updated in place."""
    tbl = self.tables.users_xtra
    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # conflict target is login_email, not the primary key
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=[tbl.c.login_email],
            set_=dict(
                id=stmt.excluded.id,
                name=stmt.excluded.name,
                login_email=stmt.excluded.login_email,
            ),
        )
        result = conn.execute(
            stmt, dict(name="name3", login_email="*****@*****.**")
        )
        # the server-generated defaults identify the updated row
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, (1,))
        eq_(
            conn.execute(tbl.select().order_by(tbl.c.id)).fetchall(),
            [
                (1, "name3", "*****@*****.**", "not"),
                (2, "name2", "*****@*****.**", "not"),
            ],
        )
def test_on_conflict_do_update_exotic_targets_six(self):
    """Upsert against a partial (filtered) unique index, supplying both
    its column list and its WHERE clause as the conflict target."""
    users = self.tables.users_xtra
    with testing.db.connect() as conn:
        conn.execute(
            insert(users),
            dict(
                id=1,
                name="name1",
                login_email="*****@*****.**",
                lets_index_this="unique_name",
            ),
        )
        i = insert(users)
        i = i.on_conflict_do_update(
            # partial index: both columns and its predicate are required
            index_elements=self.unique_partial_index.columns,
            index_where=self.unique_partial_index.dialect_options[
                "postgresql"
            ]["where"],
            set_=dict(
                name=i.excluded.name, login_email=i.excluded.login_email
            ),
        )
        conn.execute(
            i,
            [
                dict(
                    name="name1",
                    login_email="*****@*****.**",
                    lets_index_this="unique_name",
                )
            ],
        )
        # still a single row: the conflicting insert became an update
        eq_(
            conn.execute(users.select()).fetchall(),
            [(1, "name1", "*****@*****.**", "unique_name")],
        )
def test_on_conflict_do_update_multivalues(self):
    """Multi-VALUES insert with DO UPDATE + WHERE: conflicting rows are
    updated only when the WHERE clause passes."""
    tbl = self.tables.users
    with testing.db.connect() as conn:
        conn.execute(tbl.insert(), dict(id=1, name="name1"))
        conn.execute(tbl.insert(), dict(id=2, name="name2"))
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=tbl.primary_key.columns,
            set_=dict(name="updated"),
            where=(stmt.excluded.name != "name12"),
        ).values(
            [
                dict(id=1, name="name11"),
                dict(id=2, name="name12"),
                dict(id=3, name="name13"),
                dict(id=4, name="name14"),
            ]
        )
        result = conn.execute(stmt)
        # a multi-values insert does not report a usable primary key
        eq_(result.inserted_primary_key, [None])
        eq_(result.returned_defaults, None)
        # id=1 updated, id=2 filtered out by WHERE, id=3/4 freshly inserted
        eq_(
            conn.execute(tbl.select().order_by(tbl.c.id)).fetchall(),
            [(1, "updated"), (2, "name2"), (3, "name13"), (4, "name14")],
        )
def test_on_conflict_do_update_no_row_actually_affected(self):
    """When the DO UPDATE WHERE clause filters out the conflicting row,
    neither an insert nor an update happens and no PK is reported."""
    tbl = self.tables.users_xtra
    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=[tbl.c.login_email],
            set_=dict(name="new_name"),
            where=(stmt.excluded.name == "other_name"),
        )
        result = conn.execute(
            stmt, dict(name="name2", login_email="*****@*****.**")
        )
        eq_(result.returned_defaults, None)
        eq_(result.inserted_primary_key, None)
        # both fixture rows are untouched
        eq_(
            conn.execute(tbl.select()).fetchall(),
            [
                (1, "name1", "*****@*****.**", "not"),
                (2, "name2", "*****@*****.**", "not"),
            ],
        )
def test_on_conflict_do_update_exotic_targets_two(self):
    """Upsert via the primary-key conflict target, refreshing name and
    login_email from the excluded row."""
    tbl = self.tables.users_xtra
    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # primary key constraint: upsert on the unique id column
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=tbl.primary_key.columns,
            set_=dict(
                name=stmt.excluded.name,
                login_email=stmt.excluded.login_email,
            ),
        )
        result = conn.execute(
            stmt,
            dict(
                id=1,
                name="name2",
                login_email="*****@*****.**",
                lets_index_this="not",
            ),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)
        eq_(
            conn.execute(tbl.select().where(tbl.c.id == 1)).fetchall(),
            [(1, "name2", "*****@*****.**", "not")],
        )
def test_on_conflict_do_update_special_types_in_set(self):
    """Values given in set_ go through bind processing, as shown by the
    " processed" suffix in the expected results from the bind_targets
    table's column type."""
    tbl = self.tables.bind_targets
    with testing.db.connect() as conn:
        conn.execute(insert(tbl), {"id": 1, "data": "initial data"})
        eq_(
            conn.scalar(sql.select([tbl.c.data])),
            "initial data processed",
        )
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=[tbl.c.id],
            set_=dict(data="new updated data"),
        )
        conn.execute(stmt, {"id": 1, "data": "new inserted data"})
        eq_(
            conn.scalar(sql.select([tbl.c.data])),
            "new updated data processed",
        )
def test_update(self):
    """executemany UPDATE via bindparam()-driven WHERE/VALUES."""
    data = self.tables.data
    with self.engine.connect() as conn:
        conn.execute(
            data.insert(),
            [
                {"x": "x1", "y": "y1"},
                {"x": "x2", "y": "y2"},
                {"x": "x3", "y": "y3"},
            ],
        )
        conn.execute(
            data.update()
            .where(data.c.x == bindparam("xval"))
            .values(y=bindparam("yval")),
            [{"xval": "x1", "yval": "y5"}, {"xval": "x3", "yval": "y6"}],
        )
        # rows 1 and 3 updated, row 2 untouched; last column is a default
        eq_(
            conn.execute(
                select([data]).order_by(data.c.id)
            ).fetchall(),
            [(1, "x1", "y5", 5), (2, "x2", "y2", 5), (3, "x3", "y6", 5)],
        )
def test_update_fallback(self):
    """An executemany UPDATE cannot use execute_values; it must fall
    back to execute_batch with the plain compiled statement."""
    from psycopg2 import extras

    eng = self.engine
    meth = extras.execute_batch
    stmt = "UPDATE data SET y=%(yval)s WHERE data.x = %(xval)s"
    expected_kwargs = {}
    # wrap (not replace) execute_batch so the update still runs
    with mock.patch.object(
        extras, meth.__name__, side_effect=meth
    ) as mock_exec:
        with eng.connect() as conn:
            conn.execute(
                self.tables.data.update()
                .where(self.tables.data.c.x == bindparam("xval"))
                .values(y=bindparam("yval")),
                [
                    {"xval": "x1", "yval": "y5"},
                    {"xval": "x3", "yval": "y6"},
                ],
            )
    eq_(
        mock_exec.mock_calls,
        [
            mock.call(
                mock.ANY,
                stmt,
                (
                    {"xval": "x1", "yval": "y5"},
                    {"xval": "x3", "yval": "y6"},
                ),
                **expected_kwargs
            )
        ],
    )
def test_schema_roundtrips(self):
    """CRUD round trip against a table in an explicit (non-default)
    schema, using the legacy implicit-execution API."""
    meta = self.metadata
    users = Table(
        "users",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
        schema="test_schema",
    )
    users.create()
    users.insert().execute(id=1, name="name1")
    users.insert().execute(id=2, name="name2")
    users.insert().execute(id=3, name="name3")
    users.insert().execute(id=4, name="name4")
    eq_(
        users.select().where(users.c.name == "name2").execute().fetchall(),
        [(2, "name2")],
    )
    # use_labels must not affect the fetched values
    eq_(
        users.select(use_labels=True)
        .where(users.c.name == "name2")
        .execute()
        .fetchall(),
        [(2, "name2")],
    )
    users.delete().where(users.c.id == 3).execute()
    eq_(
        users.select().where(users.c.name == "name3").execute().fetchall(),
        [],
    )
    users.update().where(users.c.name == "name4").execute(name="newname")
    eq_(
        users.select(use_labels=True)
        .where(users.c.id == 4)
        .execute()
        .fetchall(),
        [(4, "newname")],
    )
def test_on_conflict_do_update_three(self):
    """DO UPDATE pulling the replacement value from the excluded row."""
    tbl = self.tables.users
    with testing.db.connect() as conn:
        conn.execute(tbl.insert(), dict(id=1, name="name1"))
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=tbl.primary_key.columns,
            set_=dict(name=stmt.excluded.name),
        )
        result = conn.execute(stmt, dict(id=1, name="name3"))
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)
        eq_(
            conn.execute(tbl.select().where(tbl.c.id == 1)).fetchall(),
            [(1, "name3")],
        )
def test_on_conflict_do_update_five(self):
    """set_ may move the row to a different primary key; the reported
    inserted_primary_key still reflects the INSERT's VALUES."""
    tbl = self.tables.users
    with testing.db.connect() as conn:
        conn.execute(tbl.insert(), dict(id=1, name="name1"))
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            index_elements=tbl.primary_key.columns,
            set_=dict(id=10, name="I'm a name"),
        ).values(id=1, name="name4")
        result = conn.execute(stmt)
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)
        # the row now lives under id=10
        eq_(
            conn.execute(
                tbl.select().where(tbl.c.id == 10)
            ).fetchall(),
            [(10, "I'm a name")],
        )
def test_on_conflict_do_update_exotic_targets_four(self):
    """Upsert via a named unique constraint; the conflict is on
    login_email, and the untouched lets_index_this column keeps its
    original value."""
    tbl = self.tables.users_xtra
    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # unique constraint referenced by name: upsert targets
        # login_email, not the primary key
        stmt = insert(tbl)
        stmt = stmt.on_conflict_do_update(
            constraint=self.unique_constraint.name,
            set_=dict(
                id=stmt.excluded.id,
                name=stmt.excluded.name,
                login_email=stmt.excluded.login_email,
            ),
        )
        # note: lets_index_this value totally ignored in SET clause.
        result = conn.execute(
            stmt,
            dict(
                id=43,
                name="nameunique2",
                login_email="*****@*****.**",
                lets_index_this="unique",
            ),
        )
        eq_(result.inserted_primary_key, [43])
        eq_(result.returned_defaults, None)
        eq_(
            conn.execute(
                tbl.select().where(
                    tbl.c.login_email == "*****@*****.**"
                )
            ).fetchall(),
            [(43, "nameunique2", "*****@*****.**", "not")],
        )
def test_insert_modified_by_event(self):
    """If an event hook rewrites the INSERT statement, the dialect must
    detect that its cached single-values expression no longer applies
    and fall back from execute_values to execute_batch."""
    from psycopg2 import extras

    t = self.tables.data
    ins = t.insert(inline=True).values(
        id=bindparam("id"),
        x=select([literal_column("5")]).select_from(self.tables.data),
        y=bindparam("y"),
        z=bindparam("z"),
    )
    # compiled SQL has a newline in it
    eq_(
        str(ins.compile(testing.db)),
        "INSERT INTO data (id, x, y, z) VALUES (%(id)s, "
        "(SELECT 5 \nFROM data), %(y)s, %(z)s)",
    )
    meth = extras.execute_batch
    # execute_values is fully mocked (must never fire); execute_batch is
    # wrapped so the rewritten statement actually runs
    with mock.patch.object(
        extras, "execute_values"
    ) as mock_values, mock.patch.object(
        extras, "execute_batch", side_effect=meth
    ) as mock_batch:
        with self.engine.connect() as conn:
            # create an event hook that will change the statement to
            # something else, meaning the dialect has to detect that
            # insert_single_values_expr is no longer useful
            @event.listens_for(conn, "before_cursor_execute", retval=True)
            def before_cursor_execute(
                conn, cursor, statement, parameters, context, executemany
            ):
                statement = (
                    "INSERT INTO data (id, y, z) VALUES "
                    "(%(id)s, %(y)s, %(z)s)"
                )
                return statement, parameters

            conn.execute(
                ins,
                [
                    {"id": 1, "y": "y1", "z": 1},
                    {"id": 2, "y": "y2", "z": 2},
                    {"id": 3, "y": "y3", "z": 3},
                ],
            )
    eq_(mock_values.mock_calls, [])
    eq_(
        mock_batch.mock_calls,
        [
            mock.call(
                mock.ANY,
                "INSERT INTO data (id, y, z) VALUES "
                "(%(id)s, %(y)s, %(z)s)",
                (
                    {"id": 1, "y": "y1", "z": 1},
                    {"id": 2, "y": "y2", "z": 2},
                    {"id": 3, "y": "y3", "z": 3},
                ),
            )
        ],
    )