def test_wrap_unicode_arg(self):
    """Assign a unicode ``args`` to a DBAPIError after construction.

    This is not supported by the API but oslo_db is doing it, so the
    string form of the exception must still render.
    """
    exc = sa_exceptions.DBAPIError(False, False, False)
    exc.args = [u("méil")]
    expected = compat.u(
        "méil\n(Background on this error at: "
        "http://sqlalche.me/e/%s/dbapi)" % sa_exceptions._version_token
    )
    eq_(compat.text_type(exc), expected)
    eq_(exc.args, (u("méil"),))
def test_quoted_column_non_unicode(self):
    """Round-trip unicode data through a quoted, ASCII-named column."""
    tbl = Table(
        "atable",
        self.metadata,
        Column("_underscorecolumn", Unicode(255), primary_key=True),
    )
    self.metadata.create_all()
    value = u("’é")
    tbl.insert().execute({"_underscorecolumn": value})
    fetched = testing.db.execute(
        tbl.select().where(tbl.c._underscorecolumn == value)
    ).scalar()
    eq_(fetched, value)
def test_insert(self):
    """INSERT using unicode column-name keys, then verify each table."""
    t1.insert().execute({u("méil"): 1, ue("\u6e2c\u8a66"): 5})
    t2.insert().execute({u("a"): 1, u("b"): 1})
    t3.insert().execute(
        {
            ue("\u6e2c\u8a66_id"): 1,
            ue("unitable1_\u6e2c\u8a66"): 5,
            u("Unitéble2_b"): 1,
            ue("\u6e2c\u8a66_self"): 1,
        }
    )
    for tbl, expected in (
        (t1, [(1, 5)]),
        (t2, [(1, 1)]),
        (t3, [(1, 5, 1, 1)]),
    ):
        assert tbl.select().execute().fetchall() == expected
def test_reflect_nvarchar(self):
    """NVARCHAR/NCHAR reflect as the same generic types and round-trip
    unicode data; NCHAR comes back space-padded to its length."""
    Table(
        "tnv",
        self.metadata,
        Column("nv_data", sqltypes.NVARCHAR(255)),
        Column("c_data", sqltypes.NCHAR(20)),
    )
    self.metadata.create_all()
    reflected = Table("tnv", MetaData(testing.db), autoload=True)
    assert isinstance(reflected.c.nv_data.type, sqltypes.NVARCHAR)
    assert isinstance(reflected.c.c_data.type, sqltypes.NCHAR)

    if testing.against("oracle+cx_oracle"):
        # cx_oracle should choose its unicode-specific impl classes
        nv_impl = reflected.c.nv_data.type.dialect_impl(testing.db.dialect)
        c_impl = reflected.c.c_data.type.dialect_impl(testing.db.dialect)
        assert isinstance(nv_impl, cx_oracle._OracleUnicodeStringNCHAR)
        assert isinstance(c_impl, cx_oracle._OracleNChar)

    data = u("m’a réveillé.")
    with testing.db.connect() as conn:
        conn.execute(reflected.insert(), dict(nv_data=data, c_data=data))
        nv_data, c_data = conn.execute(reflected.select()).first()
        eq_(nv_data, data)
        # NCHAR(20) space-pads the 13-character value; char is space padded
        eq_(c_data, data + (" " * 7))
        assert isinstance(nv_data, util.text_type)
        assert isinstance(c_data, util.text_type)
def test_string_text_explicit_literal_binds(self):
    # the literal expression here coerces the right side to Unicode on
    # Python 3 for a plain string; use a unicode string just to confirm
    # literal() is doing this — it should render as N'foo'
    expr = column("x", String()) == literal(util.u("foo"))
    self.assert_compile(expr, "x = N'foo'", literal_binds=True)
def setup(self):
    """Create the tables, load NUM_RECORDS rows into each, and warm up
    statement/type caches before the timed tests run."""
    metadata.create_all()

    def _rows():
        # one dict per record: field0..fieldN -> value0..valueN
        return [
            dict(
                ("field%d" % fnum, u("value%d" % fnum))
                for fnum in range(NUM_FIELDS)
            )
            for r_num in range(NUM_RECORDS)
        ]

    t.insert().execute(_rows())
    t2.insert().execute(_rows())

    # warm up type caches
    t.select().execute().fetchall()
    t2.select().execute().fetchall()
    field_list = ", ".join("field%d" % fnum for fnum in range(NUM_FIELDS))
    testing.db.execute("SELECT %s FROM table1" % field_list).fetchall()
    testing.db.execute("SELECT %s FROM table2" % field_list).fetchall()
def _bug_88718_96365_casing_1(self):
    """Fixture for MySQL casing bugs 88718/96365, variant 1.

    Returns the foreign-key records as they come back from reflection
    plus the matching information_schema rows.
    """
    fk_records = [
        dict(
            name="FK_PlaylistTTrackId",
            constrained_columns=["TTrackID"],
            referred_schema="Test_Schema",
            referred_table="Track",
            referred_columns=["trackid"],
            options={},
        ),
        dict(
            name="FK_PlaylistTrackId",
            constrained_columns=["TrackID"],
            referred_schema=None,
            referred_table="Track",
            referred_columns=["trackid"],
            options={},
        ),
    ]
    ischema_rows = [
        (util.u("Test"), util.u("Track"), "TrackID"),
        (util.u("Test_Schema"), util.u("Track"), "TrackID"),
    ]
    return fk_records, ischema_rows
def test_reflect(self):
    """Reflect tables with unicode table/column names and verify that
    inserts and ordered selects work through the reflected objects."""
    # seed the original tables with one row each
    t1.insert().execute({u("méil"): 2, ue("\u6e2c\u8a66"): 7})
    t2.insert().execute({u("a"): 2, u("b"): 2})
    t3.insert().execute({
        ue("\u6e2c\u8a66_id"): 2,
        ue("unitable1_\u6e2c\u8a66"): 7,
        u("Unitéble2_b"): 2,
        ue("\u6e2c\u8a66_self"): 2,
    })

    # reflect all three tables into a fresh MetaData
    meta = MetaData(testing.db)
    tt1 = Table(t1.name, meta, autoload=True)
    tt2 = Table(t2.name, meta, autoload=True)
    tt3 = Table(t3.name, meta, autoload=True)

    # insert a second row through the reflected tables, addressing
    # columns by their unicode names
    tt1.insert().execute({u("méil"): 1, ue("\u6e2c\u8a66"): 5})
    tt2.insert().execute({u("méil"): 1, ue("\u6e2c\u8a66"): 1})
    tt3.insert().execute({
        ue("\u6e2c\u8a66_id"): 1,
        ue("unitable1_\u6e2c\u8a66"): 5,
        u("Unitéble2_b"): 1,
        ue("\u6e2c\u8a66_self"): 1,
    })

    # ORDER BY a unicode column name DESC: the first (seed) row sorts
    # before the second row in each case
    self.assert_(
        tt1.select(
            order_by=desc(u("méil"))).execute().fetchall() ==
        [(2, 7), (1, 5)])
    self.assert_(
        tt2.select(
            order_by=desc(u("méil"))).execute().fetchall() ==
        [(2, 2), (1, 1)])
    self.assert_(
        tt3.select(order_by=desc(ue(
            "\u6e2c\u8a66_id"))).execute().fetchall() ==
        [(2, 7, 2, 2), (1, 5, 1, 1)])
class ConvertUnicodeDeprecationTest(fixtures.TestBase):
    """Tests for the deprecated convert_unicode / unicode_error
    parameters, confirming both the deprecation warnings and the
    legacy encode/decode behavior."""

    __backend__ = True

    data = util.u(
        "Alors vous imaginez ma surprise, au lever du jour, quand "
        "une drôle de petite voix m’a réveillé. "
        "Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
    )

    def test_unicode_warnings_dialectlevel(self):
        """Dialect-level convert_unicode warns, and with unicode binds
        disabled the bind processor encodes text to utf-8 bytes."""
        with testing.expect_deprecated(
            "The create_engine.convert_unicode parameter and "
            "corresponding dialect-level"
        ):
            dialect = default.DefaultDialect(convert_unicode=True)
        dialect.supports_unicode_binds = False

        proc = String().dialect_impl(dialect).bind_processor(dialect)
        proc(util.b("x"))
        encoded = proc(self.data)
        assert isinstance(encoded, util.binary_type)
        eq_(encoded, self.data.encode("utf-8"))

    def test_ignoring_unicode_error(self):
        """checks String(unicode_error='ignore') is passed to
        underlying codec."""
        with testing.expect_deprecated(
            "The String.convert_unicode parameter is deprecated and "
            "will be removed in a future release.",
            "The String.unicode_errors parameter is deprecated and "
            "will be removed in a future release.",
        ):
            type_ = String(
                248, convert_unicode="force", unicode_error="ignore"
            )

        dialect = default.DefaultDialect(encoding="ascii")
        proc = type_.result_processor(dialect, 10)

        utf8_bytes = self.data.encode("utf8")
        eq_(
            proc(utf8_bytes),
            self.data.encode("ascii", "ignore").decode(),
        )
def test_quoted_column_unicode(self):
    """Round-trip unicode data through a column whose name is itself
    unicode."""
    colname = u("méil")
    value = u("’é")
    tbl = Table(
        "atable",
        self.metadata,
        Column(colname, Unicode(255), primary_key=True),
    )
    self.metadata.create_all()
    tbl.insert().execute({colname: value})
    fetched = testing.db.execute(
        tbl.select().where(tbl.c[colname] == value)
    ).scalar()
    eq_(fetched, value)
def test_unicode_roundtrip(self):
    """MySQL SET with unicode labels round-trips a set of values."""
    set_table = Table(
        "t",
        self.metadata,
        Column("id", Integer, primary_key=True),
        Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
    )
    set_table.create()

    stored = set([u("réveillé"), u("drôle")])
    with testing.db.begin() as conn:
        conn.execute(set_table.insert(), {"data": stored})
        eq_(conn.execute(set_table.select()).first(), (1, stored))
def test_reflect_unicode_no_nvarchar(self):
    """Unicode(255) reflects back as plain VARCHAR here, and unicode
    data still round-trips as text."""
    Table("tnv", self.metadata, Column("data", sqltypes.Unicode(255)))
    self.metadata.create_all()

    reflected = Table("tnv", MetaData(testing.db), autoload=True)
    assert isinstance(reflected.c.data.type, sqltypes.VARCHAR)

    if testing.against("oracle+cx_oracle"):
        # without NVARCHAR, cx_oracle uses its plain string impl
        assert isinstance(
            reflected.c.data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleString,
        )

    value = u("m’a réveillé.")
    reflected.insert().execute(data=value)
    fetched = reflected.select().execute().first()["data"]
    eq_(fetched, value)
    assert isinstance(fetched, util.text_type)
def test_col_targeting(self):
    """Result rows can be addressed via Column objects looked up by
    their unicode names/keys."""
    t1.insert().execute({u("méil"): 1, ue("\u6e2c\u8a66"): 5})
    t2.insert().execute({u("a"): 1, u("b"): 1})
    t3.insert().execute(
        {
            ue("\u6e2c\u8a66_id"): 1,
            ue("unitable1_\u6e2c\u8a66"): 5,
            u("Unitéble2_b"): 1,
            ue("\u6e2c\u8a66_self"): 1,
        }
    )

    # (table, [(column key, expected value), ...])
    expectations = [
        (t1, [(u("méil"), 1), (ue("\u6e2c\u8a66"), 5)]),
        (t2, [(u("a"), 1), (u("b"), 1)]),
        (
            t3,
            [
                (ue("\u6e2c\u8a66_id"), 1),
                (ue("unitable1_\u6e2c\u8a66"), 5),
                (u("Unitéble2_b"), 1),
                (ue("\u6e2c\u8a66_self"), 1),
            ],
        ),
    ]
    for tbl, pairs in expectations:
        row = tbl.select().execute().first()
        for key, expected in pairs:
            eq_(row[tbl.c[key]], expected)
def test_charset_collate_table(self):
    """Table-level charset/collation options round-trip through
    reflection; data still comes back as unicode text."""
    tbl = Table(
        "foo",
        self.metadata,
        Column("id", Integer),
        Column("data", UnicodeText),
        mysql_default_charset="utf8",
        mysql_collate="utf8_bin",
    )
    tbl.create()

    reflected = Table("foo", MetaData(testing.db), autoload=True)
    eq_(reflected.kwargs["mysql_collate"], "utf8_bin")
    # note: the reflected charset key literally contains a space
    eq_(reflected.kwargs["mysql_default charset"], "utf8")

    # test [ticket:2906]
    # in order to test the condition here, need to use
    # MySQLdb 1.2.3 and also need to pass either use_unicode=1
    # or charset=utf8 to the URL.
    tbl.insert().execute(id=1, data=u("some text"))
    assert isinstance(
        testing.db.scalar(select([tbl.c.data])), util.text_type
    )
class SetInputSizesTest(fixtures.TestBase):
    """Verify the cx_oracle dialect passes the expected DBAPI type to
    cursor.setinputsizes() for each SQLAlchemy type."""

    __only_on__ = "oracle+cx_oracle"
    __backend__ = True

    @testing.combinations(
        # (datatype, value, sis_value_text, set_nchar_flag)
        # sis_value_text: name of a cx_oracle DBAPI type attribute
        # expected in setinputsizes(); a plain value (int) or None
        # is used directly.  set_nchar_flag: run against an engine
        # with use_nchar_for_unicode=True.
        (SmallInteger, 25, int, False),
        (Integer, 25, int, False),
        (Numeric(10, 8), decimal.Decimal("25.34534"), None, False),
        (Float(15), 25.34534, None, False),
        (oracle.BINARY_DOUBLE, 25.34534, "NATIVE_FLOAT", False),
        (oracle.BINARY_FLOAT, 25.34534, "NATIVE_FLOAT", False),
        (oracle.DOUBLE_PRECISION, 25.34534, None, False),
        (Unicode(30), u("test"), "NCHAR", True),
        (UnicodeText(), u("test"), "NCLOB", True),
        (Unicode(30), u("test"), None, False),
        (UnicodeText(), u("test"), "CLOB", False),
        (String(30), "test", None, False),
        (CHAR(30), "test", "FIXED_CHAR", False),
        (NCHAR(30), u("test"), "FIXED_NCHAR", False),
        (oracle.LONG(), "test", None, False),
    )
    @testing.provide_metadata
    def test_setinputsizes(
        self, datatype, value, sis_value_text, set_nchar_flag
    ):
        # resolve the expected setinputsizes() argument: a string
        # names a cx_oracle DBAPI type attribute, anything else is
        # used as-is (value or None)
        if isinstance(sis_value_text, str):
            sis_value = getattr(testing.db.dialect.dbapi, sis_value_text)
        else:
            sis_value = sis_value_text

        class TestTypeDec(TypeDecorator):
            # a TypeDecorator that resolves to the datatype under
            # test only on the oracle dialect
            impl = NullType()

            def load_dialect_impl(self, dialect):
                if dialect.name == "oracle":
                    return dialect.type_descriptor(datatype)
                else:
                    return self.impl

        m = self.metadata

        # Oracle can have only one column of type LONG so we make three
        # tables rather than one table w/ three columns
        t1 = Table("t1", m, Column("foo", datatype))

        t2 = Table(
            "t2",
            m,
            Column("foo", NullType().with_variant(datatype, "oracle")),
        )
        t3 = Table("t3", m, Column("foo", TestTypeDec()))
        m.create_all()

        class CursorWrapper(object):
            # cx_oracle cursor can't be modified so we have to
            # invent a whole wrapping scheme

            def __init__(self, connection_fairy):
                self.cursor = connection_fairy.connection.cursor()
                self.mock = mock.Mock()
                # stash the mock on the fairy so the test can read
                # back the recorded setinputsizes() calls
                connection_fairy.info["mock"] = self.mock

            def setinputsizes(self, *arg, **kw):
                # record the call, then forward to the real cursor
                self.mock.setinputsizes(*arg, **kw)
                self.cursor.setinputsizes(*arg, **kw)

            def __getattr__(self, key):
                # everything else delegates to the real cursor
                return getattr(self.cursor, key)

        if set_nchar_flag:
            engine = testing_engine(
                options={"use_nchar_for_unicode": True}
            )
        else:
            engine = testing.db

        with engine.connect() as conn:
            connection_fairy = conn.connection
            for tab in [t1, t2, t3]:
                # patch the fairy to hand out wrapped cursors so the
                # setinputsizes() call made by the execute is recorded
                with mock.patch.object(
                    connection_fairy,
                    "cursor",
                    lambda: CursorWrapper(connection_fairy),
                ):
                    conn.execute(tab.insert(), {"foo": value})
                    if sis_value:
                        eq_(
                            conn.info["mock"].mock_calls,
                            [mock.call.setinputsizes(foo=sis_value)],
                        )
                    else:
                        # no type expected: setinputsizes() is still
                        # called, with no arguments
                        eq_(
                            conn.info["mock"].mock_calls,
                            [mock.call.setinputsizes()],
                        )

    def test_event_no_native_float(self):
        """The do_setinputsizes event can strip NATIVE_FLOAT entries,
        turning the BINARY_FLOAT case into the no-sis-value case."""

        def _remove_type(inputsizes, cursor, statement, parameters, context):
            # iterate a copy since entries are deleted during the loop
            for param, dbapitype in list(inputsizes.items()):
                if dbapitype is testing.db.dialect.dbapi.NATIVE_FLOAT:
                    del inputsizes[param]

        event.listen(testing.db, "do_setinputsizes", _remove_type)
        try:
            self.test_setinputsizes(oracle.BINARY_FLOAT, 25.34534, None, False)
        finally:
            # always detach the listener so other tests are unaffected
            event.remove(testing.db, "do_setinputsizes", _remove_type)
def setup_class(cls):
    """Build three tables exercising unicode table, column and foreign
    key names, then create them in the database."""
    global metadata, t1, t2, t3
    metadata = MetaData(testing.db)

    # table with a unicode column name and an additional CJK column
    t1 = Table(
        u("unitable1"),
        metadata,
        Column(u("méil"), Integer, primary_key=True),
        Column(ue("\u6e2c\u8a66"), Integer),
        test_needs_fk=True,
    )
    # unicode table name; columns keyed as "a"/"b" with a FK to t1's
    # unicode-named primary key
    t2 = Table(
        u("Unitéble2"),
        metadata,
        Column(u("méil"), Integer, primary_key=True, key="a"),
        Column(
            ue("\u6e2c\u8a66"),
            Integer,
            ForeignKey(u("unitable1.méil")),
            key="b",
        ),
        test_needs_fk=True,
    )

    # Few DBs support Unicode foreign keys
    if testing.against("sqlite"):
        # full version: FKs reference the unicode-named columns,
        # including a self-referential FK
        t3 = Table(
            ue("\u6e2c\u8a66"),
            metadata,
            Column(
                ue("\u6e2c\u8a66_id"),
                Integer,
                primary_key=True,
                autoincrement=False,
            ),
            Column(
                ue("unitable1_\u6e2c\u8a66"),
                Integer,
                ForeignKey(ue("unitable1.\u6e2c\u8a66")),
            ),
            Column(u("Unitéble2_b"), Integer, ForeignKey(u("Unitéble2.b"))),
            Column(
                ue("\u6e2c\u8a66_self"),
                Integer,
                ForeignKey(ue("\u6e2c\u8a66.\u6e2c\u8a66_id")),
            ),
            test_needs_fk=True,
        )
    else:
        # fallback: same columns without the unicode FK constraints
        t3 = Table(
            ue("\u6e2c\u8a66"),
            metadata,
            Column(
                ue("\u6e2c\u8a66_id"),
                Integer,
                primary_key=True,
                autoincrement=False,
            ),
            Column(ue("unitable1_\u6e2c\u8a66"), Integer),
            Column(u("Unitéble2_b"), Integer),
            Column(ue("\u6e2c\u8a66_self"), Integer),
            test_needs_fk=True,
        )
    metadata.create_all()
def test_unicode_default(self):
    """A unicode default value is accepted by Column()."""
    Column(Unicode(32), default=u("foo"))
def test_string_text_literal_binds_explicit_unicode_right(self):
    # an explicit unicode value on the right side of a String
    # comparison renders as a plain (non-N'') string literal
    expr = column("x", String()) == util.u("foo")
    self.assert_compile(expr, "x = 'foo'", literal_binds=True)
def test_unicode_enum(self):
    """Round-trip unicode labels through both the generic Enum and the
    MySQL-native ENUM, then reflect the labels back."""
    metadata = self.metadata
    t1 = Table(
        "table",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("value", Enum(u("réveillé"), u("drôle"), u("S’il"))),
        Column("value2", mysql.ENUM(u("réveillé"), u("drôle"), u("S’il"))),
    )
    metadata.create_all()

    # insert each label through both the generic and native columns
    t1.insert().execute(value=u("drôle"), value2=u("drôle"))
    t1.insert().execute(value=u("réveillé"), value2=u("réveillé"))
    t1.insert().execute(value=u("S’il"), value2=u("S’il"))
    eq_(
        t1.select().order_by(t1.c.id).execute().fetchall(),
        [
            (1, u("drôle"), u("drôle")),
            (2, u("réveillé"), u("réveillé")),
            (3, u("S’il"), u("S’il")),
        ],
    )

    # test reflection of the enum labels
    m2 = MetaData(testing.db)
    t2 = Table("table", m2, autoload=True)

    # TODO: what's wrong with the last element ? is there
    # latin-1 stuff forcing its way in ?
    # only the first two labels are compared; the third (u'S’il')
    # is deliberately excluded pending the question above

    eq_(t2.c.value.type.enums[0:2], [u("réveillé"), u("drôle")])  # u'S’il') # eh ?

    eq_(t2.c.value2.type.enums[0:2], [u("réveillé"), u("drôle")])  # u'S’il') # eh ?