def test_boolean(self):
    """Exercise type specification for boolean type."""

    columns = [
        # column type, args, kwargs, expected ddl
        (Boolean, [], {}, "BIT")
    ]

    metadata = MetaData()
    table_args = ["test_mssql_boolean", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res = spec
        table_args.append(
            Column("c%s" % index, type_(*args, **kw), nullable=None)
        )
    boolean_table = Table(*table_args)
    dialect = mssql.dialect()
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))

    for col in boolean_table.c:
        index = int(col.name[1:])
        testing.eq_(
            gen.get_column_specification(col),
            "%s %s" % (col.name, columns[index][3]),
        )
        self.assert_(repr(col))
def test_binary(self):
    """Exercise type specification for binary types."""

    columns = [
        # column type, args, kwargs, expected ddl
        (mssql.MSBinary, [], {}, "BINARY"),
        (mssql.MSBinary, [10], {}, "BINARY(10)"),
        (types.BINARY, [], {}, "BINARY"),
        (types.BINARY, [10], {}, "BINARY(10)"),
        (mssql.MSVarBinary, [], {}, "VARBINARY(max)"),
        (mssql.MSVarBinary, [10], {}, "VARBINARY(10)"),
        (types.VARBINARY, [10], {}, "VARBINARY(10)"),
        (types.VARBINARY, [], {}, "VARBINARY(max)"),
        (mssql.MSImage, [], {}, "IMAGE"),
        (mssql.IMAGE, [], {}, "IMAGE"),
        (types.LargeBinary, [], {}, "IMAGE"),
    ]

    metadata = MetaData()
    table_args = ["test_mssql_binary", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res = spec
        table_args.append(
            Column("c%s" % index, type_(*args, **kw), nullable=None)
        )
    binary_table = Table(*table_args)
    dialect = mssql.dialect()
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(binary_table))

    for col in binary_table.c:
        index = int(col.name[1:])
        testing.eq_(
            gen.get_column_specification(col),
            "%s %s" % (col.name, columns[index][3]),
        )
        self.assert_(repr(col))
def _assert_sql(self, element, legacy_sql, modern_sql=None):
    dialect = self._legacy_dialect()

    self.assert_compile(element, legacy_sql, dialect=dialect)

    dialect = mssql.dialect()
    # "foob" is a deliberately non-matching placeholder: if modern_sql is
    # not supplied, the comparison against the modern dialect fails loudly
    # rather than silently passing
    self.assert_compile(element, modern_sql or "foob", dialect=dialect)
def test_binary_reflection(self, metadata, deprecate_large_types):
    """Exercise type specification for binary types."""

    columns = [
        # column type, args, kwargs, expected ddl from reflected
        (mssql.MSBinary, [], {}, "BINARY(1)"),
        (mssql.MSBinary, [10], {}, "BINARY(10)"),
        (types.BINARY, [], {}, "BINARY(1)"),
        (types.BINARY, [10], {}, "BINARY(10)"),
        (mssql.MSVarBinary, [], {}, "VARBINARY(max)"),
        (mssql.MSVarBinary, [10], {}, "VARBINARY(10)"),
        (types.VARBINARY, [10], {}, "VARBINARY(10)"),
        (types.VARBINARY, [], {}, "VARBINARY(max)"),
        (mssql.MSImage, [], {}, "IMAGE"),
        (mssql.IMAGE, [], {}, "IMAGE"),
        (
            types.LargeBinary,
            [],
            {},
            "IMAGE" if not deprecate_large_types else "VARBINARY(max)",
        ),
    ]

    engine = engines.testing_engine(
        options={"deprecate_large_types": deprecate_large_types}
    )
    with engine.begin() as conn:
        table_args = ["test_mssql_binary", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column("c%s" % index, type_(*args, **kw), nullable=None)
            )
        binary_table = Table(*table_args)
        metadata.create_all(conn)
        reflected_binary = Table(
            "test_mssql_binary", MetaData(), autoload_with=conn
        )
        for col, spec in zip(reflected_binary.c, columns):
            eq_(
                col.type.compile(dialect=mssql.dialect()),
                spec[3],
                "column %s %s != %s"
                % (
                    col.key,
                    col.type.compile(dialect=conn.dialect),
                    spec[3],
                ),
            )
            c1 = conn.dialect.type_descriptor(col.type).__class__
            c2 = conn.dialect.type_descriptor(
                binary_table.c[col.name].type
            ).__class__
            assert issubclass(
                c1, c2
            ), "column %s: %r is not a subclass of %r" % (col.key, c1, c2)
            if binary_table.c[col.name].type.length:
                testing.eq_(
                    col.type.length, binary_table.c[col.name].type.length
                )
class StringTest(fixtures.TestBase, AssertsCompiledSQL):
    __dialect__ = mssql.dialect()

    def test_unicode_literal_binds(self):
        self.assert_compile(
            column("x", Unicode()) == "foo", "x = N'foo'", literal_binds=True
        )

    def test_unicode_text_literal_binds(self):
        self.assert_compile(
            column("x", UnicodeText()) == "foo",
            "x = N'foo'",
            literal_binds=True,
        )

    def test_string_text_literal_binds(self):
        self.assert_compile(
            column("x", String()) == "foo", "x = 'foo'", literal_binds=True
        )

    def test_string_text_literal_binds_explicit_unicode_right(self):
        self.assert_compile(
            column("x", String()) == "foo",
            "x = 'foo'",
            literal_binds=True,
        )

    @testing.combinations(None, String(), Unicode(), argnames="coltype")
    @testing.combinations(None, String(), Unicode(), argnames="literaltype")
    @testing.combinations(
        "réve🐍 illé", "hello", "réveillé", argnames="value"
    )
    def test_string_text_explicit_literal_binds(
        self, coltype, literaltype, value
    ):
        """test #7551, dynamic coercion for string literals"""
        lhs = column("x", coltype)
        rhs = literal(value, type_=literaltype)

        rhs_force_unicode = isinstance(literaltype, Unicode)
        rhs_tests_as_unicode = literaltype is None and value != "hello"

        should_it_be_n = rhs_force_unicode or rhs_tests_as_unicode

        if should_it_be_n:
            self.assert_compile(
                lhs == rhs,
                f"x = N'{value}'",
                literal_binds=True,
            )
        else:
            self.assert_compile(
                lhs == rhs,
                f"x = '{value}'",
                literal_binds=True,
            )

    def test_text_text_literal_binds(self):
        self.assert_compile(
            column("x", Text()) == "foo", "x = 'foo'", literal_binds=True
        )
def test_large_type_deprecation(self):
    d1 = mssql.dialect(deprecate_large_types=True)
    d2 = mssql.dialect(deprecate_large_types=False)
    d3 = mssql.dialect()
    d3.server_version_info = (11, 0)
    d3._setup_version_attributes()
    d4 = mssql.dialect()
    d4.server_version_info = (10, 0)
    d4._setup_version_attributes()

    for dialect in (d1, d3):
        eq_(str(Text().compile(dialect=dialect)), "VARCHAR(max)")
        eq_(str(UnicodeText().compile(dialect=dialect)), "NVARCHAR(max)")
        eq_(str(LargeBinary().compile(dialect=dialect)), "VARBINARY(max)")

    for dialect in (d2, d4):
        eq_(str(Text().compile(dialect=dialect)), "TEXT")
        eq_(str(UnicodeText().compile(dialect=dialect)), "NTEXT")
        eq_(str(LargeBinary().compile(dialect=dialect)), "IMAGE")
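# Illustrative sketch, not part of the original test suite: outside of the
# tests, the same behavior is normally selected by passing the documented
# ``deprecate_large_types`` argument to ``create_engine()`` for the mssql
# dialects.  The connection URL below is a hypothetical placeholder.
def _example_engine_with_deprecated_large_types():
    from sqlalchemy import create_engine

    # With the flag enabled, Text / UnicodeText / LargeBinary render as
    # VARCHAR(max) / NVARCHAR(max) / VARBINARY(max) instead of
    # TEXT / NTEXT / IMAGE, matching the assertions above.
    return create_engine(
        "mssql+pyodbc://scott:tiger@some_dsn",
        deprecate_large_types=True,
    )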
def test_VARBINARY_init(self):
    d = mssql.dialect()
    t = mssql.MSVarBinary(length=None, filestream=True)
    eq_(str(t.compile(dialect=d)), "VARBINARY(max) FILESTREAM")
    t = mssql.MSVarBinary(length="max", filestream=True)
    eq_(str(t.compile(dialect=d)), "VARBINARY(max) FILESTREAM")
    with expect_raises_message(
        ValueError,
        "length must be None or 'max' when setting filestream",
    ):
        mssql.MSVarBinary(length=1000, filestream=True)
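# Illustrative sketch, not part of the original test suite: a hypothetical
# table declaring a FILESTREAM-enabled VARBINARY(max) column with the same
# ``filestream=True`` flag exercised above.  Table and column names are
# made up for illustration only.
def _example_filestream_table():
    md = MetaData()
    return Table(
        "documents",
        md,
        Column("doc_id", types.Integer, primary_key=True),
        # renders as "contents VARBINARY(max) FILESTREAM" under the mssql
        # DDL compiler, per the assertions above
        Column("contents", mssql.MSVarBinary(length="max", filestream=True)),
    )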
def test_other_programming_error_on_rollback(self):
    """test #8231"""

    class ProgrammingError(Exception):
        pass

    dialect = base.dialect(ignore_no_transaction_on_rollback=True)
    dialect.dbapi = mock.Mock(ProgrammingError=ProgrammingError)
    connection = mock.Mock(
        rollback=mock.Mock(
            side_effect=ProgrammingError("Some other error happened")
        )
    )
    with expect_raises_message(
        ProgrammingError, "Some other error happened"
    ):
        dialect.do_rollback(connection)
def test_ignore_no_transaction_on_rollback(self):
    """test #8231"""

    class ProgrammingError(Exception):
        pass

    dialect = base.dialect(ignore_no_transaction_on_rollback=True)
    dialect.dbapi = mock.Mock(ProgrammingError=ProgrammingError)
    connection = mock.Mock(
        rollback=mock.Mock(
            side_effect=ProgrammingError("Error 111214 happened")
        )
    )
    with expect_warnings(
        "ProgrammingError 111214 'No corresponding transaction found.' "
        "has been suppressed via ignore_no_transaction_on_rollback=True"
    ):
        dialect.do_rollback(connection)
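# Illustrative sketch, not part of the original test suite: the
# ``ignore_no_transaction_on_rollback`` flag tested above is normally passed
# through ``create_engine()``; the connection URL is a hypothetical
# placeholder.
def _example_engine_ignoring_no_transaction_on_rollback():
    from sqlalchemy import create_engine

    # Error 111214 ("No corresponding transaction found") raised by the
    # driver during rollback is suppressed with a warning instead of
    # propagating, as asserted in the test above.
    return create_engine(
        "mssql+pyodbc://scott:tiger@some_dsn",
        ignore_no_transaction_on_rollback=True,
    )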
class StringTest(fixtures.TestBase, AssertsCompiledSQL):
    __dialect__ = mssql.dialect()

    def test_unicode_literal_binds(self):
        self.assert_compile(
            column("x", Unicode()) == "foo", "x = N'foo'", literal_binds=True
        )

    def test_unicode_text_literal_binds(self):
        self.assert_compile(
            column("x", UnicodeText()) == "foo",
            "x = N'foo'",
            literal_binds=True,
        )

    def test_string_text_literal_binds(self):
        self.assert_compile(
            column("x", String()) == "foo", "x = 'foo'", literal_binds=True
        )

    def test_string_text_literal_binds_explicit_unicode_right(self):
        self.assert_compile(
            column("x", String()) == util.u("foo"),
            "x = 'foo'",
            literal_binds=True,
        )

    def test_string_text_explicit_literal_binds(self):
        # the literal expression here coerces the right side to
        # Unicode on Python 3 for plain string, test with unicode
        # string just to confirm literal is doing this
        self.assert_compile(
            column("x", String()) == literal(util.u("foo")),
            "x = N'foo'",
            literal_binds=True,
        )

    def test_text_text_literal_binds(self):
        self.assert_compile(
            column("x", Text()) == "foo", "x = 'foo'", literal_binds=True
        )
def test_money(self):
    """Exercise type specification for money types."""

    columns = [
        (mssql.MSMoney, [], {}, "MONEY"),
        (mssql.MSSmallMoney, [], {}, "SMALLMONEY"),
    ]
    metadata = MetaData()
    table_args = ["test_mssql_money", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res = spec
        table_args.append(
            Column("c%s" % index, type_(*args, **kw), nullable=None)
        )
    money_table = Table(*table_args)
    dialect = mssql.dialect()
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(money_table))

    for col in money_table.c:
        index = int(col.name[1:])
        testing.eq_(
            gen.get_column_specification(col),
            "%s %s" % (col.name, columns[index][3]),
        )
        self.assert_(repr(col))
def test_numeric(self):
    """Exercise type specification and options for numeric types."""

    columns = [
        # column type, args, kwargs, expected ddl
        (types.NUMERIC, [], {}, "NUMERIC"),
        (types.NUMERIC, [None], {}, "NUMERIC"),
        (types.NUMERIC, [12, 4], {}, "NUMERIC(12, 4)"),
        (types.Float, [], {}, "FLOAT"),
        (types.Float, [None], {}, "FLOAT"),
        (types.Float, [12], {}, "FLOAT(12)"),
        (mssql.MSReal, [], {}, "REAL"),
        (types.Integer, [], {}, "INTEGER"),
        (types.BigInteger, [], {}, "BIGINT"),
        (mssql.MSTinyInteger, [], {}, "TINYINT"),
        (types.SmallInteger, [], {}, "SMALLINT"),
    ]

    metadata = MetaData()
    table_args = ["test_mssql_numeric", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res = spec
        table_args.append(
            Column("c%s" % index, type_(*args, **kw), nullable=None)
        )
    numeric_table = Table(*table_args)
    dialect = mssql.dialect()
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))

    for col in numeric_table.c:
        index = int(col.name[1:])
        testing.eq_(
            gen.get_column_specification(col),
            "%s %s" % (col.name, columns[index][3]),
        )
        self.assert_(repr(col))
def test_char(self):
    """Exercise COLLATE-ish options on string types."""

    columns = [
        (mssql.MSChar, [], {}, "CHAR"),
        (mssql.MSChar, [1], {}, "CHAR(1)"),
        (
            mssql.MSChar,
            [1],
            {"collation": "Latin1_General_CI_AS"},
            "CHAR(1) COLLATE Latin1_General_CI_AS",
        ),
        (mssql.MSNChar, [], {}, "NCHAR"),
        (mssql.MSNChar, [1], {}, "NCHAR(1)"),
        (
            mssql.MSNChar,
            [1],
            {"collation": "Latin1_General_CI_AS"},
            "NCHAR(1) COLLATE Latin1_General_CI_AS",
        ),
        (mssql.MSString, [], {}, "VARCHAR(max)"),
        (mssql.MSString, [1], {}, "VARCHAR(1)"),
        (
            mssql.MSString,
            [1],
            {"collation": "Latin1_General_CI_AS"},
            "VARCHAR(1) COLLATE Latin1_General_CI_AS",
        ),
        (mssql.MSNVarchar, [], {}, "NVARCHAR(max)"),
        (mssql.MSNVarchar, [1], {}, "NVARCHAR(1)"),
        (
            mssql.MSNVarchar,
            [1],
            {"collation": "Latin1_General_CI_AS"},
            "NVARCHAR(1) COLLATE Latin1_General_CI_AS",
        ),
        (mssql.MSText, [], {}, "TEXT"),
        (
            mssql.MSText,
            [],
            {"collation": "Latin1_General_CI_AS"},
            "TEXT COLLATE Latin1_General_CI_AS",
        ),
        (mssql.MSNText, [], {}, "NTEXT"),
        (
            mssql.MSNText,
            [],
            {"collation": "Latin1_General_CI_AS"},
            "NTEXT COLLATE Latin1_General_CI_AS",
        ),
    ]

    metadata = MetaData()
    table_args = ["test_mssql_charset", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res = spec
        table_args.append(
            Column("c%s" % index, type_(*args, **kw), nullable=None)
        )
    charset_table = Table(*table_args)
    dialect = mssql.dialect()
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))

    for col in charset_table.c:
        index = int(col.name[1:])
        testing.eq_(
            gen.get_column_specification(col),
            "%s %s" % (col.name, columns[index][3]),
        )
        self.assert_(repr(col))
def _legacy_dialect(self):
    return mssql.dialect(legacy_schema_aliasing=True)
def _legacy_dialect(self):
    with _legacy_schema_aliasing_warning():
        return mssql.dialect(legacy_schema_aliasing=True)
def test_dates(self):
    """Exercise type specification for date types."""

    columns = [
        # column type, args, kwargs, expected ddl, server version
        (mssql.MSDateTime, [], {}, "DATETIME", None),
        (types.DATE, [], {}, "DATE", None),
        (types.Date, [], {}, "DATE", None),
        (types.Date, [], {}, "DATETIME", MS_2005_VERSION),
        (mssql.MSDate, [], {}, "DATE", None),
        (mssql.MSDate, [], {}, "DATETIME", MS_2005_VERSION),
        (types.TIME, [], {}, "TIME", None),
        (types.Time, [], {}, "TIME", None),
        (mssql.MSTime, [], {}, "TIME", None),
        (mssql.MSTime, [1], {}, "TIME(1)", None),
        (types.Time, [], {}, "DATETIME", MS_2005_VERSION),
        (mssql.MSTime, [], {}, "TIME", None),
        (mssql.MSSmallDateTime, [], {}, "SMALLDATETIME", None),
        (mssql.MSDateTimeOffset, [], {}, "DATETIMEOFFSET", None),
        (mssql.MSDateTimeOffset, [1], {}, "DATETIMEOFFSET(1)", None),
        (mssql.MSDateTime2, [], {}, "DATETIME2", None),
        (mssql.MSDateTime2, [0], {}, "DATETIME2(0)", None),
        (mssql.MSDateTime2, [1], {}, "DATETIME2(1)", None),
        (mssql.MSTime, [0], {}, "TIME(0)", None),
        (mssql.MSDateTimeOffset, [0], {}, "DATETIMEOFFSET(0)", None),
        (types.DateTime, [], {"timezone": True}, "DATETIMEOFFSET", None),
        (types.DateTime, [], {"timezone": False}, "DATETIME", None),
    ]

    metadata = MetaData()
    table_args = ["test_mssql_dates", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res, server_version = spec
        table_args.append(
            Column("c%s" % index, type_(*args, **kw), nullable=None)
        )
    date_table = Table(*table_args)

    dialect = mssql.dialect()
    dialect.server_version_info = MS_2008_VERSION
    ms_2005_dialect = mssql.dialect()
    ms_2005_dialect.server_version_info = MS_2005_VERSION
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(date_table))
    gen2005 = ms_2005_dialect.ddl_compiler(
        ms_2005_dialect, schema.CreateTable(date_table)
    )

    for col in date_table.c:
        index = int(col.name[1:])
        server_version = columns[index][4]
        if not server_version:
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]),
            )
        else:
            testing.eq_(
                gen2005.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]),
            )

        self.assert_(repr(col))