Example #1
    def _assert_sql(self, element, legacy_sql, modern_sql=None):
        dialect = mssql.dialect(legacy_schema_aliasing=True)

        self.assert_compile(element, legacy_sql, dialect=dialect)

        dialect = mssql.dialect()
        self.assert_compile(element, modern_sql or "foob", dialect=dialect)
Example #2
    def test_dates(self):
        "Exercise type specification for date types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSDateTime, [], {}, "DATETIME", None),
            (types.DATE, [], {}, "DATE", None),
            (types.Date, [], {}, "DATE", None),
            (types.Date, [], {}, "DATETIME", MS_2005_VERSION),
            (mssql.MSDate, [], {}, "DATE", None),
            (mssql.MSDate, [], {}, "DATETIME", MS_2005_VERSION),
            (types.TIME, [], {}, "TIME", None),
            (types.Time, [], {}, "TIME", None),
            (mssql.MSTime, [], {}, "TIME", None),
            (mssql.MSTime, [1], {}, "TIME(1)", None),
            (types.Time, [], {}, "DATETIME", MS_2005_VERSION),
            (mssql.MSTime, [], {}, "TIME", None),
            (mssql.MSSmallDateTime, [], {}, "SMALLDATETIME", None),
            (mssql.MSDateTimeOffset, [], {}, "DATETIMEOFFSET", None),
            (mssql.MSDateTimeOffset, [1], {}, "DATETIMEOFFSET(1)", None),
            (mssql.MSDateTime2, [], {}, "DATETIME2", None),
            (mssql.MSDateTime2, [0], {}, "DATETIME2(0)", None),
            (mssql.MSDateTime2, [1], {}, "DATETIME2(1)", None),
            (mssql.MSTime, [0], {}, "TIME(0)", None),
            (mssql.MSDateTimeOffset, [0], {}, "DATETIMEOFFSET(0)", None),
        ]

        metadata = MetaData()
        table_args = ["test_mssql_dates", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res, server_version = spec
            table_args.append(
                Column("c%s" % index, type_(*args, **kw), nullable=None)
            )

        date_table = Table(*table_args)
        dialect = mssql.dialect()
        dialect.server_version_info = MS_2008_VERSION
        ms_2005_dialect = mssql.dialect()
        ms_2005_dialect.server_version_info = MS_2005_VERSION
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(date_table))
        gen2005 = ms_2005_dialect.ddl_compiler(
            ms_2005_dialect, schema.CreateTable(date_table)
        )

        for col in date_table.c:
            index = int(col.name[1:])
            server_version = columns[index][4]
            if not server_version:
                testing.eq_(
                    gen.get_column_specification(col),
                    "%s %s" % (col.name, columns[index][3]),
                )
            else:
                testing.eq_(
                    gen2005.get_column_specification(col),
                    "%s %s" % (col.name, columns[index][3]),
                )

            self.assert_(repr(col))
Example #3
    def test_numeric(self):
        "Exercise type specification and options for numeric types."

        columns = [
            # column type, args, kwargs, expected ddl
            (types.NUMERIC, [], {}, "NUMERIC"),
            (types.NUMERIC, [None], {}, "NUMERIC"),
            (types.NUMERIC, [12, 4], {}, "NUMERIC(12, 4)"),
            (types.Float, [], {}, "FLOAT"),
            (types.Float, [None], {}, "FLOAT"),
            (types.Float, [12], {}, "FLOAT(12)"),
            (mssql.MSReal, [], {}, "REAL"),
            (types.Integer, [], {}, "INTEGER"),
            (types.BigInteger, [], {}, "BIGINT"),
            (mssql.MSTinyInteger, [], {}, "TINYINT"),
            (types.SmallInteger, [], {}, "SMALLINT"),
        ]

        metadata = MetaData()
        table_args = ["test_mssql_numeric", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column("c%s" % index, type_(*args, **kw), nullable=None))

        numeric_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))

        for col in numeric_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col), "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #4
    def test_money(self):
        "Exercise type specification for money types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSMoney, [], {},
             'MONEY'),
            (mssql.MSSmallMoney, [], {},
             'SMALLMONEY'),
           ]

        table_args = ['test_mssql_money', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))

        money_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(money_table))

        for col in money_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col),
                           "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))

        money_table.create(checkfirst=True)
        money_table.drop()
Example #5
    def test_char(self):
        """Exercise COLLATE-ish options on string types."""

        columns = [
            (mssql.MSChar, [], {},
             'CHAR'),
            (mssql.MSChar, [1], {},
             'CHAR(1)'),
            (mssql.MSChar, [1], {'collation': 'Latin1_General_CI_AS'},
             'CHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSNChar, [], {},
             'NCHAR'),
            (mssql.MSNChar, [1], {},
             'NCHAR(1)'),
            (mssql.MSNChar, [1], {'collation': 'Latin1_General_CI_AS'},
             'NCHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSString, [], {},
             'VARCHAR(max)'),
            (mssql.MSString, [1], {},
             'VARCHAR(1)'),
            (mssql.MSString, [1], {'collation': 'Latin1_General_CI_AS'},
             'VARCHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSNVarchar, [], {},
             'NVARCHAR(max)'),
            (mssql.MSNVarchar, [1], {},
             'NVARCHAR(1)'),
            (mssql.MSNVarchar, [1], {'collation': 'Latin1_General_CI_AS'},
             'NVARCHAR(1) COLLATE Latin1_General_CI_AS'),

            (mssql.MSText, [], {},
             'TEXT'),
            (mssql.MSText, [], {'collation': 'Latin1_General_CI_AS'},
             'TEXT COLLATE Latin1_General_CI_AS'),

            (mssql.MSNText, [], {},
             'NTEXT'),
            (mssql.MSNText, [], {'collation': 'Latin1_General_CI_AS'},
             'NTEXT COLLATE Latin1_General_CI_AS'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_charset', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        charset_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))

        for col in charset_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #6
    def test_boolean(self):
        "Exercise type specification for boolean type."

        columns = [
            # column type, args, kwargs, expected ddl
            (Boolean, [], {},
             'BIT'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_boolean', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        boolean_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))

        for col in boolean_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #7
    def test_binary(self):
        "Exercise type specification for binary types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSBinary, [], {}, "BINARY"),
            (mssql.MSBinary, [10], {}, "BINARY(10)"),
            (types.BINARY, [], {}, "BINARY"),
            (types.BINARY, [10], {}, "BINARY(10)"),
            (mssql.MSVarBinary, [], {}, "VARBINARY(max)"),
            (mssql.MSVarBinary, [10], {}, "VARBINARY(10)"),
            (types.VARBINARY, [10], {}, "VARBINARY(10)"),
            (types.VARBINARY, [], {}, "VARBINARY(max)"),
            (mssql.MSImage, [], {}, "IMAGE"),
            (mssql.IMAGE, [], {}, "IMAGE"),
            (types.LargeBinary, [], {}, "IMAGE"),
        ]

        metadata = MetaData()
        table_args = ["test_mssql_binary", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column("c%s" % index, type_(*args, **kw), nullable=None)
            )
        binary_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(binary_table))
        for col in binary_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]),
            )
            self.assert_(repr(col))
Example #8
    def _assert_sql(self, element, legacy_sql, modern_sql=None):
        dialect = mssql.dialect()

        with assertions.expect_warnings(
                "legacy_schema_aliasing flag is defaulted to True.*"):
            self.assert_compile(
                element,
                legacy_sql,
                dialect=dialect
            )

        dialect = mssql.dialect(legacy_schema_aliasing=False)
        self.assert_compile(
            element,
            modern_sql or "foob",
            dialect=dialect
        )
Example #9
    def test_info_unicode_cast_no_2000(self):
        dialect = mssql.dialect()
        dialect.server_version_info = base.MS_2000_VERSION
        stmt = tables.c.table_name == 'somename'
        self.assert_compile(
            stmt,
            "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = :table_name_1",
            dialect=dialect
        )
Example #10
    def test_info_unicode_cast(self):
        dialect = mssql.dialect()
        dialect.server_version_info = base.MS_2005_VERSION
        stmt = tables.c.table_name == 'somename'
        self.assert_compile(
            stmt,
            "[TABLES_1].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
            dialect=dialect
        )
Example #11
    def setUp(self):
        t = Table('sometable', MetaData(),
            Column('pk_column', Integer),
            Column('test_column', String)
        )
        self.column = t.c.test_column

        dialect = mssql.dialect()
        self.ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
Example #12
    def test_large_type_deprecation(self):
        d1 = mssql.dialect(deprecate_large_types=True)
        d2 = mssql.dialect(deprecate_large_types=False)
        d3 = mssql.dialect()
        d3.server_version_info = (11, 0)
        d3._setup_version_attributes()
        d4 = mssql.dialect()
        d4.server_version_info = (10, 0)
        d4._setup_version_attributes()

        for dialect in (d1, d3):
            eq_(str(Text().compile(dialect=dialect)), "VARCHAR(max)")
            eq_(str(UnicodeText().compile(dialect=dialect)), "NVARCHAR(max)")
            eq_(str(LargeBinary().compile(dialect=dialect)), "VARBINARY(max)")

        for dialect in (d2, d4):
            eq_(str(Text().compile(dialect=dialect)), "TEXT")
            eq_(str(UnicodeText().compile(dialect=dialect)), "NTEXT")
            eq_(str(LargeBinary().compile(dialect=dialect)), "IMAGE")
Example #13
    def test_info_unicode_cast(self):
        dialect = mssql.dialect()
        dialect.server_version_info = base.MS_2005_VERSION
        stmt = tables.c.table_name == "somename"
        self.assert_compile(
            stmt,
            "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = "
            "CAST(:table_name_1 AS NVARCHAR(max))",
            dialect=dialect,
        )
Example #14
    def test_info_unicode_cast(self):
        dialect = mssql.dialect()
        dialect.server_version_info = base.MS_2005_VERSION
        stmt = tables.c.table_name == "somename"
        self.assert_compile(
            stmt,
            "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = "
            "CAST(:table_name_1 AS NVARCHAR(max))",
            dialect=dialect,
        )
Example #15
    def test_large_type_deprecation(self):
        d1 = mssql.dialect(deprecate_large_types=True)
        d2 = mssql.dialect(deprecate_large_types=False)
        d3 = mssql.dialect()
        d3.server_version_info = (11, 0)
        d3._setup_version_attributes()
        d4 = mssql.dialect()
        d4.server_version_info = (10, 0)
        d4._setup_version_attributes()

        for dialect in (d1, d3):
            eq_(str(Text().compile(dialect=dialect)), "VARCHAR(max)")
            eq_(str(UnicodeText().compile(dialect=dialect)), "NVARCHAR(max)")
            eq_(str(LargeBinary().compile(dialect=dialect)), "VARBINARY(max)")

        for dialect in (d2, d4):
            eq_(str(Text().compile(dialect=dialect)), "TEXT")
            eq_(str(UnicodeText().compile(dialect=dialect)), "NTEXT")
            eq_(str(LargeBinary().compile(dialect=dialect)), "IMAGE")
Example #16
    def setup(self):
        t = Table('sometable', MetaData(),
                  Column('pk_column', Integer),
                  Column('test_column', String)
                  )
        self.column = t.c.test_column

        dialect = mssql.dialect()
        self.ddl_compiler = dialect.ddl_compiler(dialect,
                                                 schema.CreateTable(t))
Example #17
    def test_timestamp(self):
        """Exercise TIMESTAMP column."""

        dialect = mssql.dialect()

        metadata = MetaData()
        spec, expected = (TIMESTAMP, "TIMESTAMP")
        t = Table("mssql_ts", metadata, Column("id", Integer, primary_key=True), Column("t", spec, nullable=None))
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        testing.eq_(gen.get_column_specification(t.c.t), "t %s" % expected)
        self.assert_(repr(t.c.t))
Example #18
    def test_default_schema_name_not_interpreted_as_tokenized(self):
        dialect = mssql.dialect()
        dialect.server_version_info = base.MS_2014_VERSION

        mock_connection = mock.Mock(scalar=lambda sql: "Jonah.The.Whale")
        schema_name = dialect._get_default_schema_name(mock_connection)
        eq_(schema_name, "Jonah.The.Whale")
        eq_(
            base._owner_plus_db(dialect, schema_name),
            (None, "Jonah.The.Whale"),
        )
Example #19
    def test_owner_database_pairs_dont_use_for_same_db(self):
        dialect = mssql.dialect()

        identifier = "my_db.some_schema"
        schema, owner = base._owner_plus_db(dialect, identifier)

        mock_connection = mock.Mock(dialect=dialect,
                                    scalar=mock.Mock(return_value="my_db"))
        mock_lambda = mock.Mock()
        base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
        eq_(mock_connection.mock_calls, [mock.call.scalar("select db_name()")])
        eq_(mock_lambda.mock_calls, [mock.call("x", y="bar")])
Example #20
    def test_timestamp(self):
        """Exercise TIMESTAMP column."""

        dialect = mssql.dialect()

        spec, expected = (TIMESTAMP, 'TIMESTAMP')
        t = Table('mssql_ts', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('t', spec, nullable=None))
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        testing.eq_(gen.get_column_specification(t.c.t), "t %s" % expected)
        self.assert_(repr(t.c.t))
        t.create(checkfirst=True)
Example #21
    def test_timestamp(self):
        """Exercise TIMESTAMP column."""

        dialect = mssql.dialect()

        metadata = MetaData()
        spec, expected = (TIMESTAMP, 'TIMESTAMP')
        t = Table('mssql_ts', metadata, Column('id', Integer,
                                               primary_key=True),
                  Column('t', spec, nullable=None))
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        testing.eq_(gen.get_column_specification(t.c.t), "t %s" % expected)
        self.assert_(repr(t.c.t))
Example #22
    def test_binary(self):
        "Exercise type specification for binary types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSBinary, [], {},
             'BINARY'),
            (types.Binary, [10], {},
             'BINARY(10)'),

            (mssql.MSBinary, [10], {},
             'BINARY(10)'),

            (mssql.MSVarBinary, [], {},
             'VARBINARY'),
            (mssql.MSVarBinary, [10], {},
             'VARBINARY(10)'),

            (mssql.MSImage, [], {},
             'IMAGE'),

            (types.Binary, [], {},
             'IMAGE'),
            (types.Binary, [10], {},
             'BINARY(10)')
            ]

        table_args = ['test_mssql_binary', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))

        binary_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(binary_table))

        for col in binary_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col),
                           "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))

        metadata.create_all()

        reflected_binary = Table('test_mssql_binary', MetaData(testing.db), autoload=True)
        for col in reflected_binary.c:
            c1 = testing.db.dialect.type_descriptor(col.type).__class__
            c2 = testing.db.dialect.type_descriptor(
                binary_table.c[col.name].type).__class__
            assert issubclass(c1, c2), "%r is not a subclass of %r" % (c1, c2)
            if binary_table.c[col.name].type.length:
                testing.eq_(col.type.length, binary_table.c[col.name].type.length)
Example #23
    def test_char(self):
        """Exercise COLLATE-ish options on string types."""

        columns = [
            (mssql.MSChar, [], {}, 'CHAR'),
            (mssql.MSChar, [1], {}, 'CHAR(1)'),
            (mssql.MSChar, [1], {
                'collation': 'Latin1_General_CI_AS'
            }, 'CHAR(1) COLLATE Latin1_General_CI_AS'),
            (mssql.MSNChar, [], {}, 'NCHAR'),
            (mssql.MSNChar, [1], {}, 'NCHAR(1)'),
            (mssql.MSNChar, [1], {
                'collation': 'Latin1_General_CI_AS'
            }, 'NCHAR(1) COLLATE Latin1_General_CI_AS'),
            (mssql.MSString, [], {}, 'VARCHAR(max)'),
            (mssql.MSString, [1], {}, 'VARCHAR(1)'),
            (mssql.MSString, [1], {
                'collation': 'Latin1_General_CI_AS'
            }, 'VARCHAR(1) COLLATE Latin1_General_CI_AS'),
            (mssql.MSNVarchar, [], {}, 'NVARCHAR(max)'),
            (mssql.MSNVarchar, [1], {}, 'NVARCHAR(1)'),
            (mssql.MSNVarchar, [1], {
                'collation': 'Latin1_General_CI_AS'
            }, 'NVARCHAR(1) COLLATE Latin1_General_CI_AS'),
            (mssql.MSText, [], {}, 'TEXT'),
            (mssql.MSText, [], {
                'collation': 'Latin1_General_CI_AS'
            }, 'TEXT COLLATE Latin1_General_CI_AS'),
            (mssql.MSNText, [], {}, 'NTEXT'),
            (mssql.MSNText, [], {
                'collation': 'Latin1_General_CI_AS'
            }, 'NTEXT COLLATE Latin1_General_CI_AS'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_charset', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        charset_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))

        for col in charset_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col),
                        "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #24
    def test_binary(self):
        "Exercise type specification for binary types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSBinary, [], {},
             'BINARY'),
            (mssql.MSBinary, [10], {},
             'BINARY(10)'),

            (types.BINARY, [], {},
             'BINARY'),
            (types.BINARY, [10], {},
             'BINARY(10)'),

            (mssql.MSVarBinary, [], {},
             'VARBINARY(max)'),
            (mssql.MSVarBinary, [10], {},
             'VARBINARY(10)'),

            (types.VARBINARY, [10], {},
             'VARBINARY(10)'),
            (types.VARBINARY, [], {},
             'VARBINARY(max)'),

            (mssql.MSImage, [], {},
             'IMAGE'),

            (mssql.IMAGE, [], {},
             'IMAGE'),

            (types.LargeBinary, [], {},
             'IMAGE'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_binary', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column('c%s' % index, type_(*args, **kw),
                              nullable=None))
        binary_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect,
                                   schema.CreateTable(binary_table))
        for col in binary_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col), '%s %s'
                        % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #25
    def test_numeric(self):
        "Exercise type specification and options for numeric types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSNumeric, [], {},
             'NUMERIC(10, 2)'),
            (mssql.MSNumeric, [None], {},
             'NUMERIC'),
            (mssql.MSNumeric, [12], {},
             'NUMERIC(12, 2)'),
            (mssql.MSNumeric, [12, 4], {},
             'NUMERIC(12, 4)'),

            (types.Float, [], {},
             'FLOAT(10)'),
            (types.Float, [None], {},
             'FLOAT'),
            (types.Float, [12], {},
             'FLOAT(12)'),
            (mssql.MSReal, [], {},
             'REAL'),

            (types.Integer, [], {},
             'INTEGER'),
            (types.BigInteger, [], {},
             'BIGINT'),
            (mssql.MSTinyInteger, [], {},
             'TINYINT'),
            (types.SmallInteger, [], {},
             'SMALLINT'),
           ]

        table_args = ['test_mssql_numeric', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))

        numeric_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))

        for col in numeric_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col),
                           "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))

        metadata.create_all()
Example #26
    def test_numeric(self):
        "Exercise type specification and options for numeric types."

        columns = [
            # column type, args, kwargs, expected ddl
            (types.NUMERIC, [], {},
             'NUMERIC'),
            (types.NUMERIC, [None], {},
             'NUMERIC'),
            (types.NUMERIC, [12, 4], {},
             'NUMERIC(12, 4)'),

            (types.Float, [], {},
             'FLOAT'),
            (types.Float, [None], {},
             'FLOAT'),
            (types.Float, [12], {},
             'FLOAT(12)'),
            (mssql.MSReal, [], {},
             'REAL'),

            (types.Integer, [], {},
             'INTEGER'),
            (types.BigInteger, [], {},
             'BIGINT'),
            (mssql.MSTinyInteger, [], {},
             'TINYINT'),
            (types.SmallInteger, [], {},
             'SMALLINT'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_numeric', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        numeric_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))

        for col in numeric_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #27
    def test_money(self):
        """Exercise type specification for money types."""

        columns = [(mssql.MSMoney, [], {}, "MONEY"), (mssql.MSSmallMoney, [], {}, "SMALLMONEY")]
        metadata = MetaData()
        table_args = ["test_mssql_money", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column("c%s" % index, type_(*args, **kw), nullable=None))
        money_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(money_table))
        for col in money_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col), "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #28
    def test_owner_database_pairs(self):
        dialect = mssql.dialect()

        for identifier, expected_schema, expected_owner, use_stmt in [
            ("foo", None, "foo", "use foo"),
            ("foo.bar", "foo", "bar", "use foo"),
            ("Foo.Bar", "Foo", "Bar", "use [Foo]"),
            ("[Foo.Bar]", None, "Foo.Bar", "use [Foo].[Bar]"),
            ("[Foo.Bar].[bat]", "Foo.Bar", "bat", "use [Foo].[Bar]"),
            (
                "[foo].]do something; select [foo",
                "foo",
                "do something; select foo",
                "use foo",
            ),
            (
                "something; select [foo].bar",
                "something; select foo",
                "bar",
                "use [something; select foo]",
            ),
        ]:
            schema, owner = base._owner_plus_db(dialect, identifier)

            eq_(owner, expected_owner)
            eq_(schema, expected_schema)

            mock_connection = mock.Mock(
                dialect=dialect,
                scalar=mock.Mock(return_value="Some ] Database"),
            )
            mock_lambda = mock.Mock()
            base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
            if schema is None:
                eq_(mock_connection.mock_calls, [])
            else:
                eq_(
                    mock_connection.mock_calls,
                    [
                        mock.call.scalar("select db_name()"),
                        mock.call.execute(use_stmt),
                        mock.call.execute("use [Some  Database]"),
                    ],
                )
            eq_(mock_lambda.mock_calls, [mock.call("x", y="bar")])
Example #29
    def test_money(self):
        """Exercise type specification for money types."""

        columns = [(mssql.MSMoney, [], {}, 'MONEY'),
                   (mssql.MSSmallMoney, [], {}, 'SMALLMONEY')]
        metadata = MetaData()
        table_args = ['test_mssql_money', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))
        money_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(money_table))
        for col in money_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col),
                        '%s %s' % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #30
class StringTest(fixtures.TestBase, AssertsCompiledSQL):
    __dialect__ = mssql.dialect()

    def test_unicode_literal_binds(self):
        self.assert_compile(
            column("x", Unicode()) == "foo", "x = N'foo'", literal_binds=True
        )

    def test_unicode_text_literal_binds(self):
        self.assert_compile(
            column("x", UnicodeText()) == "foo",
            "x = N'foo'",
            literal_binds=True,
        )

    def test_string_text_literal_binds(self):
        self.assert_compile(
            column("x", String()) == "foo", "x = 'foo'", literal_binds=True
        )

    def test_string_text_literal_binds_explicit_unicode_right(self):
        self.assert_compile(
            column("x", String()) == util.u("foo"),
            "x = 'foo'",
            literal_binds=True,
        )

    def test_string_text_explicit_literal_binds(self):
        # the literal expression here coerces the right side to
        # Unicode on Python 3 for plain string, test with unicode
        # string just to confirm literal is doing this
        self.assert_compile(
            column("x", String()) == literal(util.u("foo")),
            "x = N'foo'",
            literal_binds=True,
        )

    def test_text_text_literal_binds(self):
        self.assert_compile(
            column("x", Text()) == "foo", "x = 'foo'", literal_binds=True
        )
Example #31
    def test_boolean(self):
        "Exercise type specification for boolean type."

        columns = [
            # column type, args, kwargs, expected ddl
            (Boolean, [], {}, 'BIT'),
        ]

        metadata = MetaData()
        table_args = ['test_mssql_boolean', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        boolean_table = Table(*table_args)
        dialect = mssql.dialect()
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))

        for col in boolean_table.c:
            index = int(col.name[1:])
            testing.eq_(gen.get_column_specification(col),
                        "%s %s" % (col.name, columns[index][3]))
            self.assert_(repr(col))
Example #32
    def test_info_unicode_coercion(self):

        dialect = mssql.dialect()
        value = CoerceUnicode().bind_processor(dialect)("a string")
        assert isinstance(value, util.text_type)
Example #33
    def setup(self):
        t = Table("sometable", MetaData(), Column("pk_column", Integer), Column("test_column", String))
        self.column = t.c.test_column

        dialect = mssql.dialect()
        self.ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
Example #34
    def test_info_unicode_coercion(self):

        dialect = mssql.dialect()
        value = CoerceUnicode().bind_processor(dialect)('a string')
        assert isinstance(value, util.text_type)
Example #35
    def _legacy_dialect(self):
        return mssql.dialect(legacy_schema_aliasing=True)
Example #36
    def _legacy_dialect(self):
        return mssql.dialect(legacy_schema_aliasing=True)
Example #37
    def test_dates(self):
        "Exercise type specification for date types."

        columns = [
            # column type, args, kwargs, expected ddl
            (mssql.MSDateTime, [], {},
             'DATETIME', None),

            (types.DATE, [], {},
             'DATE', None),
            (types.Date, [], {},
             'DATE', None),
            (types.Date, [], {},
             'DATETIME', MS_2005_VERSION),
            (mssql.MSDate, [], {},
             'DATE', None),
            (mssql.MSDate, [], {},
             'DATETIME', MS_2005_VERSION),

            (types.TIME, [], {},
             'TIME', None),
            (types.Time, [], {},
             'TIME', None),
            (mssql.MSTime, [], {},
             'TIME', None),
            (mssql.MSTime, [1], {},
             'TIME(1)', None),
            (types.Time, [], {},
             'DATETIME', MS_2005_VERSION),
            (mssql.MSTime, [], {},
             'TIME', None),

            (mssql.MSSmallDateTime, [], {},
             'SMALLDATETIME', None),

            (mssql.MSDateTimeOffset, [], {},
             'DATETIMEOFFSET', None),
            (mssql.MSDateTimeOffset, [1], {},
             'DATETIMEOFFSET(1)', None),

            (mssql.MSDateTime2, [], {},
             'DATETIME2', None),
            (mssql.MSDateTime2, [0], {},
             'DATETIME2(0)', None),
            (mssql.MSDateTime2, [1], {},
             'DATETIME2(1)', None),

            (mssql.MSTime, [0], {},
             'TIME(0)', None),

            (mssql.MSDateTimeOffset, [0], {},
             'DATETIMEOFFSET(0)', None),

        ]

        metadata = MetaData()
        table_args = ['test_mssql_dates', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res, server_version = spec
            table_args.append(
                Column('c%s' % index, type_(*args, **kw), nullable=None))

        date_table = Table(*table_args)
        dialect = mssql.dialect()
        dialect.server_version_info = MS_2008_VERSION
        ms_2005_dialect = mssql.dialect()
        ms_2005_dialect.server_version_info = MS_2005_VERSION
        gen = dialect.ddl_compiler(dialect, schema.CreateTable(date_table))
        gen2005 = ms_2005_dialect.ddl_compiler(
            ms_2005_dialect, schema.CreateTable(date_table))

        for col in date_table.c:
            index = int(col.name[1:])
            server_version = columns[index][4]
            if not server_version:
                testing.eq_(
                    gen.get_column_specification(col),
                    "%s %s" % (col.name, columns[index][3]))
            else:
                testing.eq_(
                    gen2005.get_column_specification(col),
                    "%s %s" % (col.name, columns[index][3]))

            self.assert_(repr(col))
Example #38
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
    __dialect__ = mssql.dialect()

    def test_true_false(self):
        self.assert_compile(sql.false(), "0")
        self.assert_compile(sql.true(), "1")

    def test_select(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(t.select(),
                            'SELECT sometable.somecolumn FROM sometable')

    def test_select_with_nolock(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.select().with_hint(t, 'WITH (NOLOCK)'),
            'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)')

    def test_join_with_hint(self):
        t1 = table(
            't1',
            column('a', Integer),
            column('b', String),
            column('c', String),
        )
        t2 = table(
            't2',
            column("a", Integer),
            column("b", Integer),
            column("c", Integer),
        )
        join = t1.join(t2, t1.c.a == t2.c.a).\
            select().with_hint(t1, 'WITH (NOLOCK)')
        self.assert_compile(
            join, 'SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c '
            'FROM t1 WITH (NOLOCK) JOIN t2 ON t1.a = t2.a')

    def test_insert(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.insert(), 'INSERT INTO sometable (somecolumn) VALUES '
            '(:somecolumn)')

    def test_update(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.update(t.c.somecolumn == 7),
            'UPDATE sometable SET somecolumn=:somecolum'
            'n WHERE sometable.somecolumn = '
            ':somecolumn_1', dict(somecolumn=10))

    def test_insert_hint(self):
        t = table('sometable', column('somecolumn'))
        for targ in (None, t):
            for darg in ("*", "mssql"):
                self.assert_compile(
                    t.insert().values(somecolumn="x").with_hint(
                        "WITH (PAGLOCK)", selectable=targ, dialect_name=darg),
                    "INSERT INTO sometable WITH (PAGLOCK) "
                    "(somecolumn) VALUES (:somecolumn)")

    def test_update_hint(self):
        t = table('sometable', column('somecolumn'))
        for targ in (None, t):
            for darg in ("*", "mssql"):
                self.assert_compile(
                    t.update().where(t.c.somecolumn == "q").values(
                        somecolumn="x").with_hint("WITH (PAGLOCK)",
                                                  selectable=targ,
                                                  dialect_name=darg),
                    "UPDATE sometable WITH (PAGLOCK) "
                    "SET somecolumn=:somecolumn "
                    "WHERE sometable.somecolumn = :somecolumn_1")

    def test_update_exclude_hint(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.update().where(t.c.somecolumn == "q").values(
                somecolumn="x").with_hint("XYZ", "mysql"),
            "UPDATE sometable SET somecolumn=:somecolumn "
            "WHERE sometable.somecolumn = :somecolumn_1")

    def test_delete_hint(self):
        t = table('sometable', column('somecolumn'))
        for targ in (None, t):
            for darg in ("*", "mssql"):
                self.assert_compile(
                    t.delete().where(t.c.somecolumn == "q").with_hint(
                        "WITH (PAGLOCK)", selectable=targ, dialect_name=darg),
                    "DELETE FROM sometable WITH (PAGLOCK) "
                    "WHERE sometable.somecolumn = :somecolumn_1")

    def test_delete_exclude_hint(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.delete().\
                where(t.c.somecolumn == "q").\
                with_hint("XYZ", dialect_name="mysql"),
            "DELETE FROM sometable WHERE "
            "sometable.somecolumn = :somecolumn_1"
        )

    def test_update_from_hint(self):
        t = table('sometable', column('somecolumn'))
        t2 = table('othertable', column('somecolumn'))
        for darg in ("*", "mssql"):
            self.assert_compile(
                t.update().where(t.c.somecolumn == t2.c.somecolumn).values(
                    somecolumn="x").with_hint("WITH (PAGLOCK)",
                                              selectable=t2,
                                              dialect_name=darg),
                "UPDATE sometable SET somecolumn=:somecolumn "
                "FROM sometable, othertable WITH (PAGLOCK) "
                "WHERE sometable.somecolumn = othertable.somecolumn")

    # TODO: not supported yet.
    #def test_delete_from_hint(self):
    #    t = table('sometable', column('somecolumn'))
    #    t2 = table('othertable', column('somecolumn'))
    #    for darg in ("*", "mssql"):
    #        self.assert_compile(
    #            t.delete().where(t.c.somecolumn==t2.c.somecolumn).
    #                    with_hint("WITH (PAGLOCK)",
    #                            selectable=t2,
    #                            dialect_name=darg),
    #            ""
    #        )

    def test_strict_binds(self):
        """test the 'strict' compiler binds."""

        from sqlalchemy.dialects.mssql.base import MSSQLStrictCompiler
        mxodbc_dialect = mxodbc.dialect()
        mxodbc_dialect.statement_compiler = MSSQLStrictCompiler

        t = table('sometable', column('foo'))

        for expr, compile in [
            (
                select([literal("x"), literal("y")]),
                "SELECT 'x' AS anon_1, 'y' AS anon_2",
            ),
            (
                select([t]).where(t.c.foo.in_(['x', 'y', 'z'])),
                "SELECT sometable.foo FROM sometable WHERE sometable.foo "
                "IN ('x', 'y', 'z')",
            ), (t.c.foo.in_([None]), "sometable.foo IN (NULL)")
        ]:
            self.assert_compile(expr, compile, dialect=mxodbc_dialect)

    def test_in_with_subqueries(self):
        """Test removal of legacy behavior that converted "x==subquery"
        to use IN.

        """

        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.select().where(t.c.somecolumn == t.select()),
            'SELECT sometable.somecolumn FROM '
            'sometable WHERE sometable.somecolumn = '
            '(SELECT sometable.somecolumn FROM '
            'sometable)')
        self.assert_compile(
            t.select().where(t.c.somecolumn != t.select()),
            'SELECT sometable.somecolumn FROM '
            'sometable WHERE sometable.somecolumn != '
            '(SELECT sometable.somecolumn FROM '
            'sometable)')

    def test_count(self):
        t = table('sometable', column('somecolumn'))
        self.assert_compile(
            t.count(), 'SELECT count(sometable.somecolumn) AS '
            'tbl_row_count FROM sometable')

    def test_noorderby_insubquery(self):
        """test that the ms-sql dialect removes ORDER BY clauses from
        subqueries"""

        table1 = table(
            'mytable',
            column('myid', Integer),
            column('name', String),
            column('description', String),
        )

        q = select([table1.c.myid], order_by=[table1.c.myid]).alias('foo')
        crit = q.c.myid == table1.c.myid
        self.assert_compile(
            select(['*'], crit), "SELECT * FROM (SELECT mytable.myid AS "
            "myid FROM mytable) AS foo, mytable WHERE "
            "foo.myid = mytable.myid")

    def test_delete_schema(self):
        metadata = MetaData()
        tbl = Table('test',
                    metadata,
                    Column('id', Integer, primary_key=True),
                    schema='paj')
        self.assert_compile(
            tbl.delete(tbl.c.id == 1),
            'DELETE FROM paj.test WHERE paj.test.id = '
            ':id_1')
        s = select([tbl.c.id]).where(tbl.c.id == 1)
        self.assert_compile(
            tbl.delete().where(tbl.c.id.in_(s)),
            'DELETE FROM paj.test WHERE paj.test.id IN '
            '(SELECT test_1.id FROM paj.test AS test_1 '
            'WHERE test_1.id = :id_1)')

    def test_delete_schema_multipart(self):
        metadata = MetaData()
        tbl = Table('test',
                    metadata,
                    Column('id', Integer, primary_key=True),
                    schema='banana.paj')
        self.assert_compile(
            tbl.delete(tbl.c.id == 1), 'DELETE FROM banana.paj.test WHERE '
            'banana.paj.test.id = :id_1')
        s = select([tbl.c.id]).where(tbl.c.id == 1)
        self.assert_compile(
            tbl.delete().where(tbl.c.id.in_(s)),
            'DELETE FROM banana.paj.test WHERE '
            'banana.paj.test.id IN (SELECT test_1.id '
            'FROM banana.paj.test AS test_1 WHERE '
            'test_1.id = :id_1)')

    def test_delete_schema_multipart_needs_quoting(self):
        metadata = MetaData()
        tbl = Table('test',
                    metadata,
                    Column('id', Integer, primary_key=True),
                    schema='banana split.paj')
        self.assert_compile(
            tbl.delete(tbl.c.id == 1),
            'DELETE FROM [banana split].paj.test WHERE '
            '[banana split].paj.test.id = :id_1')
        s = select([tbl.c.id]).where(tbl.c.id == 1)
        self.assert_compile(
            tbl.delete().where(tbl.c.id.in_(s)),
            'DELETE FROM [banana split].paj.test WHERE '
            '[banana split].paj.test.id IN (SELECT '
            'test_1.id FROM [banana split].paj.test AS '
            'test_1 WHERE test_1.id = :id_1)')

    def test_delete_schema_multipart_both_need_quoting(self):
        metadata = MetaData()
        tbl = Table('test',
                    metadata,
                    Column('id', Integer, primary_key=True),
                    schema='banana split.paj with a space')
        self.assert_compile(
            tbl.delete(tbl.c.id == 1),
            'DELETE FROM [banana split].[paj with a '
            'space].test WHERE [banana split].[paj '
            'with a space].test.id = :id_1')
        s = select([tbl.c.id]).where(tbl.c.id == 1)
        self.assert_compile(
            tbl.delete().where(tbl.c.id.in_(s)),
            'DELETE FROM [banana split].[paj with a '
            'space].test WHERE [banana split].[paj '
            'with a space].test.id IN (SELECT '
            'test_1.id FROM [banana split].[paj with a '
            'space].test AS test_1 WHERE test_1.id = '
            ':id_1)')

    def test_union(self):
        t1 = table('t1', column('col1'), column('col2'), column('col3'),
                   column('col4'))
        t2 = table('t2', column('col1'), column('col2'), column('col3'),
                   column('col4'))
        s1 = select([t1.c.col3.label('col3'), t1.c.col4.label('col4')],
                    t1.c.col2.in_(['t1col2r1', 't1col2r2']))
        s2 = select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
                    t2.c.col2.in_(['t2col2r2', 't2col2r3']))
        u = union(s1, s2, order_by=['col3', 'col4'])
        self.assert_compile(
            u, 'SELECT t1.col3 AS col3, t1.col4 AS col4 '
            'FROM t1 WHERE t1.col2 IN (:col2_1, '
            ':col2_2) UNION SELECT t2.col3 AS col3, '
            't2.col4 AS col4 FROM t2 WHERE t2.col2 IN '
            '(:col2_3, :col2_4) ORDER BY col3, col4')
        self.assert_compile(
            u.alias('bar').select(), 'SELECT bar.col3, bar.col4 FROM (SELECT '
            't1.col3 AS col3, t1.col4 AS col4 FROM t1 '
            'WHERE t1.col2 IN (:col2_1, :col2_2) UNION '
            'SELECT t2.col3 AS col3, t2.col4 AS col4 '
            'FROM t2 WHERE t2.col2 IN (:col2_3, '
            ':col2_4)) AS bar')

    def test_function(self):
        self.assert_compile(func.foo(1, 2), 'foo(:foo_1, :foo_2)')
        self.assert_compile(func.current_time(), 'CURRENT_TIME')
        self.assert_compile(func.foo(), 'foo()')
        m = MetaData()
        t = Table('sometable', m, Column('col1', Integer),
                  Column('col2', Integer))
        self.assert_compile(
            select([func.max(t.c.col1)]),
            'SELECT max(sometable.col1) AS max_1 FROM '
            'sometable')

    def test_function_overrides(self):
        self.assert_compile(func.current_date(), "GETDATE()")
        self.assert_compile(func.length(3), "LEN(:length_1)")

    def test_extract(self):
        t = table('t', column('col1'))

        for field in 'day', 'month', 'year':
            self.assert_compile(
                select([extract(field, t.c.col1)]),
                'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)

    def test_update_returning(self):
        table1 = table('mytable', column('myid', Integer),
                       column('name', String(128)),
                       column('description', String(128)))
        u = update(table1,
                   values=dict(name='foo')).returning(table1.c.myid,
                                                      table1.c.name)
        self.assert_compile(
            u, 'UPDATE mytable SET name=:name OUTPUT '
            'inserted.myid, inserted.name')
        u = update(table1, values=dict(name='foo')).returning(table1)
        self.assert_compile(
            u, 'UPDATE mytable SET name=:name OUTPUT '
            'inserted.myid, inserted.name, '
            'inserted.description')
        u = update(table1, values=dict(name='foo')).returning(table1).where(
            table1.c.name == 'bar')
        self.assert_compile(
            u, 'UPDATE mytable SET name=:name OUTPUT '
            'inserted.myid, inserted.name, '
            'inserted.description WHERE mytable.name = '
            ':name_1')
        u = update(table1, values=dict(name='foo')).returning(
            func.length(table1.c.name))
        self.assert_compile(
            u, 'UPDATE mytable SET name=:name OUTPUT '
            'LEN(inserted.name) AS length_1')

    def test_delete_returning(self):
        table1 = table('mytable', column('myid', Integer),
                       column('name', String(128)),
                       column('description', String(128)))
        d = delete(table1).returning(table1.c.myid, table1.c.name)
        self.assert_compile(
            d, 'DELETE FROM mytable OUTPUT deleted.myid, '
            'deleted.name')
        d = delete(table1).where(table1.c.name == 'bar').returning(
            table1.c.myid, table1.c.name)
        self.assert_compile(
            d, 'DELETE FROM mytable OUTPUT deleted.myid, '
            'deleted.name WHERE mytable.name = :name_1')

    def test_insert_returning(self):
        table1 = table('mytable', column('myid', Integer),
                       column('name', String(128)),
                       column('description', String(128)))
        i = insert(table1,
                   values=dict(name='foo')).returning(table1.c.myid,
                                                      table1.c.name)
        self.assert_compile(
            i, 'INSERT INTO mytable (name) OUTPUT '
            'inserted.myid, inserted.name VALUES '
            '(:name)')
        i = insert(table1, values=dict(name='foo')).returning(table1)
        self.assert_compile(
            i, 'INSERT INTO mytable (name) OUTPUT '
            'inserted.myid, inserted.name, '
            'inserted.description VALUES (:name)')
        i = insert(table1, values=dict(name='foo')).returning(
            func.length(table1.c.name))
        self.assert_compile(
            i, 'INSERT INTO mytable (name) OUTPUT '
            'LEN(inserted.name) AS length_1 VALUES '
            '(:name)')

    def test_limit_using_top(self):
        t = table('t', column('x', Integer), column('y', Integer))

        s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10)

        self.assert_compile(
            s,
            "SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
            checkparams={'x_1': 5})

    def test_limit_zero_using_top(self):
        t = table('t', column('x', Integer), column('y', Integer))

        s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0)

        self.assert_compile(
            s,
            "SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
            checkparams={'x_1': 5})

    def test_offset_using_window(self):
        t = table('t', column('x', Integer), column('y', Integer))

        s = select([t]).where(t.c.x == 5).order_by(t.c.y).offset(20)

        # test that the select is not altered with subsequent compile
        # calls
        for i in range(2):
            self.assert_compile(
                s, "SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
                "AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
                "mssql_rn FROM t WHERE t.x = :x_1) AS "
                "anon_1 WHERE mssql_rn > :mssql_rn_1",
                checkparams={
                    'mssql_rn_1': 20,
                    'x_1': 5
                })

    def test_limit_offset_using_window(self):
        t = table('t', column('x', Integer), column('y', Integer))

        s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)

        self.assert_compile(
            s, "SELECT anon_1.x, anon_1.y "
            "FROM (SELECT t.x AS x, t.y AS y, "
            "ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
            "FROM t "
            "WHERE t.x = :x_1) AS anon_1 "
            "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
            checkparams={
                'mssql_rn_1': 20,
                'mssql_rn_2': 30,
                'x_1': 5
            })

    def test_limit_offset_with_correlated_order_by(self):
        t1 = table('t1', column('x', Integer), column('y', Integer))
        t2 = table('t2', column('x', Integer), column('y', Integer))

        order_by = select([t2.c.y]).where(t1.c.x == t2.c.x).as_scalar()
        s = select([t1]).where(t1.c.x == 5).order_by(order_by) \
            .limit(10).offset(20)

        self.assert_compile(
            s, "SELECT anon_1.x, anon_1.y "
            "FROM (SELECT t1.x AS x, t1.y AS y, "
            "ROW_NUMBER() OVER (ORDER BY "
            "(SELECT t2.y FROM t2 WHERE t1.x = t2.x)"
            ") AS mssql_rn "
            "FROM t1 "
            "WHERE t1.x = :x_1) AS anon_1 "
            "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
            checkparams={
                'mssql_rn_1': 20,
                'mssql_rn_2': 30,
                'x_1': 5
            })

    def test_limit_zero_offset_using_window(self):
        t = table('t', column('x', Integer), column('y', Integer))

        s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0).offset(0)

        # render the LIMIT of zero, but not the OFFSET
        # of zero, so produces TOP 0
        self.assert_compile(s, "SELECT TOP 0 t.x, t.y FROM t "
                            "WHERE t.x = :x_1 ORDER BY t.y",
                            checkparams={'x_1': 5})

    def test_sequence_start_0(self):
        metadata = MetaData()
        tbl = Table('test', metadata,
                    Column('id', Integer, Sequence('', 0), primary_key=True))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
            "PRIMARY KEY (id))")

    def test_sequence_non_primary_key(self):
        metadata = MetaData()
        tbl = Table('test', metadata,
                    Column('id', Integer, Sequence(''), primary_key=False))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))")

    def test_sequence_ignore_nullability(self):
        metadata = MetaData()
        tbl = Table('test', metadata,
                    Column('id', Integer, Sequence(''), nullable=True))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))")

    def test_table_pkc_clustering(self):
        metadata = MetaData()
        tbl = Table('test', metadata, Column('x', Integer,
                                             autoincrement=False),
                    Column('y', Integer, autoincrement=False),
                    PrimaryKeyConstraint("x", "y", mssql_clustered=True))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
            "PRIMARY KEY CLUSTERED (x, y))")

    def test_table_uc_clustering(self):
        metadata = MetaData()
        tbl = Table('test', metadata, Column('x', Integer,
                                             autoincrement=False),
                    Column('y', Integer, autoincrement=False),
                    PrimaryKeyConstraint("x"),
                    UniqueConstraint("y", mssql_clustered=True))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, "
            "PRIMARY KEY (x), UNIQUE CLUSTERED (y))")

    def test_index_clustering(self):
        metadata = MetaData()
        tbl = Table('test', metadata, Column('id', Integer))
        idx = Index("foo", tbl.c.id, mssql_clustered=True)
        self.assert_compile(schema.CreateIndex(idx),
                            "CREATE CLUSTERED INDEX foo ON test (id)")

    def test_index_ordering(self):
        metadata = MetaData()
        tbl = Table('test', metadata, Column('x', Integer),
                    Column('y', Integer), Column('z', Integer))
        idx = Index("foo", tbl.c.x.desc(), "y")
        self.assert_compile(schema.CreateIndex(idx),
                            "CREATE INDEX foo ON test (x DESC, y)")

    def test_create_index_expr(self):
        m = MetaData()
        t1 = Table('foo', m, Column('x', Integer))
        self.assert_compile(schema.CreateIndex(Index("bar", t1.c.x > 5)),
                            "CREATE INDEX bar ON foo (x > 5)")

    def test_drop_index_w_schema(self):
        m = MetaData()
        t1 = Table('foo', m, Column('x', Integer), schema='bar')
        self.assert_compile(schema.DropIndex(Index("idx_foo", t1.c.x)),
                            "DROP INDEX idx_foo ON bar.foo")

    def test_index_extra_include_1(self):
        metadata = MetaData()
        tbl = Table('test', metadata, Column('x', Integer),
                    Column('y', Integer), Column('z', Integer))
        idx = Index("foo", tbl.c.x, mssql_include=['y'])
        self.assert_compile(schema.CreateIndex(idx),
                            "CREATE INDEX foo ON test (x) INCLUDE (y)")

    def test_index_extra_include_2(self):
        metadata = MetaData()
        tbl = Table('test', metadata, Column('x', Integer),
                    Column('y', Integer), Column('z', Integer))
        idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y])
        self.assert_compile(schema.CreateIndex(idx),
                            "CREATE INDEX foo ON test (x) INCLUDE (y)")
Example #39
class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
    """SQL server cannot reference schema-qualified tables in a SELECT statement, they
    must be aliased.
    """
    __dialect__ = mssql.dialect()

    def setup(self):
        metadata = MetaData()
        self.t1 = table(
            't1',
            column('a', Integer),
            column('b', String),
            column('c', String),
        )
        self.t2 = Table('t2',
                        metadata,
                        Column("a", Integer),
                        Column("b", Integer),
                        Column("c", Integer),
                        schema='schema')

    def test_result_map(self):
        s = self.t2.select()
        c = s.compile(dialect=self.__dialect__)
        assert self.t2.c.a in set(c.result_map['a'][1])

    def test_result_map_use_labels(self):
        s = self.t2.select(use_labels=True)
        c = s.compile(dialect=self.__dialect__)
        assert self.t2.c.a in set(c.result_map['schema_t2_a'][1])

    def test_straight_select(self):
        self.assert_compile(
            self.t2.select(),
            "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1")

    def test_straight_select_use_labels(self):
        self.assert_compile(
            self.t2.select(use_labels=True),
            "SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b, "
            "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1")

    def test_join_to_schema(self):
        t1, t2 = self.t1, self.t2
        self.assert_compile(
            t1.join(t2, t1.c.a == t2.c.a).select(),
            "SELECT t1.a, t1.b, t1.c, t2_1.a, t2_1.b, t2_1.c FROM t1 "
            "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a")

    def test_union_schema_to_non(self):
        t1, t2 = self.t1, self.t2
        s = select([t2.c.a, t2.c.b]).apply_labels().\
                union(
                    select([t1.c.a, t1.c.b]).apply_labels()
                ).alias().select()
        self.assert_compile(
            s, "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
            "(SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b "
            "FROM [schema].t2 AS t2_1 UNION SELECT t1.a AS t1_a, "
            "t1.b AS t1_b FROM t1) AS anon_1")

    def test_column_subquery_to_alias(self):
        a1 = self.t2.alias('a1')
        s = select([self.t2, select([a1.c.a]).as_scalar()])
        self.assert_compile(
            s, "SELECT t2_1.a, t2_1.b, t2_1.c, "
            "(SELECT a1.a FROM [schema].t2 AS a1) "
            "AS anon_1 FROM [schema].t2 AS t2_1")