# Example #1 (score: 0)
    def date_fixture(self, metadata):
        """Build (without emitting DDL) the ``test_dates`` table and
        sample values for round-trip tests.

        Returns a tuple ``(table, (date, time, naive_dt, aware_dt))``
        where ``aware_dt`` carries microseconds and a fixed UTC-05:00
        offset.
        """
        table = Table(
            "test_dates",
            metadata,
            Column("adate", Date),
            Column("atime1", Time),
            Column("atime2", Time),
            Column("adatetime", DateTime),
            Column("adatetimeoffset", DATETIMEOFFSET),
            Column("adatetimewithtimezone", DateTime(timezone=True)),
        )

        sample_date = datetime.date(2007, 10, 30)
        sample_time = datetime.time(11, 2, 32)
        naive_dt = datetime.datetime(2007, 10, 30, 11, 2, 32)
        minus_five = util.timezone(datetime.timedelta(hours=-5))
        aware_dt = datetime.datetime(
            2007, 10, 30, 11, 2, 32, 123456, minus_five
        )
        return table, (sample_date, sample_time, naive_dt, aware_dt)
    def test_datetime_offset(
        self,
        datetimeoffset_fixture,
        dto_param_value,
        expected_offset_hours,
        should_fail,
    ):
        """Bind one DATETIMEOFFSET parameter and verify the outcome.

        Parameters are supplied by a ``testing.combinations`` decorator
        (not visible in this excerpt):

        :param datetimeoffset_fixture: table with an
         ``adatetimeoffset`` column.
        :param dto_param_value: zero-argument callable producing the
         bound value (aware/naive datetime, string, or ``None``).
        :param expected_offset_hours: UTC offset expected on the value
         read back; unused when ``should_fail`` is set.
        :param should_fail: when True, the INSERT must raise
         ``DBAPIError``.
        """
        t = datetimeoffset_fixture
        # The combination passes a factory, not a value, so aware
        # datetimes are built fresh for each test invocation.
        dto_param_value = dto_param_value()

        with testing.db.begin() as conn:
            if should_fail:
                assert_raises(
                    sa.exc.DBAPIError,
                    conn.execute,
                    t.insert(),
                    adatetimeoffset=dto_param_value,
                )
                return

            conn.execute(
                t.insert(),
                adatetimeoffset=dto_param_value,
            )

            row = conn.execute(t.select()).first()

            if dto_param_value is None:
                is_(row.adatetimeoffset, None)
            else:
                # Every successful non-None case should round-trip to
                # this exact tz-aware datetime, differing only in the
                # expected UTC offset.
                eq_(
                    row.adatetimeoffset,
                    datetime.datetime(
                        2007,
                        10,
                        30,
                        11,
                        2,
                        32,
                        123456,
                        util.timezone(
                            datetime.timedelta(hours=expected_offset_hours)
                        ),
                    ),
                )
# Example #3 (score: 0)
class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults,
                        ComparesTables):
    """Round-trip tests for SQL Server types against a live "mssql"
    backend: DDL rendering, reflection, and value fidelity.
    """

    __only_on__ = "mssql"

    __backend__ = True

    @classmethod
    def setup_class(cls):
        # Legacy pattern: a module-global MetaData bound to the test
        # engine, shared by every test in this class; teardown() drops
        # whatever the previous test created on it.
        global metadata
        metadata = MetaData(testing.db)

    def teardown(self):
        # Drop all tables registered on the shared metadata.
        metadata.drop_all()

    def test_decimal_notation(self):
        """Insert a wide range of Decimal values, in plain and
        scientific notation, into a NUMERIC(38, 20) column and assert
        each one round-trips exactly.
        """
        numeric_table = Table(
            "numeric_table",
            metadata,
            Column(
                "id",
                Integer,
                Sequence("numeric_id_seq", optional=True),
                primary_key=True,
            ),
            Column("numericcol", Numeric(precision=38,
                                         scale=20,
                                         asdecimal=True)),
        )
        metadata.create_all()
        test_items = [
            decimal.Decimal(d) for d in (
                "1500000.00000000000000000000",
                "-1500000.00000000000000000000",
                "1500000",
                "0.0000000000000000002",
                "0.2",
                "-0.0000000000000000002",
                "-2E-2",
                "156666.458923543",
                "-156666.458923543",
                "1",
                "-1",
                "-1234",
                "1234",
                "2E-12",
                "4E8",
                "3E-6",
                "3E-7",
                "4.1",
                "1E-1",
                "1E-2",
                "1E-3",
                "1E-4",
                "1E-5",
                "1E-6",
                "1E-7",
                # NOTE(review): duplicate of the "1E-1" entry above.
                "1E-1",
                "1E-8",
                "0.2732E2",
                "-0.2432E2",
                "4.35656E2",
                "-02452E-2",
                "45125E-2",
                "1234.58965E-2",
                "1.521E+15",
                # previously, these were at -1E-25, which were inserted
                # cleanly however we only got back 20 digits of accuracy.
                # pyodbc as of 4.0.22 now disallows the silent truncation.
                "-1E-20",
                "1E-20",
                "1254E-20",
                "-1203E-20",
                "0",
                "-0.00",
                "-0",
                "4585E12",
                "000000000000000000012",
                "000000000000.32E12",
                "00000000000000.1E+12",
                # these are no longer accepted by pyodbc 4.0.22 but it seems
                # they were not actually round-tripping correctly before that
                # in any case
                # '-1E-25',
                # '1E-25',
                # '1254E-25',
                # '-1203E-25',
                # '000000000000.2E-32',
            )
        ]

        with testing.db.connect() as conn:
            for value in test_items:
                result = conn.execute(numeric_table.insert(),
                                      dict(numericcol=value))
                primary_key = result.inserted_primary_key
                returned = conn.scalar(
                    select([numeric_table.c.numericcol
                            ]).where(numeric_table.c.id == primary_key[0]))
                eq_(value, returned)

    def test_float(self):
        """Smoke-test inserting assorted float values into a FLOAT
        column; no read-back assertions are made here.
        """
        float_table = Table(
            "float_table",
            metadata,
            Column(
                "id",
                Integer,
                Sequence("numeric_id_seq", optional=True),
                primary_key=True,
            ),
            Column("floatcol", Float()),
        )

        metadata.create_all()
        # NOTE(review): this try/except only re-raises and adds
        # nothing; candidate for removal.
        try:
            test_items = [
                float(d) for d in (
                    "1500000.00000000000000000000",
                    "-1500000.00000000000000000000",
                    "1500000",
                    "0.0000000000000000002",
                    "0.2",
                    "-0.0000000000000000002",
                    "156666.458923543",
                    "-156666.458923543",
                    "1",
                    "-1",
                    "1234",
                    "2E-12",
                    "4E8",
                    "3E-6",
                    "3E-7",
                    "4.1",
                    "1E-1",
                    "1E-2",
                    "1E-3",
                    "1E-4",
                    "1E-5",
                    "1E-6",
                    "1E-7",
                    "1E-8",
                )
            ]
            for value in test_items:
                float_table.insert().execute(floatcol=value)
        except Exception as e:
            raise e

    # todo this should suppress warnings, but it does not
    @emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
    def test_dates(self):
        """Exercise type specification for date types."""

        columns = [
            # column type, args, kwargs, expected ddl,
            # optional server-version requirement, optional type that
            # the column is expected to reflect back as
            (mssql.MSDateTime, [], {}, "DATETIME", []),
            (types.DATE, [], {}, "DATE", [">=", (10, )]),
            (types.Date, [], {}, "DATE", [">=", (10, )]),
            (types.Date, [], {}, "DATETIME", ["<", (10, )], mssql.MSDateTime),
            (mssql.MSDate, [], {}, "DATE", [">=", (10, )]),
            (mssql.MSDate, [], {}, "DATETIME", ["<",
                                                (10, )], mssql.MSDateTime),
            (types.TIME, [], {}, "TIME", [">=", (10, )]),
            (types.Time, [], {}, "TIME", [">=", (10, )]),
            (mssql.MSTime, [], {}, "TIME", [">=", (10, )]),
            (mssql.MSTime, [1], {}, "TIME(1)", [">=", (10, )]),
            (types.Time, [], {}, "DATETIME", ["<", (10, )], mssql.MSDateTime),
            (mssql.MSTime, [], {}, "TIME", [">=", (10, )]),
            (mssql.MSSmallDateTime, [], {}, "SMALLDATETIME", []),
            (mssql.MSDateTimeOffset, [], {}, "DATETIMEOFFSET", [">=", (10, )]),
            (
                mssql.MSDateTimeOffset,
                [1],
                {},
                "DATETIMEOFFSET(1)",
                [">=", (10, )],
            ),
            (mssql.MSDateTime2, [], {}, "DATETIME2", [">=", (10, )]),
            (mssql.MSDateTime2, [0], {}, "DATETIME2(0)", [">=", (10, )]),
            (mssql.MSDateTime2, [1], {}, "DATETIME2(1)", [">=", (10, )]),
        ]

        table_args = ["test_mssql_dates", metadata]
        for index, spec in enumerate(columns):
            # Build a column only for specs whose version requirement
            # (position 4) applies to the connected server.
            type_, args, kw, res, requires = spec[0:5]
            if (requires and testing._is_excluded("mssql", *requires)
                    or not requires):
                c = Column("c%s" % index, type_(*args, **kw), nullable=None)
                testing.db.dialect.type_descriptor(c.type)
                table_args.append(c)
        dates_table = Table(*table_args)
        gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
                                              schema.CreateTable(dates_table))
        # The column name encodes its spec index ("c3" -> columns[3]),
        # so surviving columns can be matched back to expected DDL.
        for col in dates_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]),
            )
            self.assert_(repr(col))
        dates_table.create(checkfirst=True)
        # Reflect with a fresh MetaData and compare base types.
        reflected_dates = Table("test_mssql_dates",
                                MetaData(testing.db),
                                autoload=True)
        for col in reflected_dates.c:
            self.assert_types_base(col, dates_table.c[col.key])

    @testing.metadata_fixture()
    def date_fixture(self, metadata):
        """Fixture: the ``test_dates`` table plus sample date, time,
        and naive datetime values.
        """
        t = Table(
            "test_dates",
            metadata,
            Column("adate", Date),
            Column("atime1", Time),
            Column("atime2", Time),
            Column("adatetime", DateTime),
            Column("adatetimeoffset", DATETIMEOFFSET),
        )

        d1 = datetime.date(2007, 10, 30)
        t1 = datetime.time(11, 2, 32)
        d2 = datetime.datetime(2007, 10, 30, 11, 2, 32)
        return t, (d1, t1, d2)

    def test_date_roundtrips(self, date_fixture):
        """Round-trip date/time/datetime values; a datetime bound to a
        Time column (atime2) comes back as its time component.
        """
        t, (d1, t1, d2) = date_fixture
        with testing.db.begin() as conn:
            conn.execute(t.insert(),
                         adate=d1,
                         adatetime=d2,
                         atime1=t1,
                         atime2=d2)

            row = conn.execute(t.select()).first()
            eq_(
                (row.adate, row.adatetime, row.atime1, row.atime2),
                (d1, d2, t1, d2.time()),
            )

    @testing.metadata_fixture()
    def datetimeoffset_fixture(self, metadata):
        """Fixture: a table with a single DATETIMEOFFSET column."""
        t = Table(
            "test_dates",
            metadata,
            Column("adatetimeoffset", DATETIMEOFFSET),
        )

        return t

    # Each combination: (id, value-factory, expected UTC offset hours,
    # whether the INSERT is expected to fail).
    @testing.combinations(
        ("dto_param_none", lambda: None, None, False),
        (
            "dto_param_datetime_aware_positive",
            lambda: datetime.datetime(
                2007,
                10,
                30,
                11,
                2,
                32,
                123456,
                util.timezone(datetime.timedelta(hours=1)),
            ),
            1,
            False,
        ),
        (
            "dto_param_datetime_aware_negative",
            lambda: datetime.datetime(
                2007,
                10,
                30,
                11,
                2,
                32,
                123456,
                util.timezone(datetime.timedelta(hours=-5)),
            ),
            -5,
            False,
        ),
        (
            "dto_param_datetime_aware_seconds_frac_fail",
            lambda: datetime.datetime(
                2007,
                10,
                30,
                11,
                2,
                32,
                123456,
                util.timezone(datetime.timedelta(seconds=4000)),
            ),
            None,
            True,
            testing.requires.python37,
        ),
        (
            "dto_param_datetime_naive",
            lambda: datetime.datetime(2007, 10, 30, 11, 2, 32, 123456),
            0,
            False,
        ),
        (
            "dto_param_string_one",
            lambda: "2007-10-30 11:02:32.123456 +01:00",
            1,
            False,
        ),
        # wow
        (
            "dto_param_string_two",
            lambda: "October 30, 2007 11:02:32.123456",
            0,
            False,
        ),
        ("dto_param_string_invalid", lambda: "this is not a date", 0, True),
        id_="iaaa",
        argnames="dto_param_value, expected_offset_hours, should_fail",
    )
    def test_datetime_offset(
        self,
        datetimeoffset_fixture,
        dto_param_value,
        expected_offset_hours,
        should_fail,
    ):
        """Bind one DATETIMEOFFSET parameter per combination above and
        verify the round-tripped value, its offset, or the expected
        failure.
        """
        t = datetimeoffset_fixture
        # The combination supplies a factory so values are constructed
        # fresh per test invocation.
        dto_param_value = dto_param_value()

        with testing.db.begin() as conn:
            if should_fail:
                assert_raises(
                    sa.exc.DBAPIError,
                    conn.execute,
                    t.insert(),
                    adatetimeoffset=dto_param_value,
                )
                return

            conn.execute(
                t.insert(),
                adatetimeoffset=dto_param_value,
            )

            row = conn.execute(t.select()).first()

            if dto_param_value is None:
                is_(row.adatetimeoffset, None)
            else:
                # Every successful non-None case should come back as
                # this exact tz-aware datetime, differing only in the
                # expected UTC offset.
                eq_(
                    row.adatetimeoffset,
                    datetime.datetime(
                        2007,
                        10,
                        30,
                        11,
                        2,
                        32,
                        123456,
                        util.timezone(
                            datetime.timedelta(hours=expected_offset_hours)),
                    ),
                )

    @emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
    @testing.provide_metadata
    def _test_binary_reflection(self, deprecate_large_types):
        """Exercise type specification for binary types."""

        columns = [
            # column type, args, kwargs, expected ddl from reflected
            (mssql.MSBinary, [], {}, "BINARY(1)"),
            (mssql.MSBinary, [10], {}, "BINARY(10)"),
            (types.BINARY, [], {}, "BINARY(1)"),
            (types.BINARY, [10], {}, "BINARY(10)"),
            (mssql.MSVarBinary, [], {}, "VARBINARY(max)"),
            (mssql.MSVarBinary, [10], {}, "VARBINARY(10)"),
            (types.VARBINARY, [10], {}, "VARBINARY(10)"),
            (types.VARBINARY, [], {}, "VARBINARY(max)"),
            (mssql.MSImage, [], {}, "IMAGE"),
            (mssql.IMAGE, [], {}, "IMAGE"),
            (
                types.LargeBinary,
                [],
                {},
                "IMAGE" if not deprecate_large_types else "VARBINARY(max)",
            ),
        ]

        metadata = self.metadata
        # Bind to an engine carrying the requested flag, which changes
        # how LargeBinary renders (IMAGE vs VARBINARY(max)).
        metadata.bind = engines.testing_engine(
            options={"deprecate_large_types": deprecate_large_types})
        table_args = ["test_mssql_binary", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(
                Column("c%s" % index, type_(*args, **kw), nullable=None))
        binary_table = Table(*table_args)
        metadata.create_all()
        reflected_binary = Table("test_mssql_binary",
                                 MetaData(testing.db),
                                 autoload=True)
        for col, spec in zip(reflected_binary.c, columns):
            eq_(
                str(col.type),
                spec[3],
                "column %s %s != %s" % (col.key, str(col.type), spec[3]),
            )
            c1 = testing.db.dialect.type_descriptor(col.type).__class__
            c2 = testing.db.dialect.type_descriptor(
                binary_table.c[col.name].type).__class__
            assert issubclass(
                c1, c2), "column %s: %r is not a subclass of %r" % (col.key,
                                                                    c1, c2)
            if binary_table.c[col.name].type.length:
                testing.eq_(col.type.length,
                            binary_table.c[col.name].type.length)

    def test_binary_reflection_legacy_large_types(self):
        # deprecate_large_types=False: LargeBinary reflects as IMAGE.
        self._test_binary_reflection(False)

    @testing.only_on("mssql >= 11")
    def test_binary_reflection_sql2012_large_types(self):
        # deprecate_large_types=True (SQL Server 2012+): LargeBinary
        # reflects as VARBINARY(max).
        self._test_binary_reflection(True)

    def test_autoincrement(self):
        """Verify autoincrement/IDENTITY column selection and behavior
        across tables with varying primary-key layouts, with and
        without implicit RETURNING.
        """
        Table(
            "ai_1",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_2",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_3",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )

        Table(
            "ai_4",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_5",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_6",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )
        Table(
            "ai_7",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, autoincrement=True, primary_key=True),
        )
        Table(
            "ai_8",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
        )
        metadata.create_all()

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData(testing.db)

        for name in table_names:
            # Reflect each table (result unused beyond exercising the
            # reflection path), then test against the original Table.
            tbl = Table(name, mr, autoload=True)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if "int_y" in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, "auto")
                    is_not_(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == "mxodbc":
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": True})
                ]
            else:
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": False}),
                    engines.testing_engine(
                        options={"implicit_returning": True}),
                ]

            for counter, engine in enumerate(eng):
                with engine.begin() as conn:
                    conn.execute(tbl.insert())
                    if "int_y" in tbl.c:
                        eq_(
                            conn.execute(select([tbl.c.int_y])).scalar(),
                            counter + 1,
                        )
                        assert (list(conn.execute(
                            tbl.select()).first()).count(counter + 1) == 1)
                    else:
                        assert 1 not in list(
                            conn.execute(tbl.select()).first())
                    conn.execute(tbl.delete())
# Example #4 (score: 0)
    def test_date_roundtrip(self):
        """Round-trip DATE, TIME, DATETIME and DATETIMEOFFSET values,
        including a datetime bound to the TIME column, and verify both
        the returned Python types and the exact values.
        """
        table = Table(
            "test_dates",
            metadata,
            Column(
                "id",
                Integer,
                Sequence("datetest_id_seq", optional=True),
                primary_key=True,
            ),
            Column("adate", Date),
            Column("atime", Time),
            Column("adatetime", DateTime),
            Column("adatetimeoffset", DATETIMEOFFSET),
        )
        metadata.create_all()

        a_date = datetime.date(2007, 10, 30)
        a_time = datetime.time(11, 2, 32)
        a_datetime = datetime.datetime(2007, 10, 30, 11, 2, 32)
        plus_one = util.timezone(datetime.timedelta(hours=1))
        an_offset = datetime.datetime(
            2007, 10, 30, 11, 2, 32, 0, plus_one
        )

        table.insert().execute(
            adate=a_date,
            adatetime=a_datetime,
            atime=a_time,
            adatetimeoffset=an_offset,
        )

        # Binding a datetime to the TIME column exercises truncation to
        # the time component.  This test previously also bound a
        # datetime to the DATE column; pyodbc 4.0.22 rejects that
        # (4.0.21 accepted it), so only the date value is used there.
        table.insert().execute(
            adate=a_date,
            adatetime=a_datetime,
            atime=a_datetime,
            adatetimeoffset=an_offset,
        )

        first_row = table.select().execute().fetchall()[0]
        self.assert_(first_row.adate.__class__ == datetime.date)
        self.assert_(first_row.atime.__class__ == datetime.time)
        self.assert_(first_row.adatetime.__class__ == datetime.datetime)
        self.assert_(
            first_row.adatetimeoffset.__class__ == datetime.datetime)

        # Start over with a clean table, then verify exact values.
        table.delete().execute()

        table.insert().execute(
            adate=a_date,
            adatetime=a_datetime,
            atime=a_time,
            adatetimeoffset=an_offset,
        )

        eq_(
            select(
                [table.c.adate, table.c.atime, table.c.adatetime,
                 table.c.adatetimeoffset],
                table.c.adate == a_date,
            ).execute().fetchall(),
            [(a_date, a_time, a_datetime, an_offset)],
        )