def test_mssql_varbinary_max(self):
    """Round-trip a binary fixture through VARBINARY(max)."""
    payload = self._load_stream("binary_data_one.dat")
    self._test_round_trip(mssql.VARBINARY("max"), payload)
def test_mssql_legacy_varbinary_max(self):
    """Round-trip a binary fixture through VARBINARY(max) with the
    legacy (pre-deprecation) large-type rendering enabled."""
    payload = self._load_stream("binary_data_one.dat")
    self._test_round_trip(
        mssql.VARBINARY("max"), payload, deprecate_large_types=False
    )
class BinaryTest(fixtures.TestBase):
    """Round-trip tests for the MSSQL binary types (VARBINARY, BINARY,
    IMAGE, LargeBinary, PickleType) against a live backend, covering both
    the modern and deprecated large-type renderings, length truncation,
    and BINARY zero-padding."""

    __only_on__ = "mssql"
    __requires__ = ("non_broken_binary",)
    __backend__ = True

    @testing.combinations(
        (
            mssql.MSVarBinary(800),
            b("some normal data"),
            None,
            True,
            None,
            False,
        ),
        (
            mssql.VARBINARY("max"),
            "binary_data_one.dat",
            None,
            False,
            None,
            False,
        ),
        (
            mssql.VARBINARY("max"),
            "binary_data_one.dat",
            None,
            True,
            None,
            False,
        ),
        (
            sqltypes.LargeBinary,
            "binary_data_one.dat",
            None,
            False,
            None,
            False,
        ),
        (sqltypes.LargeBinary, "binary_data_one.dat", None, True, None, False),
        (mssql.MSImage, "binary_data_one.dat", None, True, None, False),
        (PickleType, pickleable.Foo("im foo 1"), None, True, None, False),
        (
            MyPickleType,
            pickleable.Foo("im foo 1"),
            pickleable.Foo("im foo 1", stuff="BINDim stuffRESULT"),
            True,
            None,
            False,
        ),
        (types.BINARY(100), "binary_data_one.dat", None, True, 100, False),
        (types.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
        (mssql.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
        (types.BINARY(100), "binary_data_two.dat", None, True, 99, True),
        (types.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
        (mssql.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
        argnames="type_, data, expected, deprecate_large_types, "
        "slice_, zeropad",
    )
    def test_round_trip(
        self,
        metadata,
        type_,
        data,
        expected,
        deprecate_large_types,
        slice_,
        zeropad,
    ):
        """Insert ``data`` into a column of ``type_`` and verify it reads
        back as ``expected`` (or as the slice-adjusted input when
        ``expected`` is None), then verify a NULL round trip.

        :param type_: SQLAlchemy type object under test.
        :param data: raw payload, or the name of a binary fixture file.
        :param expected: explicit expected result, or None to derive it
            from ``data``/``slice_``/``zeropad``.
        :param deprecate_large_types: dialect flag the test engine must
            be configured with.
        :param slice_: truncate the input to this many bytes before
            insert, if not None.
        :param zeropad: fixed-width BINARY pads short values with NULs;
            when True, append one ``\\x00`` to the expected slice.
        """
        # Use the default engine only when its deprecate_large_types flag
        # already matches; otherwise build a dedicated engine so the type
        # renders as intended.
        if (
            testing.db.dialect.deprecate_large_types
            is not deprecate_large_types
        ):
            engine = engines.testing_engine(
                options={"deprecate_large_types": deprecate_large_types}
            )
        else:
            engine = testing.db

        binary_table = Table(
            "binary_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", type_),
        )
        binary_table.create(engine)

        # Fixture-file names are passed as plain strings; resolve them to
        # actual bytes here.
        if isinstance(data, str) and data in (
            "binary_data_one.dat",
            "binary_data_two.dat",
        ):
            data = self._load_stream(data)

        if slice_ is not None:
            data = data[0:slice_]

        if expected is None:
            if zeropad:
                # BINARY(n) is fixed width; the server right-pads short
                # values with NUL bytes.
                expected = data[0:slice_] + b"\x00"
            else:
                expected = data

        with engine.begin() as conn:
            conn.execute(binary_table.insert(), dict(data=data))

            # Verify via both a Core select and a textual statement with
            # result-type coercion.
            eq_(conn.scalar(select(binary_table.c.data)), expected)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data
                    )
                ),
                expected,
            )

            conn.execute(binary_table.delete())

            # NULL must round-trip as None through both paths as well.
            conn.execute(binary_table.insert(), dict(data=None))
            eq_(conn.scalar(select(binary_table.c.data)), None)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data
                    )
                ),
                None,
            )

    def _load_stream(self, name, len_=3000):
        """Return up to ``len_`` bytes of the fixture file ``name``,
        located two directories above this test module."""
        # Context manager guarantees the handle is closed even if read()
        # raises (the original open()/close() pair could leak it).
        with open(
            os.path.join(os.path.dirname(__file__), "..", "..", name), "rb"
        ) as fp:
            return fp.read(len_)