def test_binary(self):
    """Round-trip plain binary, IMAGE, fixed-width sliced binary and
    pickled values through ``binary_table``, then verify the stored rows
    via both a Core ``select()`` and a raw ``text()`` statement carrying
    an explicit typemap.
    """
    testobj1 = pickleable.Foo('im foo 1')
    testobj2 = pickleable.Foo('im foo 2')
    testobj3 = pickleable.Foo('im foo 3')
    stream1 = self.load_stream('binary_data_one.dat')
    stream2 = self.load_stream('binary_data_two.dat')
    binary_table.insert().execute(
        primary_id=1,
        misc='binary_data_one.dat',
        data=stream1,
        data_image=stream1,
        data_slice=stream1[0:100],
        pickled=testobj1,
        mypickle=testobj3,
    )
    binary_table.insert().execute(
        primary_id=2,
        misc='binary_data_two.dat',
        data=stream2,
        data_image=stream2,
        data_slice=stream2[0:99],
        pickled=testobj2,
    )
    # TODO: pyodbc does not accept "None" for a VARBINARY column
    # (data=None) -- it fails with: [Microsoft][ODBC SQL Server
    # Driver][SQL Server]Implicit conversion from data type varchar
    # to varbinary is not allowed. Use the CONVERT function to run
    # this query. (257).  The "data" column is therefore omitted from
    # this row entirely rather than bound as NULL.
    binary_table.insert().execute(
        primary_id=3,
        misc='binary_data_two.dat',
        data_image=None,
        data_slice=stream2[0:99],
        pickled=None,
    )
    # Run the identical assertions against a Core select() and an
    # equivalent textual statement with explicit result types.
    for stmt in (
        binary_table.select(order_by=binary_table.c.primary_id),
        text(
            'select * from binary_table order by '
            'binary_table.primary_id',
            typemap=dict(
                data=mssql.MSVarBinary(8000),
                data_image=mssql.MSImage,
                data_slice=types.BINARY(100),
                pickled=PickleType,
                mypickle=MyPickleType),
            bind=testing.db),
    ):
        rows = stmt.execute().fetchall()
        eq_(list(stream1), list(rows[0]['data']))
        # BINARY(100) right-pads values shorter than 100 bytes with NULs.
        paddedstream = list(stream1[0:100])
        paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
        eq_(paddedstream, list(rows[0]['data_slice']))
        eq_(list(stream2), list(rows[1]['data']))
        eq_(list(stream2), list(rows[1]['data_image']))
        eq_(testobj1, rows[0]['pickled'])
        eq_(testobj2, rows[1]['pickled'])
        eq_(testobj3.moredata, rows[0]['mypickle'].moredata)
        eq_(rows[0]['mypickle'].stuff, 'this is the right stuff')
def _test_binary(self, deprecate_large_types):
    """Insert binary/pickled rows through an engine configured with the
    given ``deprecate_large_types`` flag, then verify the round trip via
    both a Core ``select()`` and a typemap-annotated ``text()`` statement.
    """
    foo1 = pickleable.Foo('im foo 1')
    foo2 = pickleable.Foo('im foo 2')
    foo3 = pickleable.Foo('im foo 3')
    stream1 = self._load_stream('binary_data_one.dat')
    stream2 = self._load_stream('binary_data_two.dat')
    engine = engines.testing_engine(
        options={"deprecate_large_types": deprecate_large_types})
    binary_table = self._fixture(engine)
    with engine.connect() as conn:
        conn.execute(
            binary_table.insert(),
            primary_id=1,
            misc='binary_data_one.dat',
            data=stream1,
            data_image=stream1,
            data_slice=stream1[0:100],
            pickled=foo1,
            mypickle=foo3,
        )
        conn.execute(
            binary_table.insert(),
            primary_id=2,
            misc='binary_data_two.dat',
            data=stream2,
            data_image=stream2,
            data_slice=stream2[0:99],
            pickled=foo2,
        )
    statements = (
        binary_table.select(order_by=binary_table.c.primary_id),
        text(
            'select * from binary_table order by '
            'binary_table.primary_id',
            typemap=dict(
                data=mssql.MSVarBinary(8000),
                data_image=mssql.MSImage,
                data_slice=types.BINARY(100),
                pickled=PickleType,
                mypickle=MyPickleType),
            bind=testing.db),
    )
    for stmt in statements:
        with engine.connect() as conn:
            rows = conn.execute(stmt).fetchall()
            eq_(list(stream1), list(rows[0]['data']))
            # BINARY(100) right-pads shorter values with NUL bytes.
            padded = list(stream1[0:100])
            padded.extend(['\x00'] * (100 - len(padded)))
            eq_(padded, list(rows[0]['data_slice']))
            eq_(list(stream2), list(rows[1]['data']))
            eq_(list(stream2), list(rows[1]['data_image']))
            eq_(foo1, rows[0]['pickled'])
            eq_(foo2, rows[1]['pickled'])
            eq_(foo3.moredata, rows[0]['mypickle'].moredata)
            eq_(rows[0]['mypickle'].stuff, 'this is the right stuff')
def test_binary(self):
    """Insert two rows of binary, image, sliced and pickled data, then
    confirm both a Core ``select()`` and an equivalent ``text()``
    statement (with an explicit typemap) return the original values.
    """
    foo1 = pickleable.Foo('im foo 1')
    foo2 = pickleable.Foo('im foo 2')
    foo3 = pickleable.Foo('im foo 3')
    stream1 = self.load_stream('binary_data_one.dat')
    stream2 = self.load_stream('binary_data_two.dat')
    binary_table.insert().execute(
        primary_id=1,
        misc='binary_data_one.dat',
        data=stream1,
        data_image=stream1,
        data_slice=stream1[0:100],
        pickled=foo1,
        mypickle=foo3,
    )
    binary_table.insert().execute(
        primary_id=2,
        misc='binary_data_two.dat',
        data=stream2,
        data_image=stream2,
        data_slice=stream2[0:99],
        pickled=foo2,
    )
    statements = (
        binary_table.select(order_by=binary_table.c.primary_id),
        text(
            'select * from binary_table order by '
            'binary_table.primary_id',
            typemap=dict(
                data=mssql.MSVarBinary(8000),
                data_image=mssql.MSImage,
                data_slice=types.BINARY(100),
                pickled=PickleType,
                mypickle=MyPickleType),
            bind=testing.db),
    )
    for stmt in statements:
        rows = stmt.execute().fetchall()
        eq_(list(stream1), list(rows[0]['data']))
        # BINARY(100) right-pads shorter values with NUL bytes.
        padded = list(stream1[0:100])
        padded.extend(['\x00'] * (100 - len(padded)))
        eq_(padded, list(rows[0]['data_slice']))
        eq_(list(stream2), list(rows[1]['data']))
        eq_(list(stream2), list(rows[1]['data_image']))
        eq_(foo1, rows[0]['pickled'])
        eq_(foo2, rows[1]['pickled'])
        eq_(foo3.moredata, rows[0]['mypickle'].moredata)
        eq_(rows[0]['mypickle'].stuff, 'this is the right stuff')
def test_custom_pickle(self):
    """A ``TypeDecorator`` over ``PickleType`` that tags ``stuff`` both on
    the way into and out of the database; the round trip must show both
    tags applied.
    """

    class TaggingPickleType(types.TypeDecorator):
        impl = PickleType

        def process_bind_param(self, value, dialect):
            # Tag the value as it heads to the database.
            if value:
                value.stuff = "BIND" + value.stuff
            return value

        def process_result_value(self, value, dialect):
            # Tag the value as it comes back out.
            if value:
                value.stuff = value.stuff + "RESULT"
            return value

    payload = pickleable.Foo("im foo 1")
    marked = pickleable.Foo("im foo 1")
    marked.stuff = "BINDim stuffRESULT"
    self._test_round_trip(TaggingPickleType, payload, expected=marked)
def test_plain_pickle(self):
    """A ``Foo`` object survives a round trip through a plain PickleType."""
    payload = pickleable.Foo("im foo 1")
    self._test_round_trip(PickleType, payload)
class BinaryTest(fixtures.TestBase):
    """Round-trip tests for the MSSQL binary datatypes.

    Each combination supplies: the column type, the input data (either a
    literal value or the name of a fixture file to load), the expected
    result (``None`` meaning "same as input"), the engine's
    ``deprecate_large_types`` setting, an optional slice length to apply
    to the input, and whether the server zero-pads the stored value.
    """

    __only_on__ = "mssql"
    __requires__ = ("non_broken_binary", )
    __backend__ = True

    @testing.combinations(
        (
            mssql.MSVarBinary(800),
            b("some normal data"),
            None,
            True,
            None,
            False,
        ),
        (
            mssql.VARBINARY("max"),
            "binary_data_one.dat",
            None,
            False,
            None,
            False,
        ),
        (
            mssql.VARBINARY("max"),
            "binary_data_one.dat",
            None,
            True,
            None,
            False,
        ),
        (
            sqltypes.LargeBinary,
            "binary_data_one.dat",
            None,
            False,
            None,
            False,
        ),
        (sqltypes.LargeBinary, "binary_data_one.dat", None, True, None,
         False),
        (mssql.MSImage, "binary_data_one.dat", None, True, None, False),
        (PickleType, pickleable.Foo("im foo 1"), None, True, None, False),
        (
            MyPickleType,
            pickleable.Foo("im foo 1"),
            pickleable.Foo("im foo 1", stuff="BINDim stuffRESULT"),
            True,
            None,
            False,
        ),
        (types.BINARY(100), "binary_data_one.dat", None, True, 100, False),
        (types.VARBINARY(100), "binary_data_one.dat", None, True, 100,
         False),
        (mssql.VARBINARY(100), "binary_data_one.dat", None, True, 100,
         False),
        (types.BINARY(100), "binary_data_two.dat", None, True, 99, True),
        (types.VARBINARY(100), "binary_data_two.dat", None, True, 99,
         False),
        (mssql.VARBINARY(100), "binary_data_two.dat", None, True, 99,
         False),
        argnames="type_, data, expected, deprecate_large_types, "
        "slice_, zeropad",
    )
    def test_round_trip(
        self,
        metadata,
        type_,
        data,
        expected,
        deprecate_large_types,
        slice_,
        zeropad,
    ):
        """Insert ``data`` into a single-column table of type ``type_``
        and assert it reads back as ``expected`` via both Core select()
        and a textual statement; also verify a NULL round trip.
        """
        # Only build a fresh engine when the default engine's
        # deprecate_large_types flag differs from the requested one.
        if (testing.db.dialect.deprecate_large_types
                is not deprecate_large_types):
            engine = engines.testing_engine(
                options={"deprecate_large_types": deprecate_large_types})
        else:
            engine = testing.db

        binary_table = Table(
            "binary_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", type_),
        )
        binary_table.create(engine)

        # A string parameter naming a fixture file means "load that file".
        if isinstance(data, str) and data in (
            "binary_data_one.dat",
            "binary_data_two.dat",
        ):
            data = self._load_stream(data)

        if slice_ is not None:
            data = data[0:slice_]

        if expected is None:
            if zeropad:
                # Fixed-width BINARY pads the stored value with NUL bytes;
                # here the 99-byte slice comes back padded to 100.
                expected = data[0:slice_] + b"\x00"
            else:
                expected = data

        with engine.begin() as conn:
            conn.execute(binary_table.insert(), dict(data=data))
            eq_(conn.scalar(select(binary_table.c.data)), expected)
            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data)),
                expected,
            )
            # NULL must also survive the round trip on both paths.
            conn.execute(binary_table.delete())
            conn.execute(binary_table.insert(), dict(data=None))
            eq_(conn.scalar(select(binary_table.c.data)), None)
            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data)),
                None,
            )

    def _load_stream(self, name, len_=3000):
        """Return up to ``len_`` bytes of the fixture file ``name`` located
        two directories above this module.
        """
        path = os.path.join(os.path.dirname(__file__), "..", "..", name)
        # Context manager guarantees the handle is closed even if the
        # read raises; the original open/close pair did not.
        with open(path, "rb") as fp:
            return fp.read(len_)