def test_base(self):
    """A 'writer' may create a version/ticket/patch; a 'reader' gets
    Unauthorized on commit; a plain 'user' sees no project at all.

    Fix: ``Binary`` requires a bytes payload (passing str raises, as the
    Binary unit tests in this file demonstrate), so use ``b'foo'``.
    """
    with self.new_access('writer').repo_cnx() as cnx:
        projeid = cnx.execute('Project P').one()
        afile = cnx.create_entity('XFile', data=Binary(b'foo'))
        ver = new_version(cnx, projeid)
        tick = new_ticket(cnx, projeid, ver)
        patch = new_patch(cnx, tick, afile)
        cnx.commit()
    with self.new_access('reader').repo_cnx() as cnx:
        projeid = cnx.execute('Project P').get_entity(0, 0)
        afile = cnx.create_entity('XFile', data=Binary(b'foo'))
        ver = new_version(cnx, projeid, u'0.2.0')
        tick = new_ticket(cnx, projeid, ver)
        patch = new_patch(cnx, tick, afile)
        with self.assertRaises(Unauthorized):
            cnx.commit()
        cnx.rollback()
        ver = new_version(cnx, projeid, u'0.3.0')
        with self.assertRaises(Unauthorized):
            cnx.commit()
    with self.new_access('user').repo_cnx() as cnx:
        self.assertEqual(0, cnx.execute('Project P').rowcount)
        self.assertEqual(0, cnx.execute('Any P,E WHERE E project P').rowcount)
def _process_form_value(self, form):
    """Extract the file uploaded for this field from the posted form.

    Returns a ``Binary`` (with its ``filename`` attribute set) holding the
    file content, or None when detachment was requested or the file is
    empty. Raises ``UnmodifiedField`` when nothing relevant was posted.

    Fix: corrected the "mutiple" typo in the warning message.
    """
    posted = form._cw.form
    if self.input_name(form, u'__detach') in posted:
        # drop current file value on explicitly asked to detach
        return None
    try:
        value = posted[self.input_name(form)]
    except KeyError:
        # raise UnmodifiedField instead of returning None, since the later
        # will try to remove already attached file if any
        raise UnmodifiedField()
    # value is a 2-uple (filename, stream) or a list of such
    # tuples (multiple files)
    try:
        if isinstance(value, list):
            value = value[0]
            form.warning('multiple files provided, however '
                         'only the first will be picked')
        filename, stream = value
    except ValueError:
        raise UnmodifiedField()
    # XXX avoid in memory loading of posted files. Requires Binary handling changes...
    value = Binary(stream.read())
    if not value.getvalue():
        # usually an unexistant file
        value = None
    else:
        # set filename on the Binary instance, may be used later in hooks
        value.filename = normalize_filename(filename)
    return value
def test_bfss_update_with_different_extension_rolled_back(self):
    """A rolled-back data/name/format update must leave the original file
    untouched: same path, same '.txt' extension, same content."""
    with self.admin_access.repo_cnx() as cnx:
        f1 = cnx.create_entity('File', data=Binary(b'some data'),
                               data_format=u'text/plain',
                               data_name=u'foo.txt')
        # NOTE: do not use cw_set() which would automatically
        # update f1's local dict. We want the pure rql version to work
        cnx.commit()
        old_path = self.fspath(cnx, f1)
        old_data = f1.data.getvalue()
        self.assertTrue(osp.isfile(old_path))
        self.assertEqual(osp.splitext(old_path)[1], '.txt')
        cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
                    'F data_format %(df)s WHERE F eid %(f)s',
                    {'d': Binary(b'some other data'),
                     'f': f1.eid,
                     'dn': u'bar.jpg',
                     'df': u'image/jpeg'})
        cnx.rollback()
        # after rollback the entity must still point at the old file,
        # with the old extension and the old content
        f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File',
                         {'f': f1.eid}).get_entity(0, 0)
        new_path = self.fspath(cnx, f2)
        new_data = f2.data.getvalue()
        self.assertTrue(osp.isfile(new_path))
        self.assertEqual(osp.splitext(new_path)[1], '.txt')
        self.assertEqual(old_path, new_path)
        self.assertEqual(old_data, new_data)
def _process_form_value_with_suffix(self, form, suffix=u''):
    """ add suffix parameter & use it

    Looks up the posted file under the suffixed input name; returns a
    Binary with its filename set, None for a detach request or an empty
    file, and raises ff.UnmodifiedField when nothing was posted.
    """
    form_data = form._cw.form
    # an explicit detach request drops the current file value
    if self.input_name(form, u'__detach') in form_data:
        return None
    input_key = self.input_name(form, suffix)
    if input_key not in form_data:
        # raise UnmodifiedField instead of returning None, since the later
        # will try to remove already attached file if any
        raise ff.UnmodifiedField()
    value = form_data[input_key]
    # value is a 2-uple (filename, stream)
    try:
        filename, stream = value
    except ValueError:
        raise ff.UnmodifiedField()
    # XXX avoid in memory loading of posted files. Requires Binary handling changes...
    content = Binary(stream.read())
    if content.getvalue():
        # set filename on the Binary instance, may be used later in hooks
        content.filename = ff.normalize_filename(filename)
        return content
    # usually an unexistant file
    return None
def test_init(self):
    """Binary() accepts no argument or any bytes-like payload, but
    rejects unicode text."""
    Binary()
    for payload in (b'toto', bytearray(b'toto'), memoryview(b'toto')):
        Binary(payload)
    # TypeError is raised by BytesIO if python runs with -O
    with self.assertRaises((AssertionError, TypeError)):
        Binary(u'toto')
def test_write(self):
    """Binary.write() accepts any bytes-like chunk, but rejects unicode
    text."""
    binary = Binary()
    for chunk in (b'toto', bytearray(b'toto'), memoryview(b'toto')):
        binary.write(chunk)
    # TypeError is raised by BytesIO if python runs with -O
    with self.assertRaises((AssertionError, TypeError)):
        binary.write(u'toto')
def test_from_file_to_file(self):
    """Round-trip a Binary through a file on disk: to_file then
    from_file must restore the original payload."""
    with tempdir() as dpath:
        fpath = osp.join(dpath, 'binary.bin')
        with open(fpath, 'wb') as fobj:
            Binary(b'binaryblob').to_file(fobj)
        restored = Binary.from_file(fpath)
        self.assertEqual(restored.getvalue(), b'binaryblob')
def test_npts_import(self):
    """Importing a non-periodic time series from each supported format
    yields the same timestamped array as the directly created one.

    Fix: read the data file through a context manager instead of leaking
    the handle from ``open(...).read()``.
    """
    with self.admin_access.repo_cnx() as cnx:
        orig = self._create_npts(cnx)
        cnx.commit()
        for ext, fmt in (('.csv', 'text/csv'),):
            if not is_supported(ext):
                continue
            fname = self.datapath('npts' + ext)
            with open(fname, 'rb') as fobj:
                blob = Binary(fobj.read())
            blob.filename = fname
            ts = cnx.create_entity('NonPeriodicTimeSeries', data=blob)
            self.assertEqual(orig.timestamped_array(),
                             ts.timestamped_array())
def grok_data(self):
    """Compress and pickle the timestamps array into
    ``cw_edited['timestamps']`` after the parent class processed the main
    data; do nothing when the data was already processed."""
    # XXX when data is a csv/txt/xl file, we want to read timestamps in
    # there too
    # XXX hooks won't catch change to timestamps
    if super(NPTSImportAdapter, self).grok_data():
        # already processed
        return
    numpy_array = self.grok_timestamps()
    payload = zlib.compress(pickle.dumps(numpy_array, protocol=2))
    tstamp_data = Binary()
    tstamp_data.write(payload)
    self.entity.cw_edited['timestamps'] = tstamp_data
    self.entity.timestamps_array = numpy_array
def entity_added(self, entity, attr):
    """an entity using this storage for attr has been added

    Returns the attribute content as a Binary (or None when no value was
    given). In fs_importing mode the posted value is assumed to be the
    path of an existing file, which is read back; otherwise the content is
    written to a new file and the attribute is rewritten to hold that
    file's path.
    """
    if entity._cw.transaction_data.get('fs_importing'):
        # importing from the filesystem: the attribute value holds the
        # path of an already existing file; read its content back
        binary = Binary.from_file(entity.cw_edited[attr].getvalue())
        entity._cw_dont_cache_attribute(attr, repo_side=True)
    else:
        binary = entity.cw_edited.pop(attr)
        if binary is not None:
            fd, fpath = self.new_fs_path(entity, attr)
            # bytes storage used to store file's path
            binary_obj = Binary(fpath.encode('utf-8'))
            entity.cw_edited.edited_attribute(attr, binary_obj)
            self._writecontent(fd, binary)
            # register the new file so it is removed on rollback
            AddFileOp.get_instance(entity._cw).add_data(fpath)
    return binary
def resize(self, size):
    """Return a Binary stream with the image resized to *size* (a 'WxH'
    string), saved in its original format; the stream's filename is taken
    from the IDownloadable adapter. Raises UnResizeable when no image
    format can be derived from the content type."""
    dimensions = tuple(int(dim) for dim in size.split('x'))
    idownloadable = self.entity.cw_adapt_to('IDownloadable')
    ctype = idownloadable.download_content_type()
    fmt = ctype and ctype.split('/', 1)[1] or None
    if fmt is None:
        self.error('unable to resize')
        raise UnResizeable
    pilimg = pilopen(Binary(idownloadable.download_data()))
    pilimg.thumbnail(dimensions, ANTIALIAS)
    stream = Binary()
    pilimg.save(stream, fmt)
    stream.seek(0)
    stream.filename = idownloadable.download_file_name()
    return stream
def test_rdef2rql(self):
    # rdef2rql on the (CWRType, String) 'description_format' definition
    # must yield the CWAttribute INSERT followed by its format and size
    # constraint INSERTs, in that order
    self.assertListEqual(
        [('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
          'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,'
          'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
          'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
          'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
          {'se': None, 'rt': None, 'oe': None,
           'description': u'', 'internationalizable': True,
           'fulltextindexed': False, 'ordernum': 3,
           'defaultval': Binary.zpickle(u'text/plain'), 'indexed': False,
           'formula': None, 'cardinality': u'?1'}),
         ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
          'WHERE CT eid %(ct)s, EDEF eid %(x)s',
          {'x': None,
           'value': u'{"msg": null, "values": ["text/rest", "text/markdown", '
                    '"text/html", "text/plain"]}',
           'ct': 'FormatConstraint_eid'}),
         ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
          'WHERE CT eid %(ct)s, EDEF eid %(x)s',
          {'x': None, 'value': u'{"max": 50, "min": null, "msg": null}',
           'ct': 'SizeConstraint_eid'})],
        list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')],
                      cstrtypemap)))
def test_editablefilefield(self):
    """Rendering an EditableFileField for a text file produces the file
    input, advanced format/encoding inputs, the detach checkbox and an
    inline edition textarea holding the current content."""
    class EFFForm(EntityFieldsForm):
        # field under test: editable file with its format/encoding fields
        data = EditableFileField(
            format_field=StringField('data_format', max_length=50,
                                     eidparam=True, role='subject'),
            encoding_field=StringField('data_encoding', max_length=20,
                                       eidparam=True, role='subject'),
            eidparam=True, role='subject')
    with self.admin_access.web_request() as req:
        file = req.create_entity('File', data_name=u"pouet.txt",
                                 data_encoding=u'UTF-8',
                                 data=Binary(b'new widgets system'))
        form = EFFForm(req, redirect_path='perdu.com', entity=file)
        # NOTE(review): line breaks in the expected HTML reconstructed to
        # match assertMultiLineEqual conventions -- verify against renderer
        self.assertMultiLineEqual(
            self._render_entity_field(req, 'data', form),
            '''<input id="data-subject:%(eid)s" name="data-subject:%(eid)s" type="file" value="" />
<a href="javascript: toggleVisibility('data-subject:%(eid)s-advanced')" title="show advanced fields"><img src="http://testing.fr/cubicweb/data/puce_down.png" alt="show advanced fields"/></a>
<div id="data-subject:%(eid)s-advanced" class="hidden">
<label for="data_format-subject:%(eid)s">data_format</label><input id="data_format-subject:%(eid)s" maxlength="50" name="data_format-subject:%(eid)s" size="45" type="text" value="text/plain" /><br/>
<label for="data_encoding-subject:%(eid)s">data_encoding</label><input id="data_encoding-subject:%(eid)s" maxlength="20" name="data_encoding-subject:%(eid)s" size="20" type="text" value="UTF-8" /><br/>
</div>
<br/>
<input name="data-subject__detach:%(eid)s" type="checkbox" />
detach attached file
<p><b>You can either submit a new file using the browse button above, or choose to remove already uploaded file by checking the "detach attached file" check-box, or edit file content online with the widget below.</b></p>
<textarea cols="80" name="data-subject:%(eid)s" onkeyup="autogrow(this)" rows="3">new widgets system</textarea>''' % {'eid': file.eid})
def convert_defaultval(cwattr, default):
    """Migration helper: convert a legacy string default value of *cwattr*
    to its typed form and return it zpickled in a Binary.

    Returns None when there is no default, and *default* unchanged when it
    is already a Binary (idempotency for partially migrated instances).
    Date-like defaults keep the NOW/TODAY keywords stored as strings.
    """
    from decimal import Decimal
    import yams
    from cubicweb import Binary
    if default is None:
        return
    if isinstance(default, Binary):
        # partially migrated instance, try to be idempotent
        return default
    atype = cwattr.to_entity[0].name
    if atype == 'Boolean':
        # boolean attributes with default=False were stored as ''
        assert default in ('True', 'False', ''), repr(default)
        default = default == 'True'
    elif atype in ('Int', 'BigInt'):
        default = int(default)
    elif atype == 'Float':
        default = float(default)
    elif atype == 'Decimal':
        default = Decimal(default)
    elif atype in ('Date', 'Datetime', 'TZDatetime', 'Time'):
        try:
            # handle NOW and TODAY, keep them stored as strings
            yams.KEYWORD_MAP[atype][default.upper()]
            default = default.upper()
        except KeyError:
            # otherwise get an actual date or datetime
            default = yams.DATE_FACTORY_MAP[atype](default)
    else:
        # any other type must already be a plain string
        assert atype == 'String', atype
    return Binary.zpickle(default)
def test_bfss_storage(self):
    """BFSS files appear read-only on disk and follow the transaction's
    commit/rollback semantics for create, update and delete.

    Fix: file contents were read with bare ``open(path).read()`` calls
    that leaked file handles; use a small helper with a context manager.
    """
    def readfile(path):
        # read through a context manager so handles are not leaked
        with open(path) as fobj:
            return fobj.read()
    with self.admin_access.web_request() as req:
        cnx = req.cnx
        f1 = self.create_file(req)
        filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid))
        self.assertEqual(len(filepaths), 1, filepaths)
        expected_filepath = filepaths[0]
        # file should be read only
        self.assertFalse(os.access(expected_filepath, os.W_OK))
        self.assertEqual(readfile(expected_filepath), 'the-data')
        cnx.rollback()
        self.assertFalse(osp.isfile(expected_filepath))
        filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid))
        self.assertEqual(len(filepaths), 0, filepaths)
        f1 = self.create_file(req)
        cnx.commit()
        filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid))
        self.assertEqual(len(filepaths), 1, filepaths)
        expected_filepath = filepaths[0]
        self.assertEqual(readfile(expected_filepath), 'the-data')
        # add f1 back to the entity cache with req as _cw
        f1 = req.entity_from_eid(f1.eid)
        f1.cw_set(data=Binary(b'the new data'))
        cnx.rollback()
        self.assertEqual(readfile(expected_filepath), 'the-data')
        f1.cw_delete()
        self.assertTrue(osp.isfile(expected_filepath))
        cnx.rollback()
        self.assertTrue(osp.isfile(expected_filepath))
        f1.cw_delete()
        cnx.commit()
        self.assertFalse(osp.isfile(expected_filepath))
def source_execute(self, source, session, value):
    """Read back an attribute stored through this storage: *value* holds
    the path of the file containing the actual data; return its content as
    a Binary, or None (with a critical log) when the file cannot be read.

    Fix: the original did ``return Binary(fpath)``, wrapping the *path*
    instead of the file content -- and ``Binary(fpath)`` never raises the
    OSError the except clause guards against. Use ``Binary.from_file`` as
    the sibling ``callback`` method does.
    """
    fpath = source.binary_to_str(value)
    try:
        return Binary.from_file(fpath)
    except OSError as ex:
        source.critical("can't open %s: %s", fpath, ex)
        return None
def _set_cookie(self, response):
    """Persist the session dict into a CWSession entity, then let the base
    class set the session cookie when (re)issuance is needed.

    Returns the base-class result when a cookie was set, True otherwise
    (meaning no cookie update was necessary).
    """
    # Save the value in the database
    data = Binary(pickle.dumps(dict(self)))
    sessioneid = self.sessioneid
    with self.request.registry['cubicweb.repository'].internal_cnx() as cnx:
        if not sessioneid:
            # first save: create the CWSession entity
            session = cnx.create_entity('CWSession', cwsessiondata=data)
            sessioneid = session.eid
        else:
            try:
                session = cnx.entity_from_eid(sessioneid)
            except UnknownEid:
                # Might occur if CWSession entity got dropped (e.g.
                # the whole db got recreated) while user's cookie is
                # still valid. We recreate the CWSession in this case.
                sessioneid = cnx.create_entity('CWSession',
                                               cwsessiondata=data).eid
            else:
                session.cw_set(cwsessiondata=data)
        cnx.commit()
    # Only if needed actually set the cookie
    if self.new or self.accessed - self.renewed > self._reissue_time:
        # reset the dict so it only carries the entity reference
        dict.clear(self)
        dict.__setitem__(self, 'sessioneid', sessioneid)
        return super(CWSession, self)._set_cookie(response)
    return True
def test_filefield(self):
    """Rendering a plain FileField produces the file input, the advanced
    format/encoding inputs and the detach checkbox (no inline edition)."""
    class FFForm(EntityFieldsForm):
        # field under test: file upload with its format/encoding fields
        data = FileField(
            format_field=StringField(name='data_format', max_length=50,
                                     eidparam=True, role='subject'),
            encoding_field=StringField(name='data_encoding', max_length=20,
                                       eidparam=True, role='subject'),
            eidparam=True, role='subject')
    with self.admin_access.web_request() as req:
        file = req.create_entity('File', data_name=u"pouet.txt",
                                 data_encoding=u'UTF-8',
                                 data=Binary(b'new widgets system'))
        form = FFForm(req, redirect_path='perdu.com', entity=file)
        # NOTE(review): line breaks in the expected HTML reconstructed to
        # match assertMultiLineEqual conventions -- verify against renderer
        self.assertMultiLineEqual(
            self._render_entity_field(req, 'data', form),
            '''<input id="data-subject:%(eid)s" name="data-subject:%(eid)s" type="file" value="" />
<a href="javascript: toggleVisibility('data-subject:%(eid)s-advanced')" title="show advanced fields"><img src="http://testing.fr/cubicweb/data/puce_down.png" alt="show advanced fields"/></a>
<div id="data-subject:%(eid)s-advanced" class="hidden">
<label for="data_format-subject:%(eid)s">data_format</label><input id="data_format-subject:%(eid)s" maxlength="50" name="data_format-subject:%(eid)s" size="45" type="text" value="text/plain" /><br/>
<label for="data_encoding-subject:%(eid)s">data_encoding</label><input id="data_encoding-subject:%(eid)s" maxlength="20" name="data_encoding-subject:%(eid)s" size="20" type="text" value="UTF-8" /><br/>
</div>
<br/>
<input name="data-subject__detach:%(eid)s" type="checkbox" />
detach attached file''' % {'eid': file.eid})
def setup_database(self):
    """Create the 'Babar' project and its 'Celeste' subproject, each with a
    version, a ticket (with requirement card), a patch and a documentation
    folder.

    Fix: ``Binary`` requires bytes payloads; the original passed str
    literals ('foo', 'How I became King', ...), which fail under Python 3.
    """
    with self.admin_access.repo_cnx() as cnx:
        proj = cnx.create_entity('Project', name=u'Babar')
        projeid = proj.eid
        ver = new_version(cnx, projeid)
        tick = new_ticket(cnx, projeid, ver)
        card = new_card(cnx)
        tick.cw_set(requirement=card)
        afile = cnx.create_entity('XFile', data=Binary(b'foo'))
        patch = new_patch(cnx, tick, afile)
        doc1 = cnx.create_entity('XFile', data=Binary(b'How I became King'))
        fold1 = cnx.create_entity('Folder', name=u'Babar documentation',
                                  element=doc1, documents=projeid)
        card = new_card(cnx, u'Some doc bit')
        fold1.cw_set(element=card)
        # a subproject
        proj = cnx.create_entity('Project', name=u'Celeste',
                                 subproject_of=proj)
        projeid = proj.eid
        ver = new_version(cnx, projeid)
        tick = new_ticket(cnx, projeid, ver, name=u'write bio',
                          descr=u'do it')
        card = new_card(cnx, u'Write me')
        tick.cw_set(requirement=card)
        afile = cnx.create_entity('XFile', data=Binary(b'foo'))
        patch = new_patch(cnx, tick, afile, name=u'bio part one')
        doc2 = cnx.create_entity('XFile', data=Binary(b'How I met Babar'))
        fold2 = cnx.create_entity('Folder', name=u'Celeste bio',
                                  element=doc2, documents=projeid)
        card = new_card(cnx, u'A general doc item')
        fold2.cw_set(element=card)
        cnx.commit()
def setup_database(self):
    """Create the users needed by the security tests, including one whose
    password is stored with the legacy des_crypt scheme."""
    super(BaseSecurityTC, self).setup_database()
    with self.admin_access.client_cnx() as cnx:
        self.create_user(cnx, u'iaminusersgrouponly')
        # local renamed from 'hash' to avoid shadowing the builtin
        pwhash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
        self.create_user(cnx, u'oldpassword',
                         password=Binary(pwhash.encode('ascii')))
def test_ts_import(self):
    """Importing a daily time series from each supported file format
    yields the same timestamped array as the directly created one.

    Fix: read the data file through a context manager instead of leaking
    the handle from ``open(...).read()``.
    """
    with self.admin_access.repo_cnx() as cnx:
        orig = self._create_ts(cnx, granularity=u'daily')
        cnx.commit()
        formats = (
            ('.xls', 'application/vnd.ms-excel'),
            ('.xlsx', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
            ('.csv', 'text/csv'),
        )
        for ext, fmt in formats:
            if not is_supported(ext):
                continue
            fname = self.datapath('ts' + ext)
            with open(fname, 'rb') as fobj:
                blob = Binary(fobj.read())
            blob.filename = fname
            ts = cnx.create_entity('TimeSeries', granularity=u'daily',
                                   start_date=datetime(2009, 10, 1),
                                   data=blob)
            self.assertEqual(orig.timestamped_array(),
                             ts.timestamped_array())
def test_score_entity_selector(self):
    """'image' and 'ehtml' views are selectable only for files whose name
    matches the corresponding content type; neither applies to plain
    text."""
    with self.admin_access.web_request() as req:
        select = self.vreg['views'].select
        req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim'))
        # image/ehtml primary view priority
        rset = req.execute('File X WHERE X data_name "bim.png"')
        self.assertIsInstance(select('image', req, rset=rset),
                              idownloadable.ImageView)
        self.assertRaises(NoSelectableObject,
                          select, 'ehtml', req, rset=rset)
        req.create_entity('File', data_name=u'bim.html',
                          data=Binary(b'<html>bam</html'))
        # image/ehtml primary view priority
        rset = req.execute('File X WHERE X data_name "bim.html"')
        self.assertIsInstance(select('ehtml', req, rset=rset),
                              idownloadable.EHTMLView)
        self.assertRaises(NoSelectableObject,
                          select, 'image', req, rset=rset)
        req.create_entity('File', data_name=u'bim.txt', data=Binary(b'boum'))
        # image/ehtml primary view priority
        rset = req.execute('File X WHERE X data_name "bim.txt"')
        self.assertRaises(NoSelectableObject,
                          select, 'image', req, rset=rset)
        self.assertRaises(NoSelectableObject,
                          select, 'ehtml', req, rset=rset)
def entity_updated(self, entity, attr): """an entity using this storage for attr has been updated""" # get the name of the previous file containing the value oldpath = self.current_fs_path(entity, attr) if entity._cw.transaction_data.get('fs_importing'): # If we are importing from the filesystem, the file already exists. # We do not need to create it but we need to fetch the content of # the file as the actual content of the attribute fpath = entity.cw_edited[attr].getvalue() entity._cw_dont_cache_attribute(attr, repo_side=True) assert fpath is not None binary = Binary.from_file(fpath) else: # We must store the content of the attributes # into a file to stay consistent with the behaviour of entity_add. # Moreover, the BytesFileSystemStorage expects to be able to # retrieve the current value of the attribute at anytime by reading # the file on disk. To be able to rollback things, use a new file # and keep the old one that will be removed on commit if everything # went ok. # # fetch the current attribute value in memory binary = entity.cw_edited.pop(attr) if binary is None: fpath = None else: # Get filename for it fd, fpath = self.new_fs_path(entity, attr) # write attribute value on disk self._writecontent(fd, binary) # Mark the new file as added during the transaction. # The file will be removed on rollback AddFileOp.get_instance(entity._cw).add_data(fpath) # reinstall poped value if fpath is None: entity.cw_edited.edited_attribute(attr, None) else: # register the new location for the file. binary_obj = Binary(fpath.encode('utf-8')) entity.cw_edited.edited_attribute(attr, binary_obj) if oldpath is not None and oldpath != fpath: # Mark the old file as useless so the file will be removed at # commit. DeleteFileOp.get_instance(entity._cw).add_data(oldpath) return binary
def test_bfss_update_with_existing_data(self):
    """Updating file data through raw RQL replaces the stored content,
    both before and after commit."""
    with self.admin_access.repo_cnx() as cnx:
        f1 = cnx.create_entity('File', data=Binary(b'some data'),
                               data_format=u'text/plain', data_name=u'foo')
        # NOTE: do not use cw_set() which would automatically
        # update f1's local dict. We want the pure rql version to work
        cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
                    {'d': Binary(b'some other data'), 'f': f1.eid})
        self.assertEqual(f1.data.getvalue(), b'some other data')
        cnx.commit()
        rset = cnx.execute('Any F WHERE F eid %(f)s, F is File',
                           {'f': f1.eid})
        f2 = rset.get_entity(0, 0)
        self.assertEqual(f2.data.getvalue(), b'some other data')
def test_interface_selector(self):
    """The primary view selected for an IDownloadable entity is the
    IDownloadable-specific one."""
    with self.admin_access.web_request() as req:
        req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim'))
        # image primary view priority
        rset = req.execute('File X WHERE X data_name "bim.png"')
        selected = self.vreg['views'].select('primary', req, rset=rset)
        self.assertIsInstance(selected,
                              idownloadable.IDownloadablePrimaryView)
def test_bfss_fs_importing_doesnt_touch_path(self):
    """In fs_importing mode the path given as data is stored as-is."""
    with self.admin_access.repo_cnx() as cnx:
        cnx.transaction_data['fs_importing'] = True
        filepath = osp.abspath(__file__)
        encoded_path = filepath.encode(sys.getfilesystemencoding())
        f1 = cnx.create_entity('File',
                               data=Binary(encoded_path),
                               data_format=u'text/plain',
                               data_name=u'foo')
        self.assertEqual(self.fspath(cnx, f1), filepath)
def test_etype_priority(self):
    """Selector scores: the exact etype (is_instance('FakeFile')) outranks
    adaptable('IDownloadable'), which outranks is_instance('Any').

    Fix: use assertGreater instead of assertTrue(a > b, (a, b)) -- same
    pass/fail behaviour with a clearer failure message.
    """
    with self.admin_access.web_request() as req:
        f = req.create_entity('FakeFile', data_name=u'hop.txt',
                              data=Binary(b'hop'), data_format=u'text/plain')
        rset = f.as_rset()
        anyscore = is_instance('Any')(f.__class__, req, rset=rset)
        idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset)
        self.assertGreater(idownscore, anyscore)
        filescore = is_instance('FakeFile')(f.__class__, req, rset=rset)
        self.assertGreater(filescore, idownscore)
def callback(self, source, cnx, value):
    """sql generator callback when some attribute with a custom storage is
    accessed: *value* holds the file path; return the file content as a
    Binary, or None (logging a critical message) when it cannot be read.
    """
    fpath = source.binary_to_str(value)
    try:
        content = Binary.from_file(fpath)
    except EnvironmentError as ex:
        source.critical("can't open %s: %s", value, ex)
        return None
    return content
def grok_data(self):
    """ self.data is something such as an excel file or CSV data or a
    pickled numpy array or an already processed binary. Ensure it's a
    pickle numpy array before storing object in db.

    If data seems to be already processed, return True, else return False.

    Raises ValidationError for unsupported file types or arrays that are
    not 1-dimensional and non-empty.
    """
    entity = self.entity
    try:
        filename = entity.data.filename.lower()
    except AttributeError:
        # no filename: data is either an already processed Binary or a
        # raw numpy array
        data = entity.data
        if isinstance(data, Binary):
            return True
        numpy_array = data
    else:
        # a named file was uploaded: delegate parsing to the adapter
        # registered for its type, if any
        adapter = self._cw.vreg['adapters'].select_or_none(
            'source_to_numpy_array', self._cw, entity=entity,
            filename=filename)
        if adapter is None:
            msg = self._cw._('Unsupported file type %s') % entity.data.filename
            raise ValidationError(entity.eid, {'data': msg})
        numpy_array = adapter.to_numpy_array(entity.data, filename)
    if numpy_array.ndim != 1:
        raise ValidationError(entity.eid,
                              {'data': _('data must be a 1-dimensional array')})
    if numpy_array.size == 0:
        raise ValidationError(entity.eid,
                              {'data': _('data must have at least one value')})
    # store the array compressed and pickled
    data = Binary()
    compressed_data = zlib.compress(pickle.dumps(numpy_array, protocol=2))
    data.write(compressed_data)
    entity.cw_edited['data'] = data
    entity.array = numpy_array
    return False
def test_bfss_update_with_fs_importing(self):
    """With fs_importing, updating data with a new path switches the
    stored file to that path and removes the old file on commit.

    Fix: ``open(new_fspath, 'w').write(...)`` never closed the handle, so
    the data may not be flushed before it is read back; use a context
    manager.
    """
    with self.admin_access.repo_cnx() as cnx:
        f1 = cnx.create_entity('File', data=Binary(b'some data'),
                               data_format=u'text/plain', data_name=u'foo')
        old_fspath = self.fspath(cnx, f1)
        cnx.transaction_data['fs_importing'] = True
        new_fspath = osp.join(self.tempdir, 'newfile.txt')
        with open(new_fspath, 'w') as fobj:
            fobj.write('the new data')
        cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
                    {'d': Binary(new_fspath.encode(
                        sys.getfilesystemencoding())),
                     'f': f1.eid})
        cnx.commit()
        self.assertEqual(f1.data.getvalue(), b'the new data')
        self.assertEqual(self.fspath(cnx, f1), new_fspath)
        self.assertFalse(osp.isfile(old_fspath))
def thumbnail(self, shadow=False, size=None):
    """Return a Binary stream holding a PNG thumbnail of the image, its
    ``filename`` taken from the IThumbnail adapter.

    size -- 'WxH' string; defaults to the 'image-thumb-size' config option
    shadow -- deprecated and unused; lay shadows out with CSS instead

    Raises UnResizeable when PIL cannot open the image data.
    """
    if size is None:
        size = self._cw.vreg.config['image-thumb-size']
    size = tuple(int(s) for s in size.split('x'))
    idownloadable = self.entity.cw_adapt_to('IDownloadable')
    data = idownloadable.download_data()
    try:
        pilimg = pilopen(Binary(data))
    except IOError:
        raise UnResizeable
    if shadow:
        # NOTE(review): self.warning looks like a logger-style call;
        # passing DeprecationWarning as an extra argument with no matching
        # format placeholder in the message seems suspect -- verify
        self.warning('[1.15.0] the shadow parameter is now unused '
                     'and you should use css rules to lay shadows out',
                     DeprecationWarning)
    pilimg.thumbnail(size, ANTIALIAS)
    stream = Binary()
    # thumbnails are always saved as PNG, whatever the source format
    pilimg.save(stream, 'png')
    stream.seek(0)
    ithumbnail = self.entity.cw_adapt_to('IThumbnail')
    stream.filename = ithumbnail.thumbnail_file_name()
    return stream
def test_binary_in_values(self):
    """A Binary attribute value can be imported through ExtEntity
    values."""
    with self.admin_access.repo_cnx() as cnx:
        importer = self.importer(cnx)
        # Use a list to put a Binary in "values" (since Binary is not
        # hashable, a set cannot be used).
        ext_entity = ExtEntity('Personne', 1,
                               {'photo': [Binary(b'poilu')],
                                'nom': set([u'alf'])})
        importer.import_entities([ext_entity])
        cnx.commit()
        personne = cnx.find('Personne').one()
        self.assertEqual(personne.photo.getvalue(), b'poilu')