def __init__(self, **kwargs):
    """Initialize the blob file, applying defaults for missing metadata.

    Every keyword argument becomes an instance attribute; ``filename``
    and ``content_type`` receive defaults when absent.  A fresh, empty
    ``Blob`` is attached as ``_blob``.
    """
    super(NyBlobFile, self).__init__(**kwargs)
    kwargs.setdefault('filename', None)
    kwargs.setdefault('content_type', 'application/octet-stream')
    # items() works on both Python 2 and 3; iteritems() is Python 2 only
    for key, value in kwargs.items():
        setattr(self, key, value)
    self._blob = Blob()
def __init__(self, stream=None, mimetype=None, title=u''):
    """Create a File object.

    ``stream`` is a filelike object (an object with a ``read`` method
    taking a size argument) or ``None``; when ``None`` the attached
    blob starts out empty.  ``title`` is a string or Unicode object.
    ``mimetype`` is one of:

    - ``None``, meaning ``application/octet-stream`` (the default);
    - a mimetype string such as ``image/gif``;
    - the constant :attr:`substanced.file.USE_MAGIC`, which sniffs the
      mimetype from the stream content (when ``stream`` is supplied)
      using the ``python-magic`` library.

    .. warning::

       On non-Linux systems, :attr:`substanced.file.USE_MAGIC` needs
       extra dependencies; see :ref:`optional_dependencies`.
    """
    self.blob = Blob()
    self.mimetype = mimetype or 'application/octet-stream'
    self.title = title or u''
    if stream is None:
        return
    # Only sniff content when the caller explicitly asked for it.
    hint = USE_MAGIC if mimetype is USE_MAGIC else None
    self.upload(stream, mimetype_hint=hint)
def saveFileToBlob(filepath):
    """Copy the file at ``filepath`` into a new ZODB Blob and return it.

    Both handles are managed by ``with`` so they are closed even when
    the read or write raises (the blob writer was previously leaked on
    failure).
    """
    blob = Blob()
    with blob.open('w') as bfile:
        with open(filepath, 'rb') as fi:
            bfile.write(fi.read())
    return blob
def create(html, css):
    """Render *html*/*css* to PDF via the configured princexml server.

    Returns a ZODB ``Blob`` holding the PDF bytes, or ``None`` when no
    server URL is configured.  Raises ``PDFGenerationError`` on a
    non-200 response and re-raises any other error after logging the
    traceback.
    """
    try:
        registry = getUtility(IRegistry)
        prince_server_url = registry.get(
            'castle.princexml_server_url', 'http://localhost:6543/convert')
        if prince_server_url is None:
            logger.warning(
                'error converting pdf, no princexmlserver defined')
            return
        logger.info('start converting pdf')
        xml = fromstring(html)
        # save styles
        resp = requests.post(
            prince_server_url,
            data={'xml': tostring(xml), 'css': json.dumps(css)})
        if resp.status_code != 200:
            raise PDFGenerationError('status: {}, data: {}'.format(
                resp.status_code, resp.text))
        data = resp.content
        blob = Blob()
        # context manager closes the blob writer even if write() fails
        with blob.open('w') as bfile:
            bfile.write(data)
        return blob
    except Exception:
        logger.info(traceback.format_exc())
        raise
def __call__(self):
    from ZODB.blob import Blob
    from plone.app.blob.iterators import BlobStreamIterator
    # Build a throwaway blob holding a greeting and stream it back.
    blob = Blob()
    writer = blob.open('w')
    try:
        writer.write('Hi, Blob!')
    finally:
        writer.close()
    return BlobStreamIterator(blob)
def create(html, css):
    """Render *html*/*css* to PDF via the configured princexml server.

    Returns a ZODB ``Blob`` with the PDF bytes, or ``None`` when no
    server URL is configured.  Re-raises any conversion error after
    logging the traceback.
    """
    try:
        registry = getUtility(IRegistry)
        prince_server_url = registry.get('castle.princexml_server_url',
                                         'http://localhost:6543/convert')
        if prince_server_url is None:
            # logger.warn() is deprecated in favour of logger.warning()
            logger.warning('error converting pdf')
            return
        logger.info('start converting pdf')
        xml = fromstring(html)
        # save styles
        resp = requests.post(prince_server_url, data={
            'xml': tostring(xml),
            'css': json.dumps(css)
        })
        data = resp.content
        blob = Blob()
        # context manager closes the blob writer even if write() fails
        with blob.open('w') as bfile:
            bfile.write(data)
        return blob
    except Exception:
        # a bare except: would also trap SystemExit/KeyboardInterrupt
        logger.info(traceback.format_exc())
        raise
def crop_factory(fieldname, direction='keep', **parameters):
    """Scale the image payload into a fresh blob.

    Returns ``(blob, image_format, dimensions)``.

    NOTE(review): ``data`` is a free variable from the enclosing scope —
    confirm it maps to the stored image payload.
    """
    blob = Blob()
    # close the writer even when scaleImage raises
    with blob.open('w') as result:
        _, image_format, dimensions = scaleImage(
            data['data'], result=result, **parameters)
    return blob, image_format, dimensions
def _deserialize(kls, data):
    """Rebuild a Blob from a serialized mapping with a base64 ``data`` key."""
    # Decode first so a malformed payload never leaves a dangling writer.
    raw = base64.b64decode(data['data'])
    blob = Blob()
    with blob.open('w') as bfile:
        bfile.write(raw)
    return blob
def _deserialize(cls, data):
    """Rebuild a Blob from a serialized mapping with a base64 ``data`` key."""
    # Decode first so a malformed payload never leaves a dangling writer.
    raw = base64.b64decode(data['data'])
    blob = Blob()
    with blob.open('w') as bfile:
        bfile.write(raw)
    return blob
class NamedBlobFile(Persistent):
    """A file stored in a ZODB BLOB, with a filename"""

    filename = FieldProperty(INamedFile['filename'])

    def __init__(self, data='', contentType='', filename=None):
        # Derive a concrete content type from the filename when only the
        # generic default (or nothing) was supplied.
        if (
            filename is not None
            and contentType in ('', 'application/octet-stream')
        ):
            contentType = get_contenttype(filename=filename)
        self.contentType = contentType
        self._blob = Blob()
        # Touch the blob once so the underlying file exists immediately,
        # even when data is empty.
        f = self._blob.open('w')
        f.write('')
        f.close()
        self._setData(data)
        self.filename = filename

    def open(self, mode='r'):
        # Any write invalidates the cached size (see the size property).
        if mode != 'r' and 'size' in self.__dict__:
            del self.__dict__['size']
        return self._blob.open(mode)

    def openDetached(self):
        # Open the last committed revision, outside the transaction.
        return open(self._blob.committed(), 'rb')

    def _setData(self, data):
        if 'size' in self.__dict__:
            del self.__dict__['size']
        # Search for a storable that is able to store the data
        dottedName = '.'.join((data.__class__.__module__,
                               data.__class__.__name__))
        log.debug('Storage selected for data: %s', dottedName)
        storable = getUtility(IStorage, name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open('r')
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        # Seek-to-end size, cached on the instance until the next write.
        if 'size' in self.__dict__:
            return self.__dict__['size']
        reader = self._blob.open()
        reader.seek(0, 2)
        size = int(reader.tell())
        reader.close()
        self.__dict__['size'] = size
        return size

    def getSize(self):
        return self.size
def test_merge_blobs_on_open(self):
    # Verifies that multiple blob_chunk rows for one oid are merged into
    # a single chunk when the blob is next opened from a fresh cache.
    from ZODB.DB import DB
    from ZODB.blob import Blob
    import transaction
    storage = self._closing(self.make_storage(
        blob_dir='blobs', shared_blob_dir=False))
    db = self._closing(DB(storage))
    conn = db.open()
    blob = Blob()
    base_chunk = b"This is my base blob."
    with blob.open('w') as f:
        f.write(base_chunk)
    conn.root().blob = blob
    transaction.commit()
    # Insert some extra chunks. Get them big to be sure we loop
    # properly
    second_chunk = b'second chunk' * 800
    cursor = conn._storage._store_connection.cursor
    cursor.execute("""
    INSERT INTO blob_chunk (zoid, chunk_num, tid, chunk)
    SELECT zoid, 1, tid, lo_from_bytea(0, %s)
    FROM blob_chunk WHERE chunk_num = 0;
    """, (second_chunk,))
    third_chunk = b'third chunk' * 900
    cursor.execute("""
    INSERT INTO blob_chunk (zoid, chunk_num, tid, chunk)
    SELECT zoid, 2, tid, lo_from_bytea(0, %s)
    FROM blob_chunk WHERE chunk_num = 0;
    """, (third_chunk,))
    cursor.execute('SELECT COUNT(*) FROM blob_chunk')
    self.assertEqual(3, cursor.fetchone()[0])
    cursor.connection.commit()
    # Now open again and find everything put together.
    # But we need to use a new blob dir, because
    # we changed data behind its back.
    conn.close()
    db.close()
    storage = self._closing(self.make_storage(blob_dir='blobs2',
                                              shared_blob_dir=False,
                                              zap=False))
    db = self._closing(DB(storage))
    conn = db.open()
    blob = conn.root().blob
    with blob.open('r') as f:
        data = f.read()
    cursor = conn._storage._load_connection.cursor
    cursor.execute('SELECT COUNT(*) FROM blob_chunk')
    self.assertEqual(1, cursor.fetchone()[0])
    self.assertEqual(data, base_chunk + second_chunk + third_chunk)
    conn.close()
    db.close()
def __call__(self):
    from ZODB.blob import Blob
    from plone.app.blob.iterators import BlobStreamIterator
    # Create a small blob and hand it back wrapped in a stream iterator.
    blob = Blob()
    with blob.open("w") as handle:
        handle.write("Hi, Blob!")
    return BlobStreamIterator(blob)
def saveFileToBlob(filepath):
    """Copy the file at ``filepath`` into a new Blob and return it.

    Opens the source in binary mode (text mode corrupts binary payloads
    on platforms that translate newlines) and closes both handles even
    when the copy raises.
    """
    blob = Blob()
    with open(filepath, 'rb') as fi, blob.open('w') as bfile:
        bfile.write(fi.read())
    return blob
def __init__(self, data=None, compress=False):
    """Set up an empty blob file, optionally seeding it with *data*."""
    self._compress = compress
    self._blob = Blob()
    if not data:
        return
    self.set(data)
def crop_factory(fieldname, direction='keep', **parameters):
    """Scale the image payload into a fresh blob.

    Returns ``(blob, image_format, dimensions)``.

    NOTE(review): ``data`` is a free variable from the enclosing scope —
    confirm it holds the raw image payload.
    """
    blob = Blob()
    # close the writer even when scaleImage raises
    with blob.open('w') as result:
        _, image_format, dimensions = scaleImage(data['data'],
                                                 result=result,
                                                 **parameters)
    return blob, image_format, dimensions
def testBlobbableOFSFileWithoutFileName(self):
    # Adapting an OFS File with no filename should feed the blob and
    # fall back to an empty filename.
    obj = File('foo', 'Foo', getFile('plone.pdf'), 'application/pdf')
    adapted = IBlobbable(obj)
    target = Blob()
    adapted.feed(target)
    expected = getFile('plone.pdf').read()
    self.assertEqual(target.open('r').read(), expected)
    self.assertEqual(adapted.filename(), '')
    self.assertEqual(adapted.mimetype(), 'application/pdf')
def _store_resized_image(self, key, data):
    """ store a blob image as attribute """
    blob = Blob()
    # close the writer even if write() raises
    with blob.open('w') as f:
        f.write(data['data'])
    setattr(self, key, blob)
    # attribute set via setattr(): flag the persistent object as dirty
    self._p_changed = 1
class NamedBlobFile(Persistent):
    """A file stored in a ZODB BLOB, with a filename"""

    filename = FieldProperty(INamedFile['filename'])

    def __init__(self, data=b'', contentType='', filename=None):
        # Derive a concrete content type from the filename when only the
        # generic default (or nothing) was supplied.
        if (
            filename is not None
            and contentType in ('', 'application/octet-stream')
        ):
            contentType = get_contenttype(filename=filename)
        self.contentType = contentType
        self._blob = Blob()
        # Touch the blob once so the underlying file exists immediately,
        # even when data is empty.
        f = self._blob.open('w')
        f.write(b'')
        f.close()
        self._setData(data)
        self.filename = filename

    def open(self, mode='r'):
        # Any write invalidates the cached size (see the size property).
        if mode != 'r' and 'size' in self.__dict__:
            del self.__dict__['size']
        return self._blob.open(mode)

    def openDetached(self):
        # Open the last committed revision, outside the transaction.
        return open(self._blob.committed(), 'rb')

    def _setData(self, data):
        if 'size' in self.__dict__:
            del self.__dict__['size']
        # Search for a storable that is able to store the data
        dottedName = '.'.join((data.__class__.__module__,
                               data.__class__.__name__))
        log.debug('Storage selected for data: %s', dottedName)
        storable = getUtility(IStorage, name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open('r')
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        # Seek-to-end size, cached on the instance until the next write.
        if 'size' in self.__dict__:
            return self.__dict__['size']
        with self._blob.open() as reader:
            reader.seek(0, 2)
            size = int(reader.tell())
        self.__dict__['size'] = size
        return size

    def getSize(self):
        return self.size
def addfile(fname):
    """Read *fname* and return a new Blob holding its contents.

    Fixed: both handles are now closed even when an error occurs (the
    source file was previously never closed on failure), and print is
    written in the parenthesized form that works on Python 2 and 3.
    """
    myblob = Blob()
    with open(fname) as o:
        data = o.read()
    with myblob.open('w') as b:
        b.write(data)
        print(b.name)
    return myblob
def __init__(self, mimetype, fd):
    """Record *mimetype* and copy the stream *fd* into a new blob."""
    self.mimetype = mimetype
    blob = Blob()
    # context manager closes the blob writer even if the copy fails
    with blob.open('w') as blobfd:
        copyfileobj(fd, blobfd)
    self.data = blob
def create(self, context, **parameters):
    """Scale the stored image into a new blob.

    Returns ``(blob, format, dimensions)``, or ``None`` when the field
    holds no value.
    """
    wrapper = self.field.get(context)
    if not wrapper:
        return
    blob = Blob()
    # the source reader was previously leaked; close it explicitly
    source = wrapper.getBlob().open('r')
    try:
        with blob.open('w') as result:
            _, format, dimensions = scaleImage(
                source, result=result, **parameters)
    finally:
        source.close()
    return blob, format, dimensions
def addfile(fname):
    """Read *fname* and return a new Blob holding its contents.

    Fixed: both handles are now closed even when an error occurs (the
    source file was previously never closed on failure), and print is
    written in the parenthesized form that works on Python 2 and 3.
    """
    myblob = Blob()
    with open(fname) as o:
        data = o.read()
    with myblob.open('w') as b:
        b.write(data)
        print(b.name)
    return myblob
class NamedBlobFile(Persistent):
    """A file stored in a ZODB BLOB, with a filename"""

    implements(INamedBlobFile)

    filename = FieldProperty(INamedFile["filename"])

    def __init__(self, data="", contentType="", filename=None):
        # Derive a concrete content type from the filename when only the
        # generic default (or nothing) was supplied.
        if filename is not None and contentType in ("", "application/octet-stream"):
            contentType = get_contenttype(filename=filename)
        self.contentType = contentType
        self._blob = Blob()
        # Touch the blob once so the underlying file exists immediately.
        f = self._blob.open("w")
        f.write("")
        f.close()
        self._setData(data)
        self.filename = filename

    def open(self, mode="r"):
        # Any write invalidates the cached size (see the size property).
        if mode != "r" and "size" in self.__dict__:
            del self.__dict__["size"]
        return self._blob.open(mode)

    def openDetached(self):
        # Open the last committed revision, outside the transaction.
        return open(self._blob.committed(), "rb")

    def _setData(self, data):
        if "size" in self.__dict__:
            del self.__dict__["size"]
        # Search for a storable that is able to store the data
        dottedName = ".".join((data.__class__.__module__, data.__class__.__name__))
        storable = getUtility(IStorage, name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open("r")
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        # Seek-to-end size, cached on the instance until the next write.
        if "size" in self.__dict__:
            return self.__dict__["size"]
        reader = self._blob.open()
        reader.seek(0, 2)
        size = int(reader.tell())
        reader.close()
        self.__dict__["size"] = size
        return size

    def getSize(self):
        return self.size
def testBlobbableOFSImage(self):
    # An OFS Image with a filename should feed its data into the blob
    # and report both filename and mimetype.
    gif = getImage()
    image = Image('foo', 'Foo', StringIO(gif))
    image.filename = 'foo.gif'
    adapted = IBlobbable(image)
    target = Blob()
    adapted.feed(target)
    self.assertEqual(target.open('r').read(), gif)
    self.assertEqual(adapted.filename(), 'foo.gif')
    self.assertEqual(adapted.mimetype(), 'image/gif')
def __init__(self, data='', contentType='', filename=None):
    """Store *data* in a fresh blob, deriving the content type from
    *filename* when none (or only the generic default) was given."""
    generic = ('', 'application/octet-stream')
    if filename is not None and contentType in generic:
        contentType = get_contenttype(filename=filename)
    self.contentType = contentType
    self._blob = Blob()
    # touch the blob so its file exists before any data is stored
    writer = self._blob.open('w')
    writer.write('')
    writer.close()
    self._setData(data)
    self.filename = filename
def getBlob(self, filename):
    """Create a blob from a file"""
    from ZODB.blob import Blob
    myblob = Blob()
    # Close both handles (the source file was previously never closed)
    # and read in binary mode so binary payloads survive intact.
    with open(filename, 'rb') as o, myblob.open('w') as b:
        b.write(o.read())
    return myblob
class File(Persistent):
    """A persistent content component storing binary file data."""

    implements(zope.app.publication.interfaces.IFileContent,
               interfaces.IBlobFile)

    def __init__(self, data='', contentType=''):
        self.contentType = contentType
        self._blob = Blob()
        # Touch the blob so its file exists even for empty data.
        f = self._blob.open('w')
        f.write('')
        f.close()
        self._setData(data)

    def open(self, mode='r'):
        # Any write invalidates the cached size (see the size property).
        if mode != 'r' and 'size' in self.__dict__:
            del self.__dict__['size']
        return self._blob.open(mode)

    def openDetached(self):
        # Open the last committed revision, outside the transaction.
        return open(self._blob.committed(), 'rb')

    def _setData(self, data):
        if 'size' in self.__dict__:
            del self.__dict__['size']
        # Search for a storable that is able to store the data
        dottedName = ".".join((data.__class__.__module__,
                               data.__class__.__name__))
        storable = zope.component.getUtility(interfaces.IStorage,
                                             name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open('r')
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        # Seek-to-end size, cached on the instance until the next write.
        if 'size' in self.__dict__:
            return self.__dict__['size']
        reader = self._blob.open()
        reader.seek(0, 2)
        size = int(reader.tell())
        reader.close()
        self.__dict__['size'] = size
        return size

    def getSize(self):
        return self.size
def screenshot(blob):
    """Render page 1 of *blob* as a 1000px GIF and return it as a new Blob."""
    blobfi = openBlob(blob)
    try:
        filepath = docsplit.dump_image(blobfi.read(), '1000', 'gif')
    finally:
        # close the source reader even when dump_image fails
        blobfi.close()
    result = Blob()
    with result.open('w') as bfile, open(filepath, 'rb') as sfi:
        bfile.write(sfi.read())
    return result
def testBlobbableBinaryFile(self):
    # Wrap a binary file in a Binary object and check that feeding a
    # blob preserves content, filename and mimetype.
    path = os.path.join(os.path.dirname(__file__), 'data', 'image.gif')
    with open(path, 'rb') as f:
        wrapped = Binary(f)
        wrapped.filename = 'image.gif'
        adapted = IBlobbable(wrapped)
        target = Blob()
        adapted.feed(target)
        self.assertEqual(target.open('r').read(),
                         getFile('image.gif').read())
        self.assertEquals(adapted.filename(), 'image.gif')
        self.assertEquals(adapted.mimetype(), 'image/gif')
def put(self, key, src, cache_tag, headers=()):
    """Stream *src* into a new blob registered under *key*.

    Copies in 2 MiB chunks so arbitrarily large sources never have to
    fit in memory; the writer is closed even when ``src.read()`` raises.
    """
    blobfile = Blob()
    self.persistent_map[key] = (headers, cache_tag, blobfile)
    with blobfile.open('w') as f:
        while True:
            data = src.read(1 << 21)
            if not data:
                break
            # the size accumulator in the original was never used
            f.write(data)
def test_multiple_blobs(self):
    # We can open more than one blob object during the course of a single
    # transaction, and we can write to a single blob using
    # multiple handles.
    blob1, conn1 = self._make_and_commit_blob(close=False)
    root = conn1.root()
    with blob1.open('a') as f:
        f.write(b'woot')
    blob1_second_object = root['blob1']
    self.assertEqual(blob1_second_object._p_oid, blob1._p_oid)
    self.assertIs(blob1_second_object, blob1)
    with blob1_second_object.open('a') as f:
        f.write(b'!')
    blob2 = Blob()
    blob2_contents = b'this is blob 2'
    with blob2.open('w') as f:
        f.write(blob2_contents)
    root['blob2'] = blob2
    transaction.commit()
    # Since we committed the current transaction above, the aggregate
    # changes we've made to blob, blob1a (these refer to the same object) and
    # blob2 (a different object) should be evident::
    blob1_contents = self.DATA1 + b'woot!'
    self._check_blob_contents(blob1, blob1_contents)
    self._check_blob_contents(blob1_second_object, blob1_contents)
    self._check_blob_contents(blob2, blob2_contents)
    transaction.abort()
    conn1.close()
    if IStorageUndoable.providedBy(self.blob_storage):
        # Whether or not we're using a shared or unshared
        # blob-dir, when we keep history we will have three blob
        # files on disk: two revisions of blob1, and one revision
        # of blob2
        self.assertEqual(3, self._count_blobs_in_directory())
    else:
        # If we are a shared blob directory, we didn't remove anything;
        # that waits until pack time.
        if IAuthoritativeBlobHelper.providedBy(
                self.blob_storage.blobhelper):
            self.assertEqual(3, self._count_blobs_in_directory())
        else:
            # We will just have two blobs on disk. The earlier revision
            # was automatically removed.
            self.assertTrue(
                ICachedBlobHelper.providedBy(self.blob_storage.blobhelper))
            self.assertEqual(2, self._count_blobs_in_directory())
def testBlobbableBinaryFile(self):
    # Feeding a Binary-wrapped file into a blob must preserve the exact
    # bytes as well as the filename and mimetype metadata.
    data_path = os.path.join(
        os.path.dirname(__file__), 'data', 'image.gif')
    with open(data_path, 'rb') as f:
        binary = Binary(f)
        binary.filename = 'image.gif'
        adapted = IBlobbable(binary)
        target = Blob()
        adapted.feed(target)
        stored = target.open('r').read()
        self.assertEqual(stored, getFile('image.gif').read())
        self.assertEquals(adapted.filename(), 'image.gif')
        self.assertEquals(adapted.mimetype(), 'image/gif')
def __init__(self, title=u'', stream=None, mimetype=u'', filename=u'',
             creator=u''):
    """Record file metadata and attach an empty blob; upload *stream*
    when one is supplied."""
    self.mimetype = mimetype
    self.filename = filename
    self.title = unicode(title)
    owner = unicode(creator)
    self.creator = owner
    self.modified_by = owner
    self.blobfile = Blob()
    if stream is None:
        return
    self.upload(stream)
def __init__(self, data='', contentType=''):
    """Record the content type and store *data* in a fresh blob."""
    self.contentType = contentType
    self._blob = Blob()
    # touch the blob so its file exists before any data is stored
    writer = self._blob.open('w')
    writer.write('')
    writer.close()
    self._setData(data)
def __init__(self, title='', artist='', timings='', stream=None):
    """Capture track metadata and copy *stream* into a new blob."""
    self.title = title
    self.artist = artist
    self.timings = timings
    self.blob = Blob()
    sink = self.blob.open("w")
    try:
        shutil.copyfileobj(stream, sink)
    finally:
        sink.close()
def testUndo(self):
    # Write two revisions of a blob, undo the second, and verify the
    # blob's contents revert to the first state.
    database = DB(self._storage)
    connection = database.open()
    root = connection.root()
    transaction.begin()
    blob = Blob()
    with blob.open('w') as f:
        f.write(b'this is state 1')
    root['blob'] = blob
    transaction.commit()
    transaction.begin()
    blob = root['blob']
    with blob.open('w') as f:
        f.write(b'this is state 2')
    transaction.commit()
    # Undo the most recent transaction (the state-2 write).
    database.undo(database.undoLog(0, 1)[0]['id'])
    transaction.commit()
    with blob.open('r') as f:
        data = f.read()
    self.assertEqual(data, b'this is state 1')
    database.close()
def testDeepCopyCanInvalidate(self):
    """
    Tests regression for invalidation problems related to missing
    readers and writers values in cloned objects (see
    http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
    """
    import ZODB.MappingStorage
    database = DB(
        ZODB.blob.BlobStorage('blobs',
                              ZODB.MappingStorage.MappingStorage()))
    connection = database.open()
    root = connection.root()
    transaction.begin()
    root['blob'] = Blob()
    transaction.commit()
    # Clone the committed blob via a pickle round trip.
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(root['blob'])
    u = Unpickler(stream)
    stream.seek(0)
    clone = u.load()
    clone._p_invalidate()
    # it should also be possible to open the cloned blob
    # (even though it won't contain the original data)
    clone.open()
    # tearDown
    database.close()
def test_packing_with_uncommitted_data_non_undoing(self):
    """
    This covers regression for bug #130459.

    When uncommitted data exists it formerly was written to the root of
    the blob_directory and confused our packing strategy. We now use a
    separate temporary directory that is ignored while packing.
    """
    blob_storage = self.blob_storage
    database = self.database
    connection = database.open()
    root = connection.root()
    root['blob'] = Blob()
    connection.add(root['blob'])
    with root['blob'].open('w') as f:
        _ = f.write(b'test')
    # Packing must succeed even though the write above is uncommitted.
    blob_storage.pack(time.time(), referencesf)
    # Clean up:
    transaction.abort()
    connection.close()
    blob_storage.close()
    database.close()
def evolve(root, registry):
    """Evolve step 7: re-sniff the mimetype of every File from its blob.

    Files whose blob is missing (POSKeyError) get a fresh empty blob and
    the generic octet-stream type.  A no-op when python-magic is absent.
    """
    logger.info('Running substanced evolve step 7: reset all blob mimetypes '
                'to nominal USE_MAGIC value')
    if not magic:
        return
    objectmap = find_objectmap(root)
    if objectmap is None:
        return
    oids = objectmap.get_extent(get_dotted_name(File))
    if oids is None:
        return
    # one Magic instance suffices; the original rebuilt it per chunk
    sniffer = magic.Magic(mime=True)
    for oid in oids:
        f = objectmap.object_for(oid)
        try:
            if f.get_size():
                # only the first chunk is needed to sniff the type;
                # "with" closes the reader (it was previously leaked)
                with f.blob.open('r') as fp:
                    for chunk in chunks(fp):
                        f.mimetype = sniffer.from_buffer(chunk)
                        break
        except POSKeyError:
            logger.error(
                'Missing blob for file %s, overwriting with '
                'empty blob' % resource_path(f))
            f.blob = Blob()
            f.mimetype = 'application/octet-stream'
def test_loadblob_tmpstore(self): """ This is a test for assuring that the TmpStore's loadBlob implementation falls back correctly to loadBlob on the backend. """ # First, let's setup a regular database and store a blob: blob_storage = self.blob_storage database = self.database connection = database.open() root = connection.root() root['blob'] = Blob() connection.add(root['blob']) with root['blob'].open('w') as f: f.write(b'test') transaction.commit() blob_oid = root['blob']._p_oid tid = connection._storage.lastTransaction() # Now we open a database with a TmpStore in front: from ZODB.Connection import TmpStore tmpstore = TmpStore(blob_storage) # We can access the blob correctly: self.assertEqual(tmpstore.loadBlob(blob_oid, tid), blob_storage.loadBlob(blob_oid, tid)) connection.close() blob_storage.close() tmpstore.close() database.close()
class File(BaseContent):
    # Content-type registration metadata.
    meta_type = 'File'
    label = 'File'
    admin_view_path = '@@info'
    file = ''  # FIXME: temporary while we fix the edit form
    add_form = FileAddForm
    edit_form = FileEditForm

    def __init__(self):
        BaseContent.__init__(self)
        self.blob = Blob()

    def upload(self, mimetype, filename, stream):
        # Record metadata and stream the payload into the blob, keeping
        # the total number of bytes written for later display.
        self.mimetype = mimetype
        self.filename = filename
        f = self.blob.open('w')
        size = upload_stream(stream, f)
        f.close()
        self.size = size

    def get_icon(self, request):
        """Return icon and alternate text that correspond to the MIME
        type of the file.
        """
        label, icon = ICONS.get(self.mimetype,
                                ('Unknown', 'mime_unknown.png'))
        icon = request.static_url('petrel:static/img/%s' % icon)
        return icon, label
def __init__(self, path, ct, size):
    """Record the download details and mint a random access code."""
    from karl.utils import get_random_string
    self.path = path
    self.ct = ct
    self.size = size
    self.blob = Blob()
    # 25-character random token identifying this entry
    self.code = get_random_string(25)
class Tutorial(Persistent):
    implements(ITutorial)

    # Class-level defaults for the optional attachment.
    attachment_name = None
    attachment_data = None
    attachment_mimetype = None

    def __init__(self, title, author_name, text, url=None, code=None,
                 language=None, stream=None, file_name=None,
                 mime_type=None):
        self.title = title
        self.author_name = author_name
        self.url = url
        self.text = text
        self.code = code
        self.language = language
        self.date = datetime.now()
        # An empty blob is always created; upload() below fills it only
        # when a stream was actually supplied.
        self.attachment_data = Blob()
        self.attachment_name = file_name
        self.attachment_mimetype = mime_type
        self.upload(stream)

    def upload(self, stream):
        # No-op when stream is None (tutorial without an attachment).
        if stream is not None:
            f = self.attachment_data.open('w')
            size = save_data(stream, f)
            f.close()
            self.attachment_size = size
class CommunityFile(Persistent):
    implements(ICommunityFile)

    modified_by = None  # Sorry, persistence
    is_image = False  # Sorry, persistence

    def __init__(self, title, stream, mimetype, filename, creator=u''):
        self.title = unicode(title)
        self.mimetype = mimetype
        self.filename = filename
        self.creator = unicode(creator)
        self.modified_by = self.creator
        self.blobfile = Blob()
        self.upload(stream)
        self._init_image()

    def _init_image(self):
        # Flag image content and cache its dimensions; silently skip
        # anything PIL cannot parse.
        if not self.mimetype.startswith('image'):
            return
        try:
            image = PIL.Image.open(self.blobfile.open())
        except IOError:
            return
        self._thumbs = OOBTree()
        self.image_size = image.size
        self.is_image = True
        alsoProvides(self, IImage)

    def image(self):
        assert self.is_image, "Not an image."
        return PIL.Image.open(self.blobfile.open())

    def thumbnail(self, size):
        # Thumbnails are created lazily and memoized per "WxH" key.
        assert self.is_image, "Not an image."
        key = '%dx%d' % size
        thumbnail = self._thumbs.get(key, None)
        if thumbnail is None:
            self._thumbs[key] = thumbnail = Thumbnail(self.image(), size)
        return thumbnail

    def upload(self, stream):
        f = self.blobfile.open('w')
        size = upload_stream(stream, f)
        f.close()
        self.size = size
def set_remixing(self, stream):
    """Copy *stream* into the remixing blob and cache its duration."""
    if self.remixing_blob is None:
        self.remixing_blob = Blob()
    sink = self.remixing_blob.open("w")
    try:
        shutil.copyfileobj(stream, sink)
    finally:
        sink.close()
    # cache duration for use in progress
    uncommitted = self.remixing_blob._p_blob_uncommitted
    self.remixing_duration = audioread.audio_open(uncommitted).duration
def __init__(self, title, stream, mimetype, filename, creator=u''):
    """Store metadata, upload *stream*, and detect image content."""
    self.mimetype = mimetype
    self.filename = filename
    self.title = unicode(title)
    owner = unicode(creator)
    self.creator = owner
    self.modified_by = owner
    self.blobfile = Blob()
    self.upload(stream)
    self._init_image()
def __init__(self, title=u"", stream=None, mimetype=u"", filename=u"", creator=u""): self.title = unicode(title) self.mimetype = mimetype self.filename = filename self.creator = unicode(creator) self.modified_by = self.creator self.blobfile = Blob() if stream is not None: self.upload(stream)
def set_unmixed(self, stream):
    """Tee *stream* into both the dry and remixing blobs, then cache
    the shared duration."""
    if self.dry_blob is None:
        self.dry_blob = Blob()
    if self.remixing_blob is None:
        self.remixing_blob = Blob()
    chunk_size = 1 << 19  # 512K
    with self.dry_blob.open('w') as dry:
        with self.remixing_blob.open('w') as remixing:
            while True:
                data = stream.read(chunk_size)
                if not data:
                    break
                dry.write(data)
                remixing.write(data)
    # cache duration for use in progress
    duration = audioread.audio_open(
        self.dry_blob._p_blob_uncommitted).duration
    # both blobs hold identical bytes, so they share one duration
    self.dry_duration = duration
    self.remixing_duration = duration
class File(Persistent):
    # prevent view tab from sorting first (it would display the file when
    # manage_main clicked)
    __tab_order__ = ('properties', 'acl_edit', 'view')
    __propschema__ = fileschema

    def __init__(self, stream, mimetype='application/octet-stream'):
        self.mimetype = mimetype
        self.blob = Blob()
        self.upload(stream)

    def get_properties(self):
        # fp is None so the property form shows the existing file
        # instead of requiring a re-upload.
        filedata = dict(
            fp=None,
            uid=str(self.__objectid__),
            filename=self.__name__,
        )
        return dict(
            name=self.__name__,
            file=filedata,
            mimetype=self.mimetype
        )

    def set_properties(self, struct):
        newname = struct['name']
        file = struct['file']
        mimetype = struct['mimetype']
        if file and file.get('fp'):
            # A replacement upload was provided: store it, guessing the
            # mimetype/name from the uploaded filename when needed.
            fp = file['fp']
            fp.seek(0)
            self.upload(fp)
            filename = file['filename']
            mimetype = mimetypes.guess_type(filename, strict=False)[0]
            if not newname:
                newname = filename
        if not mimetype:
            mimetype = 'application/octet-stream'
        self.mimetype = mimetype
        oldname = self.__name__
        if newname and newname != oldname:
            self.__parent__.rename(oldname, newname)

    def upload(self, stream):
        # An absent stream still produces a valid, empty blob.
        if not stream:
            stream = StringIO.StringIO()
        fp = self.blob.open('w')
        size = 0
        for chunk in chunks(stream):
            size += len(chunk)
            fp.write(chunk)
        fp.close()
        self.size = size