class NamedBlobFile(Persistent):
    """A file stored in a ZODB BLOB, with a filename"""

    filename = FieldProperty(INamedFile['filename'])

    def __init__(self, data='', contentType='', filename=None):
        if (
            filename is not None
            and contentType in ('', 'application/octet-stream')
        ):
            contentType = get_contenttype(filename=filename)
        self.contentType = contentType
        self._blob = Blob()
        f = self._blob.open('w')
        f.write('')
        f.close()
        self._setData(data)
        self.filename = filename

    def open(self, mode='r'):
        if mode != 'r' and 'size' in self.__dict__:
            del self.__dict__['size']
        return self._blob.open(mode)

    def openDetached(self):
        return open(self._blob.committed(), 'rb')

    def _setData(self, data):
        if 'size' in self.__dict__:
            del self.__dict__['size']
        # Search for a storable that is able to store the data
        dottedName = '.'.join((data.__class__.__module__,
                               data.__class__.__name__))
        log.debug('Storage selected for data: %s', dottedName)
        storable = getUtility(IStorage, name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open('r')
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        if 'size' in self.__dict__:
            return self.__dict__['size']
        reader = self._blob.open()
        reader.seek(0, 2)
        size = int(reader.tell())
        reader.close()
        self.__dict__['size'] = size
        return size

    def getSize(self):
        return self.size

class NamedBlobFile(Persistent):
    """A file stored in a ZODB BLOB, with a filename"""

    implements(INamedBlobFile)

    filename = FieldProperty(INamedFile["filename"])

    def __init__(self, data="", contentType="", filename=None):
        if filename is not None and contentType in ("", "application/octet-stream"):
            contentType = get_contenttype(filename=filename)
        self.contentType = contentType
        self._blob = Blob()
        f = self._blob.open("w")
        f.write("")
        f.close()
        self._setData(data)
        self.filename = filename

    def open(self, mode="r"):
        if mode != "r" and "size" in self.__dict__:
            del self.__dict__["size"]
        return self._blob.open(mode)

    def openDetached(self):
        return open(self._blob.committed(), "rb")

    def _setData(self, data):
        if "size" in self.__dict__:
            del self.__dict__["size"]
        # Search for a storable that is able to store the data
        dottedName = ".".join((data.__class__.__module__, data.__class__.__name__))
        storable = getUtility(IStorage, name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open("r")
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        if "size" in self.__dict__:
            return self.__dict__["size"]
        reader = self._blob.open()
        reader.seek(0, 2)
        size = int(reader.tell())
        reader.close()
        self.__dict__["size"] = size
        return size

    def getSize(self):
        return self.size

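# Not part of the original sources: a minimal usage sketch for the
# NamedBlobFile class above. It assumes plone.namedfile-style IStorage
# utilities for plain strings are registered (normally via ZCML), so that
# _setData() can look up a storable for the value passed in.
def _namedblobfile_usage_example():
    f = NamedBlobFile(data='hello world', filename=u'hello.txt')
    # contentType is guessed from the filename when not supplied explicitly
    reader = f.open('r')
    try:
        return reader.read(), f.size, f.contentType
    finally:
        reader.close()
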
class File(Persistent):
    """A persistent content component storing binary file data."""

    implements(zope.app.publication.interfaces.IFileContent,
               interfaces.IBlobFile)

    def __init__(self, data='', contentType=''):
        self.contentType = contentType
        self._blob = Blob()
        f = self._blob.open('w')
        f.write('')
        f.close()
        self._setData(data)

    def open(self, mode='r'):
        if mode != 'r' and 'size' in self.__dict__:
            del self.__dict__['size']
        return self._blob.open(mode)

    def openDetached(self):
        return open(self._blob.committed(), 'rb')

    def _setData(self, data):
        if 'size' in self.__dict__:
            del self.__dict__['size']
        # Search for a storable that is able to store the data
        dottedName = ".".join((data.__class__.__module__,
                               data.__class__.__name__))
        storable = zope.component.getUtility(interfaces.IStorage,
                                             name=dottedName)
        storable.store(data, self._blob)

    def _getData(self):
        fp = self._blob.open('r')
        data = fp.read()
        fp.close()
        return data

    _data = property(_getData, _setData)
    data = property(_getData, _setData)

    @property
    def size(self):
        if 'size' in self.__dict__:
            return self.__dict__['size']
        reader = self._blob.open()
        reader.seek(0, 2)
        size = int(reader.tell())
        reader.close()
        self.__dict__['size'] = size
        return size

    def getSize(self):
        return self.size

def crop_factory(fieldname, direction='keep', **parameters):
    # Note: `data` is not defined in this signature; it is presumably
    # provided by the enclosing scope (e.g. a closure over the raw image
    # data of the field being cropped).
    blob = Blob()
    result = blob.open('w')
    _, image_format, dimensions = scaleImage(
        data['data'], result=result, **parameters)
    result.close()
    return blob, image_format, dimensions

def _deserialize(kls, data):
    blob = Blob()
    bfile = blob.open('w')
    data = base64.b64decode(data['data'])
    bfile.write(data)
    bfile.close()
    return blob

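# Not part of the original sources: an illustrative round trip for
# _deserialize() above, mirroring the {'data': <base64 payload>} mapping
# the function expects.
def _deserialize_usage_example():
    import base64
    payload = {'data': base64.b64encode(b'raw bytes')}
    blob = _deserialize(None, payload)
    f = blob.open('r')
    try:
        return f.read()  # b'raw bytes'
    finally:
        f.close()
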
class File(BaseContent):
    meta_type = 'File'
    label = 'File'
    admin_view_path = '@@info'
    file = ''  # FIXME: temporary while we fix the edit form
    add_form = FileAddForm
    edit_form = FileEditForm

    def __init__(self):
        BaseContent.__init__(self)
        self.blob = Blob()

    def upload(self, mimetype, filename, stream):
        self.mimetype = mimetype
        self.filename = filename
        f = self.blob.open('w')
        size = upload_stream(stream, f)
        f.close()
        self.size = size

    def get_icon(self, request):
        """Return icon and alternate text that correspond to the MIME type
        of the file.
        """
        label, icon = ICONS.get(self.mimetype, ('Unknown', 'mime_unknown.png'))
        icon = request.static_url('petrel:static/img/%s' % icon)
        return icon, label

class Tutorial(Persistent):
    implements(ITutorial)

    attachment_name = None
    attachment_data = None
    attachment_mimetype = None

    def __init__(self, title, author_name, text, url=None, code=None,
                 language=None, stream=None, file_name=None, mime_type=None):
        self.title = title
        self.author_name = author_name
        self.url = url
        self.text = text
        self.code = code
        self.language = language
        self.date = datetime.now()
        self.attachment_data = Blob()
        self.attachment_name = file_name
        self.attachment_mimetype = mime_type
        self.upload(stream)

    def upload(self, stream):
        if stream is not None:
            f = self.attachment_data.open('w')
            size = save_data(stream, f)
            f.close()
            self.attachment_size = size

def __call__(self):
    from ZODB.blob import Blob
    from plone.app.blob.iterators import BlobStreamIterator
    myblob = Blob()
    with myblob.open('w') as fd:
        fd.write('Hi, Blob!')
    return BlobStreamIterator(myblob)

def __call__(self):
    from ZODB.blob import Blob
    from plone.app.blob.iterators import BlobStreamIterator
    myblob = Blob()
    f = myblob.open("w")
    f.write("Hi, Blob!")
    f.close()
    return BlobStreamIterator(myblob)

def saveFileToBlob(filepath):
    blob = Blob()
    # Open the source file in binary mode, since blob files are binary
    fi = open(filepath, 'rb')
    bfile = blob.open('w')
    bfile.write(fi.read())
    bfile.close()
    fi.close()
    return blob

def _store_resized_image(self, key, data):
    """ store a blob image as attribute """
    blob = Blob()
    f = blob.open('w')
    f.write(data['data'])
    f.close()
    setattr(self, key, blob)
    self._p_changed = 1

class CommunityFile(Persistent):
    implements(ICommunityFile)

    modified_by = None  # Sorry, persistence
    is_image = False    # Sorry, persistence

    def __init__(self, title, stream, mimetype, filename, creator=u''):
        self.title = unicode(title)
        self.mimetype = mimetype
        self.filename = filename
        self.creator = unicode(creator)
        self.modified_by = self.creator
        self.blobfile = Blob()
        self.upload(stream)
        self._init_image()

    def _init_image(self):
        if not self.mimetype.startswith('image'):
            return
        try:
            image = PIL.Image.open(self.blobfile.open())
        except IOError:
            return
        self._thumbs = OOBTree()
        self.image_size = image.size
        self.is_image = True
        alsoProvides(self, IImage)

    def image(self):
        assert self.is_image, "Not an image."
        return PIL.Image.open(self.blobfile.open())

    def thumbnail(self, size):
        assert self.is_image, "Not an image."
        key = '%dx%d' % size
        thumbnail = self._thumbs.get(key, None)
        if thumbnail is None:
            self._thumbs[key] = thumbnail = Thumbnail(self.image(), size)
        return thumbnail

    def upload(self, stream):
        f = self.blobfile.open('w')
        size = upload_stream(stream, f)
        f.close()
        self.size = size

def testBlobbableOFSFileWithoutFileName(self):
    obj = File('foo', 'Foo', getFile('plone.pdf'), 'application/pdf')
    blobbable = IBlobbable(obj)
    target = Blob()
    blobbable.feed(target)
    self.assertEqual(target.open('r').read(), getFile('plone.pdf').read())
    self.assertEqual(blobbable.filename(), '')
    self.assertEqual(blobbable.mimetype(), 'application/pdf')

def create(self, context, **parameters):
    wrapper = self.field.get(context)
    if wrapper:
        blob = Blob()
        result = blob.open('w')
        _, format, dimensions = scaleImage(wrapper.getBlob().open('r'),
                                           result=result, **parameters)
        result.close()
        return blob, format, dimensions

def __init__(self, mimetype, fd):
    self.mimetype = mimetype
    blob = Blob()
    blobfd = blob.open('w')
    copyfileobj(fd, blobfd)
    blobfd.close()
    self.data = blob

def addfile(fname):
    myblob = Blob()
    b = myblob.open('w')
    # Read the source file in binary mode before copying it into the blob
    o = open(fname, 'rb')
    data = o.read()
    o.close()
    b.write(data)
    print b.name
    b.close()
    return myblob

def testBlobbableOFSImage(self):
    gif = getImage()
    obj = Image('foo', 'Foo', StringIO(gif))
    obj.filename = 'foo.gif'
    blobbable = IBlobbable(obj)
    target = Blob()
    blobbable.feed(target)
    self.assertEqual(target.open('r').read(), gif)
    self.assertEqual(blobbable.filename(), 'foo.gif')
    self.assertEqual(blobbable.mimetype(), 'image/gif')

class File(Persistent):
    # prevent view tab from sorting first (it would display the file when
    # manage_main clicked)
    __tab_order__ = ('properties', 'acl_edit', 'view')
    __propschema__ = fileschema

    def __init__(self, stream, mimetype='application/octet-stream'):
        self.mimetype = mimetype
        self.blob = Blob()
        self.upload(stream)

    def get_properties(self):
        filedata = dict(
            fp=None,
            uid=str(self.__objectid__),
            filename=self.__name__,
        )
        return dict(
            name=self.__name__,
            file=filedata,
            mimetype=self.mimetype
        )

    def set_properties(self, struct):
        newname = struct['name']
        file = struct['file']
        mimetype = struct['mimetype']
        if file and file.get('fp'):
            fp = file['fp']
            fp.seek(0)
            self.upload(fp)
            filename = file['filename']
            mimetype = mimetypes.guess_type(filename, strict=False)[0]
            if not newname:
                newname = filename
        if not mimetype:
            mimetype = 'application/octet-stream'
        self.mimetype = mimetype
        oldname = self.__name__
        if newname and newname != oldname:
            self.__parent__.rename(oldname, newname)

    def upload(self, stream):
        if not stream:
            stream = StringIO.StringIO()
        fp = self.blob.open('w')
        size = 0
        for chunk in chunks(stream):
            size += len(chunk)
            fp.write(chunk)
        fp.close()
        self.size = size

def testBlobbableBinaryFile(self):
    _file = os.path.join(os.path.dirname(__file__), 'data', 'image.gif')
    with open(_file, 'rb') as f:
        obj = Binary(f)
        obj.filename = 'image.gif'
        blobbable = IBlobbable(obj)
        target = Blob()
        blobbable.feed(target)
        self.assertEqual(target.open('r').read(),
                         getFile('image.gif').read())
        self.assertEquals(blobbable.filename(), 'image.gif')
        self.assertEquals(blobbable.mimetype(), 'image/gif')

def put(self, key, src, cache_tag, headers=()):
    blobfile = Blob()
    self.persistent_map[key] = (headers, cache_tag, blobfile)
    f = blobfile.open('w')
    size = 0
    while 1:
        data = src.read(1 << 21)
        if not data:
            break
        size += len(data)
        f.write(data)
    f.close()

class ImageFile(Persistent):  # pragma NO COVERAGE
    """ Deprecated.
    """
    implements(IImageFile)

    def __init__(self, stream, mimetype):
        self._set_mimetype(mimetype)
        self.blobfile = Blob()
        self.upload(stream)

    def upload(self, stream):
        f = self.blobfile.open('w')
        size = upload_stream(stream, f)
        f.close()
        self.size = size

    @property
    def extension(self):
        return extensions.get(self.mimetype)

    @property
    def stream(self):
        return self.blobfile.open("r")

    def _set_mimetype(self, mimetype):
        if mimetype in ie_types:
            mimetype = ie_types[mimetype]
        if mimetype not in extensions:
            raise ValueError("Unsupported mime type: %s" % mimetype)
        self._mimetype = mimetype

    def _get_mimetype(self):
        # Upgrade old versions in zodb that used plain mimetype attribute
        if self.__dict__.has_key("mimetype"):
            self._mimetype = self.__dict__.pop("mimetype")
        return self._mimetype

    mimetype = property(_get_mimetype, _set_mimetype)

class TheBlob(object):
    """ contain a Blob which we can also use for compressed data

    Note: you can't subclass a Blob (google for it) so we use composition
    instead of subtyping
    """

    def __init__(self, data=None, compress=False):
        """ instantiate the blob file """
        self._blob = Blob()
        self._compress = compress
        if data:
            self.set(data)

    def __deepcopy__(self, memo):
        cls = self.__class__
        result = cls.__new__(cls)
        memo[id(self)] = result
        new_blob = Blob()
        memo[id(new_blob)] = new_blob
        setattr(result, '_compress', self._compress)
        setattr(result, '_blob', new_blob)
        result.set(self.get())
        return result

    def set(self, data):
        """ store the data in the blob. Compress if necessary """
        if self._compress:
            self._blob.open('w').write(zlib.compress(data))
        else:
            self._blob.open('w').write(data)

    def get(self):
        """ retrieve the blob data. Decompress if necessary """
        if self._compress:
            return zlib.decompress(self._blob.open('r').read())
        else:
            return self._blob.open('r').read()

    def open_blob(self, mode="r"):
        """ pass on to the blob. Not sure if this is a good idea with
        regards to persistence..
        """
        return self._blob.open(mode)

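# Not part of the original sources: an illustrative use of TheBlob above.
# The get()/set() API is the same whether or not zlib compression is
# enabled; only the bytes written to the underlying blob differ.
def _theblob_usage_example():
    plain = TheBlob(data=b'some payload')
    packed = TheBlob(data=b'some payload', compress=True)
    assert plain.get() == packed.get() == b'some payload'
    return plain, packed
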
class File(Persistent):

    def __init__(self, stream, mimetype='application/octet-stream'):
        self.mimetype = mimetype
        self.blob = Blob()
        self.upload(stream)

    def upload(self, stream):
        if not stream:
            stream = StringIO.StringIO()
        fp = self.blob.open('w')
        size = 0
        for chunk in chunks(stream):
            size += len(chunk)
            fp.write(chunk)
        fp.close()
        self.size = size

class Song(persistent.Persistent):
    genre = 'Unknown'

    recordings = multireference_target_property(RecordingToSong)
    liked_by = multireference_target_property(PerformerLikesSong)
    liked_by_ids = multireference_targetid_property(PerformerLikesSong)

    @property
    def likes(self):
        return len(self.liked_by)

    def __init__(self, title='', artist='', timings='', stream=None):
        self.title = title
        self.artist = artist
        self.timings = timings
        self.blob = Blob()
        with self.blob.open("w") as fp:
            shutil.copyfileobj(stream, fp)

def test_zapping_with_many_blobs(self):
    # https://github.com/zodb/relstorage/issues/468
    # If a database has many blobs (more than 4600 by default)
    # it couldn't be zapped.
    from ZODB.DB import DB
    from ZODB.blob import Blob
    import transaction
    storage = self._closing(self.make_storage(
        blob_dir='blobs', shared_blob_dir=False))
    db = self._closing(DB(storage))
    conn = db.open()

    if self.REALLY_EXHAUST_SHARED_MEMORY:  # pragma: no cover
        # NOTE: When actually testing the shared memory exhaustion,
        # this test is slow; it takes about 45s with default
        # server settings and psycopg2, and 1:50 under pg8000

        # First, figure out how many blobs we need to create to exceed
        # the limit and fail: max_locks_per_transaction * max_connections
        cursor = conn._storage._load_connection.cursor
        cursor.execute("SELECT CURRENT_SETTING('max_locks_per_transaction')")
        max_locks = cursor.fetchall()[0][0]
        cursor.execute("SELECT CURRENT_SETTING('max_connections')")
        max_conn = cursor.fetchall()[0][0]
        # max_locks * max_conn is the documented limit of the locks,
        # but it seems to actually be memory based. For example, with
        # max_locks = 64 (the default) and max_conn = 300 (3x the default)
        # we calculate a max_blobs of 19,200. And the server easily
        # handles that. However, dropping down to max_conn = 100 (the
        # default), the server fails to zap the 19,200 blobs, though it
        # does zap the 6,400 blobs fine. Hence the final * 3
        max_blobs = int(max_locks) * int(max_conn) * 3
    else:
        # Choose a number to let us loop a few times.
        max_blobs = 3523

    blobs = []
    for i in range(max_blobs):
        blob = Blob()
        with blob.open('w') as f:
            data = str(i)
            if not isinstance(data, bytes):
                data = data.encode('ascii')
            f.write(data)
        blobs.append(blob)

    conn.root().blobs = blobs
    transaction.commit()
    conn.close()

    # Now zap, being sure to use the fast method that originally
    # triggered this bug.
    storage.zap_all(slow=False)

    cursor = storage._load_connection.cursor
    cursor.execute('SELECT COUNT(*) FROM blob_chunk')
    self.assertEqual(0, cursor.fetchone()[0])

class NyBlobFile(Persistent):
    """Naaya container for files stored using ZODB Blob"""
    implements(INyBlobFile)

    def __init__(self, **kwargs):
        kwargs.setdefault('filename', None)
        kwargs.setdefault('content_type', 'application/octet-stream')
        for key, value in kwargs.iteritems():
            setattr(self, key, value)
        self._blob = Blob()

    def open(self):
        return self._blob.open('r')

    def open_iterator(self):
        return filestream_iterator(self._blob.committed(), 'rb')

    def open_write(self):
        return self._blob.open('w')

    def send_data(self, RESPONSE, as_attachment=True, set_filename=True,
                  REQUEST=None):
        """NyBlobFiles can also be served using X-Sendfile. In order to do
        so, you need to set X-NaayaEnableSendfile header to "on" by
        frontend server for each request.

        Lighttpd.conf example (working in proxy mode)::

            server.modules += ( "mod_setenv" )
            setenv.add-request-header = ( "X-NaayaEnableSendfile" => "on" )
            proxy-core.allow-x-sendfile = "enable"

        """
        RESPONSE.setHeader('Content-Length', self.size)
        RESPONSE.setHeader('Content-Type', self.content_type)
        if as_attachment:
            header_value = "attachment"
            if set_filename:
                utf8_fname = urllib.quote(self.filename)
                header_value += ";filename*=UTF-8''%s" % utf8_fname
            RESPONSE.setHeader('Content-Disposition', header_value)
        # Test for enabling of X-SendFile
        if REQUEST is not None:
            ny_xsendfile = REQUEST.get_header("X-NaayaEnableSendfile")
            if ny_xsendfile is not None and ny_xsendfile == "on":
                RESPONSE.setHeader("X-Sendfile", self._current_filename())
                return "[body should be replaced by front-end server]"
        if hasattr(RESPONSE, '_streaming'):
            return self.open_iterator()
        else:
            return self.open().read()

    def _current_filename(self):
        """ Convenience function that returns blob's filename """
        try:
            return self._blob.committed()
        except BlobError:
            return self._blob._p_blob_uncommitted

    def __repr__(self):
        return '<%(cls)s %(fname)r (%(mime)s, %(size)r bytes)>' % {
            'cls': self.__class__.__name__,
            'fname': self.filename,
            'mime': self.content_type,
            'size': self.size,
        }

class NyBlobFile(Item, Persistent, Cacheable, Implicit):
    """Naaya persistence of file using ZODB blobs"""
    implements(INyBlobFile)

    meta_type = "NyBlobFile"
    security = ClassSecurityInfo()

    def __init__(self, **kwargs):
        super(NyBlobFile, self).__init__(**kwargs)
        kwargs.setdefault('filename', None)
        kwargs.setdefault('content_type', 'application/octet-stream')
        for key, value in kwargs.iteritems():
            setattr(self, key, value)
        self._blob = Blob()

    def is_broken(self):
        filename = self.get_filename()
        if not filename:
            return True
        try:
            os.stat(filename)
            return False
        except (OSError, POSKeyError):
            return True

    def open(self):
        return self._blob.open('r')

    def open_iterator(self):
        return filestream_iterator(self._blob.committed(), 'rb')

    def open_write(self):
        return self._blob.open('w')

    def send_data(self, RESPONSE, as_attachment=True, set_filename=True,
                  REQUEST=None):
        """NyBlobFiles can also be served using X-Sendfile. In order to do
        so, you need to set X-NaayaEnableSendfile header to "on" by
        frontend server for each request.

        Lighttpd.conf example (working in proxy mode)::

            server.modules += ( "mod_setenv" )
            setenv.add-request-header = ( "X-NaayaEnableSendfile" => "on" )
            proxy-core.allow-x-sendfile = "enable"

        """
        RESPONSE.setHeader('Content-Length', self.size)
        RESPONSE.setHeader('Content-Type', self.content_type)
        if as_attachment:
            header_value = "attachment"
            if set_filename:
                utf8_fname = urllib.quote(self.filename)
                header_value += ";filename*=UTF-8''%s" % utf8_fname
            RESPONSE.setHeader('Content-Disposition', header_value)
        # Test for enabling of X-SendFile
        if REQUEST is not None:
            ny_xsendfile = REQUEST.get_header("X-NaayaEnableSendfile")
            if ny_xsendfile is not None and ny_xsendfile == "on":
                RESPONSE.setHeader("X-Sendfile", self._current_filename())
                return "[body should be replaced by front-end server]"
        if hasattr(RESPONSE, '_streaming'):
            return self.open_iterator()
        else:
            return self.open().read()

    def _current_filename(self):
        """ Convenience function that returns blob's filename """
        try:
            return self._blob.committed()
        except POSKeyError:
            return None
        except BlobError:
            return self._blob._p_blob_uncommitted

    get_filename = _current_filename

    def __repr__(self):
        return '<%(cls)s %(fname)r (%(mime)s, %(size)r bytes) stored in %(blob)s>' % {
            'cls': self.__class__.__name__,
            'fname': self.filename,
            'mime': self.content_type,
            'size': self.size,
            'blob': self._current_filename()
        }

    def get_size(self):
        return self.size

    security.declareProtected("View", 'index_html')

    def index_html(self, REQUEST=None, RESPONSE=None, charset='utf-8',
                   disposition='inline'):
        """ make it directly viewable when entering the object's URL """
        if REQUEST is None:
            REQUEST = self.REQUEST
        if RESPONSE is None:
            RESPONSE = REQUEST.RESPONSE
        RESPONSE.setHeader('Last-Modified', rfc1123_date(self._p_mtime))
        RESPONSE.setHeader('Content-Type', self.getContentType())
        RESPONSE.setHeader('Accept-Ranges', 'bytes')

        if handleIfModifiedSince(self, REQUEST, RESPONSE):
            return ''

        length = self.get_size()
        RESPONSE.setHeader('Content-Length', length)

        filename = self.getFilename()
        if filename is not None:
            if not isinstance(filename, unicode):
                filename = unicode(filename, charset, errors="ignore")
            filename = IUserPreferredFileNameNormalizer(REQUEST).normalize(
                filename)
            header_value = contentDispositionHeader(
                disposition=disposition,
                filename=filename)
            RESPONSE.setHeader("Content-disposition", header_value)

        request_range = handleRequestRange(self, length, REQUEST, RESPONSE)

        for fr in self._blob.readers:
            self._blob.readers.remove(fr)

        return self.getIterator(**request_range)

    security.declarePrivate('getIterator')

    def getIterator(self, **kw):
        """ return a filestream iterator object from the blob """
        return BlobStreamIterator(self._blob, **kw)

    def getContentType(self):
        return self.content_type

    def getFilename(self):
        return self.filename

    def raw_data(self):
        f = self.open()
        s = f.read()
        f.close()
        for fr in self._blob.readers:
            self._blob.readers.remove(fr)
        return s

    def write_data(self, data, content_type=None):
        if content_type:
            self.content_type = content_type
        if isinstance(data, basestring):
            blob = self.open_write()
            blob.write(data)
            blob.seek(0)
            blob.close()
            self.size = len(data)
            return self
        bf_stream = self.open_write()
        size = 0
        while True:
            _data = data.read(COPY_BLOCK_SIZE)
            if not _data:
                break
            bf_stream.write(_data)
            size += len(_data)
        bf_stream.close()
        self.size = size
        return self

class CommunityFile(Persistent):
    implements(ICommunityFile)

    modified_by = None  # Sorry, persistence
    is_image = False    # Sorry, persistence

    def __init__(self, title=u'', stream=None, mimetype=u'',
                 filename=u'', creator=u''):
        self.title = unicode(title)
        self.mimetype = mimetype
        self.filename = filename
        self.creator = unicode(creator)
        self.modified_by = self.creator
        self.blobfile = Blob()
        if stream is not None:
            self.upload(stream)

    def image(self):
        assert self.is_image, "Not an image."
        return PIL.Image.open(self.blobfile.open())

    def thumbnail(self, size):
        assert self.is_image, "Not an image."
        key = '%dx%d' % size
        thumbnail = self._thumbs.get(key, None)
        if thumbnail is None:
            image = self.image()
            if image.format == 'TIFF' and 'compression' in image.info:
                if image.info['compression'] in ['group3', 'group4']:
                    image = self.get_default_tiff_thumbnail()
            self._thumbs[key] = thumbnail = Thumbnail(image, size)
        return thumbnail

    def get_default_tiff_thumbnail(self):
        here = os.path.dirname(__file__)
        path = os.path.join(here, '..', '..', 'views', 'static',
                            'images', 'tiff.png')
        tiff = PIL.Image.open(path)
        return tiff

    def upload(self, stream):
        f = self.blobfile.open('w')
        size = upload_stream(stream, f)
        f.close()
        self.size = size
        self._init_image()

    def _check_image(self):
        if not self.mimetype.startswith('image'):
            return
        try:
            image = PIL.Image.open(self.blobfile.open())
        except IOError:
            return
        return image

    def _init_image(self):
        image = self._check_image()
        if image is not None:
            self._thumbs = OOBTree()
            self.image_size = image.size
            self.is_image = True
            alsoProvides(self, IImage)
        elif self.is_image:
            del self._thumbs
            del self.image_size
            self.is_image = False
            noLongerProvides(self, IImage)

    def revert(self, version):
        # catalog document map blows up if you feed it a long int
        self.docid = int(version.docid)
        self.created = version.created
        self.title = version.title
        self.modified = version.modified
        self.filename = version.attrs['filename']
        self.mimetype = version.attrs['mimetype']
        self.creator = version.attrs['creator']
        self.modified_by = version.user
        self.upload(version.blobs['blob'])
        # make sure file data is re-indexed
        self._extracted_data = None

class CommunityFile(Persistent):
    implements(ICommunityFile)

    modified_by = None  # Sorry, persistence
    is_image = False    # Sorry, persistence

    def __init__(self, title=u"", stream=None, mimetype=u"",
                 filename=u"", creator=u""):
        self.title = unicode(title)
        self.mimetype = mimetype
        self.filename = filename
        self.creator = unicode(creator)
        self.modified_by = self.creator
        self.blobfile = Blob()
        if stream is not None:
            self.upload(stream)

    def image(self):
        assert self.is_image, "Not an image."
        return PIL.Image.open(self.blobfile.open())

    def thumbnail(self, size):
        assert self.is_image, "Not an image."
        key = "%dx%d" % size
        thumbnail = self._thumbs.get(key, None)
        if thumbnail is None:
            self._thumbs[key] = thumbnail = Thumbnail(self.image(), size)
        return thumbnail

    def upload(self, stream):
        f = self.blobfile.open("w")
        size = upload_stream(stream, f)
        f.close()
        self.size = size
        self._init_image()

    def _check_image(self):
        if not self.mimetype.startswith("image"):
            return
        try:
            image = PIL.Image.open(self.blobfile.open())
        except IOError:
            return
        return image

    def _init_image(self):
        image = self._check_image()
        if image is not None:
            self._thumbs = OOBTree()
            self.image_size = image.size
            self.is_image = True
            alsoProvides(self, IImage)
        elif self.is_image:
            del self._thumbs
            del self.image_size
            self.is_image = False
            noLongerProvides(self, IImage)

    def revert(self, version):
        # catalog document map blows up if you feed it a long int
        self.docid = int(version.docid)
        self.created = version.created
        self.title = version.title
        self.modified = version.modified
        self.filename = version.attrs["filename"]
        self.mimetype = version.attrs["mimetype"]
        self.creator = version.attrs["creator"]
        self.modified_by = version.user
        self.upload(version.blobs["blob"])

class File(Persistent):

    title = _BLANK
    name = renamer()

    def __init__(self, stream=None, mimetype=None, title=_BLANK):
        """ The constructor of a File object.

        ``stream`` should be a filelike object (an object with a ``read``
        method that takes a size argument) or ``None``.  If stream is
        ``None``, the blob attached to this file object is created empty.

        ``title`` must be a string or Unicode object.

        ``mimetype`` may be any of the following:

        - ``None``, meaning set this file object's mimetype to
          ``application/octet-stream`` (the default).

        - A mimetype string (e.g. ``image/gif``)

        - The constant :attr:`substanced.file.USE_MAGIC`, which will
          derive the mimetype from the stream content (if ``stream`` is
          also supplied) using the ``python-magic`` library.

          .. warning::

             On non-Linux systems, successful use of
             :attr:`substanced.file.USE_MAGIC` requires the installation
             of additional dependencies.  See :ref:`optional_dependencies`.
        """
        self.blob = Blob()
        self.title = title or _BLANK
        # mimetype will be overridden by upload if there's a stream
        if mimetype is USE_MAGIC:
            self.mimetype = "application/octet-stream"
        else:
            self.mimetype = mimetype or "application/octet-stream"
        if stream is not None:
            if mimetype is USE_MAGIC:
                hint = USE_MAGIC
            else:
                hint = None
            self.upload(stream, mimetype_hint=hint)

    def upload(self, stream, mimetype_hint=None):
        """ Replace the current contents of this file's blob with the
        contents of ``stream``.  ``stream`` must be a filelike object (it
        must have a ``read`` method that takes a size argument).

        ``mimetype_hint`` can be any of the following:

        - ``None``, meaning don't reset the current mimetype.  This is the
          default.  If you already know the file's mimetype, and you don't
          want it divined from a filename or stream content, use ``None``
          as the ``mimetype_hint`` value, and set the ``mimetype``
          attribute of the file object directly before or after calling
          this method.

        - A string containing a filename that has an extension; the
          mimetype will be derived from the extension in the filename
          using the Python ``mimetypes`` module, and the result will be
          set as the mimetype attribute of this object.

        - The constant :attr:`pyramid.file.USE_MAGIC`, which will derive
          the mimetype using the ``python-magic`` library based on the
          stream's actual content.  The result will be set as the mimetype
          attribute of this object.

          .. warning::

             On non-Linux systems, successful use of
             :attr:`substanced.file.USE_MAGIC` requires the installation
             of additional dependencies.  See :ref:`optional_dependencies`.
        """
        if not stream:
            stream = io.StringIO()
        fp = self.blob.open("w")
        first = True
        use_magic = False
        if mimetype_hint is USE_MAGIC:
            use_magic = True
            if magic is None:  # pragma: no cover
                warnings.warn(
                    "The python-magic library does not have its requisite "
                    "dependencies installed properly, therefore the "
                    '"USE_MAGIC" flag passed to this method has been ignored '
                    '(it has been converted to "None"). The mimetype of '
                    "substanced.file.File objects created may be incorrect as "
                    "a result."
                )
                use_magic = False
                mimetype_hint = None
        if not use_magic:
            if mimetype_hint is not None:
                mimetype, _ = mimetypes.guess_type(mimetype_hint, strict=False)
                if mimetype is None:
                    mimetype = "application/octet-stream"
                self.mimetype = mimetype
        for chunk in chunks(stream):
            if use_magic and first:
                first = False
                m = magic.Magic(mime=True)
                mimetype = m.from_buffer(chunk)
                self.mimetype = u(mimetype)
            fp.write(chunk)
        fp.close()

    def get_response(self, **kw):
        """ Return a WebOb-compatible response object which uses the blob
        content as the stream data and the mimetype of the file as the
        content type.  The ``**kw`` arguments will be passed to the
        ``pyramid.response.FileResponse`` constructor as its keyword
        arguments."""
        if "content_type" not in kw:
            kw["content_type"] = str(self.mimetype)
        path = self.blob.committed()
        response = FileResponse(path, **kw)
        return response

    def get_size(self):
        """ Return the size in bytes of the data in the blob associated
        with the file"""
        return os.stat(self.blob.committed()).st_size

    def get_etag(self):
        """ Return a token identifying the "version" of the file.
        """
        self._p_activate()
        mine = self._p_serial
        blob = self.blob._p_serial
        if blob == z64:
            self.blob._p_activate()
            blob = self.blob._p_serial
        return oid_repr(max(mine, blob))

class BlobValue(object):
    """A BlobValue is using a ZODB Blob to store data. It handles both the
    zope.app.file and zope.file features. It can be used as a blob
    attribute, for more complex objects. It can also be used as a mixin
    with a Persistent class.
    """
    implements(IBlobFile)

    filename = FieldProperty(IBlobFile['filename'])
    mimeType = FieldProperty(IBlobFile['mimeType'])
    parameters = FieldProperty(IBlobFile['parameters'])

    def __init__(self, data='', contentType='', filename=None,
                 parameters=None):
        if filename:
            filename = clean_filename(filename)
        self.filename = filename
        if not contentType and filename:
            self.mimeType, enc = guess_content_type(name=filename)
        elif not contentType:
            self.mimeType = "application/octet-stream"
        else:
            self.mimeType = contentType
        if parameters is None:
            parameters = {}
        else:
            parameters = dict(parameters)
        self.parameters = parameters
        self._blob = Blob()
        self.data = data

    @property
    def contentType(self):
        return self.mimeType

    def open(self, mode="r"):
        return self._blob.open(mode)

    def openDetached(self):
        return file(self._blob.committed(), 'rb')

    def __len__(self):
        if self._blob == "":
            return 0
        reader = self._blob.open()
        reader.seek(0, 2)
        size = reader.tell()
        reader.close()
        return size

    @property
    def size(self):
        return int(self.__len__())

    @apply
    def data():
        """The blob property using a IFileStorage adapter to write down
        the value.
        """
        def get(self):
            blob = self._blob.open('r')
            data = blob.read()
            blob.close()
            return data

        def set(self, value):
            stored = queryMultiAdapter((self._blob, value), IFileStorage)
            if stored is not True:
                raise StorageError(
                    "An error occurred during the blob storage. Check the "
                    "value type (%r). This value should implement IFile, "
                    "IString or IUnicode (see `dolmen.builtins`)."
                    % value.__class__)

        return property(get, set)

    @property
    def physical_path(self):
        try:
            filename = self._blob.committed()
        except BlobError:
            # We retry, the data has now been committed
            # if possible by the ZODB blob.
            try:
                filename = self._blob.committed()
            except BlobError:
                # The retry failed, we return None.
                return None
        return filename

def test_merge_blobs_on_open(self):
    from ZODB.DB import DB
    from ZODB.blob import Blob
    import transaction
    storage = self._closing(
        self.make_storage(blob_dir='blobs', shared_blob_dir=False))
    db = self._closing(DB(storage))
    conn = db.open()
    blob = Blob()
    base_chunk = b"This is my base blob."
    with blob.open('w') as f:
        f.write(base_chunk)
    conn.root().blob = blob
    transaction.commit()

    # Insert some extra chunks. Get them big to be sure we loop
    # properly
    second_chunk = b'second chunk' * 800
    with conn._storage._store_connection_pool.borrowing(
            commit=True) as store_connection:
        cursor = store_connection.cursor
        cursor.execute(
            """
            INSERT INTO blob_chunk (zoid, chunk_num, tid, chunk)
            SELECT zoid, 1, tid, lo_from_bytea(0, %s)
            FROM blob_chunk
            WHERE chunk_num = 0;
            """, (second_chunk,))
        third_chunk = b'third chunk' * 900
        cursor.execute(
            """
            INSERT INTO blob_chunk (zoid, chunk_num, tid, chunk)
            SELECT zoid, 2, tid, lo_from_bytea(0, %s)
            FROM blob_chunk
            WHERE chunk_num = 0;
            """, (third_chunk,))
        cursor.execute('SELECT COUNT(*) FROM blob_chunk')
        self.assertEqual(3, cursor.fetchone()[0])

    # Now open again and find everything put together.
    # But we need to use a new blob dir, because
    # we changed data behind its back.
    conn.close()
    db.close()

    storage = self._closing(
        self.make_storage(blob_dir='blobs2', shared_blob_dir=False,
                          zap=False))
    db = self._closing(DB(storage))
    conn = db.open()
    blob = conn.root().blob

    with blob.open('r') as f:
        data = f.read()

    cursor = conn._storage._load_connection.cursor
    cursor.execute('SELECT COUNT(*) FROM blob_chunk')
    self.assertEqual(1, cursor.fetchone()[0])

    self.assertEqual(data, base_chunk + second_chunk + third_chunk)
    conn.close()
    db.close()

def update_generated_pdf(self):
    """
    If there isn't a custom pdf version of the review, generate the pdf
    from an Office document file (anything supported by abiword). If
    there isn't an Office file then generate the pdf from the contents
    of the review text (html).
    """
    has_custom_pdf = hasattr(self, "pdf") and self.pdf.get_size() > 0
    if not has_custom_pdf:
        # Generate the pdf file and save it as a blob
        pdf_blob = Blob()
        doc = None
        try:
            create_pdf = RunSubprocess(
                "abiword",
                input_params="--plugin=AbiCommand -t pdf",
                output_params="-o")
            create_pdf.create_tmp_ouput()
            if hasattr(self, "doc"):
                doc = self.getDoc()
            if doc:
                open_blob = doc.blob.open("r")
                blob_path = open_blob.name
                open_blob.close()
                create_pdf.run(input_path=blob_path)
            else:
                review = self.getReview()
                # Insert the review into a template so we have a
                # valid html file
                pdf_template = SimpleZpt(
                    "../browser/templates/htmltopdf.pt")
                data = pdf_template(
                    context={"review": review}).encode("utf-8")
                with NamedTemporaryFile() as tmp_input:
                    with NamedTemporaryFile() as tmp_output:
                        tmp_input.write(data)
                        tmp_input.flush()
                        try:
                            SimpleSubprocess(
                                '/usr/bin/tidy', '-utf8', '-numeric',
                                '-o', tmp_output.name, tmp_input.name,
                                exitcodes=[0, 1],
                            )
                            tmp_output.seek(0)
                            data = tmp_output.read()
                        except RuntimeError:
                            log.error(
                                "Tidy was unable to tidy the html for %s",
                                self.absolute_url(),
                                exc_info=True,
                            )
                create_pdf.create_tmp_input(suffix=".html", data=data)
                try:
                    create_pdf.run()
                except RuntimeError:
                    log.error(
                        "Abiword was unable to generate a pdf for %s "
                        "and created an error pdf",
                        self.absolute_url(),
                        exc_info=True,
                    )
                    create_pdf.create_tmp_input(
                        suffix=".html", data="Could not create PDF")
                    create_pdf.run()
            pdf_file = open(create_pdf.output_path, "r")
            pdf_data = pdf_file.read()
            pdf_blob.open("w").writelines(pdf_data)
            pdf_file.close()
            create_pdf.clean_up()
            self.generatedPdf = pdf_blob
        except SubprocessException:
            log.error(
                "The application Abiword does not seem to be available",
                exc_info=True)

class File(Persistent):

    title = u''
    name = renamer()

    def __init__(self, stream=None, mimetype=None, title=u''):
        """ The constructor of a File object.

        ``stream`` should be a filelike object (an object with a ``read``
        method that takes a size argument) or ``None``.  If stream is
        ``None``, the blob attached to this file object is created empty.

        ``title`` must be a string or Unicode object.

        ``mimetype`` may be any of the following:

        - ``None``, meaning set this file object's mimetype to
          ``application/octet-stream`` (the default).

        - A mimetype string (e.g. ``image/gif``)

        - The constant :attr:`substanced.file.USE_MAGIC`, which will
          derive the mimetype from the stream content (if ``stream`` is
          also supplied) using the ``python-magic`` library.

          .. warning::

             On non-Linux systems, successful use of
             :attr:`substanced.file.USE_MAGIC` requires the installation
             of additional dependencies.  See :ref:`optional_dependencies`.
        """
        self.blob = Blob()
        self.mimetype = mimetype or 'application/octet-stream'
        self.title = title or u''
        if stream is not None:
            if mimetype is USE_MAGIC:
                hint = USE_MAGIC
            else:
                hint = None
            self.upload(stream, mimetype_hint=hint)

    def upload(self, stream, mimetype_hint=None):
        """ Replace the current contents of this file's blob with the
        contents of ``stream``.  ``stream`` must be a filelike object (it
        must have a ``read`` method that takes a size argument).

        ``mimetype_hint`` can be any of the following:

        - ``None``, meaning don't reset the current mimetype.  This is the
          default.  If you already know the file's mimetype, and you don't
          want it divined from a filename or stream content, use ``None``
          as the ``mimetype_hint`` value, and set the ``mimetype``
          attribute of the file object directly before or after calling
          this method.

        - A string containing a filename that has an extension; the
          mimetype will be derived from the extension in the filename
          using the Python ``mimetypes`` module, and the result will be
          set as the mimetype attribute of this object.

        - The constant :attr:`pyramid.file.USE_MAGIC`, which will derive
          the mimetype using the ``python-magic`` library based on the
          stream's actual content.  The result will be set as the mimetype
          attribute of this object.

          .. warning::

             On non-Linux systems, successful use of
             :attr:`substanced.file.USE_MAGIC` requires the installation
             of additional dependencies.  See :ref:`optional_dependencies`.
        """
        if not stream:
            stream = StringIO.StringIO()
        fp = self.blob.open('w')
        first = True
        use_magic = False
        if mimetype_hint is USE_MAGIC:
            use_magic = True
            if magic is None:  # pragma: no cover
                warnings.warn(
                    'The python-magic library does not have its requisite '
                    'dependencies installed properly, therefore the '
                    '"USE_MAGIC" flag passed to this method has been ignored '
                    '(it has been converted to "None"). The mimetype of '
                    'substanced.file.File objects created may be incorrect as '
                    'a result.'
                )
                use_magic = False
                mimetype_hint = None
        if not use_magic:
            if mimetype_hint is not None:
                mimetype, _ = mimetypes.guess_type(mimetype_hint, strict=False)
                if mimetype is None:
                    mimetype = 'application/octet-stream'
                self.mimetype = mimetype
        for chunk in chunks(stream):
            if use_magic and first:
                first = False
                m = magic.Magic(mime=True)
                mimetype = m.from_buffer(chunk)
                self.mimetype = mimetype
            fp.write(chunk)
        fp.close()

    def get_response(self, **kw):
        """ Return a WebOb-compatible response object which uses the blob
        content as the stream data and the mimetype of the file as the
        content type.  The ``**kw`` arguments will be passed to the
        ``pyramid.response.FileResponse`` constructor as its keyword
        arguments."""
        if 'content_type' not in kw:
            kw['content_type'] = self.mimetype
        path = self.blob.committed()
        response = FileResponse(path, **kw)
        return response

    def get_size(self):
        """ Return the size in bytes of the data in the blob associated
        with the file"""
        return os.stat(self.blob.committed()).st_size

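# Not part of the original sources: a sketch of how the File class above
# might be used with Pyramid. The helper names (`upload_fp`,
# `_download_view`) are illustrative only. get_response() serves the data
# from blob.committed(), so it only works once the File (and its blob)
# has been committed to the database.
def _make_file(upload_fp):
    # Detect the mimetype from the stream content via python-magic
    return File(stream=upload_fp, mimetype=USE_MAGIC, title=u'Upload')

def _download_view(context, request):
    # `context` is assumed to be a previously committed File instance
    return context.get_response(request=request)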