def monkeypatch_plone_formwidget_namedfile_widget_download__call__(self):
    """ Patches to plone.formwidget.namedfile.widget.Download.__call__

    Serves the widget's stored file, preferring an X-Sendfile/X-Accel
    response (offloading delivery to the front-end proxy) and falling
    back to streaming the data directly.

    Raises NotFound when the widget has no context or no stored file.
    """
    if self.context.ignoreContext:
        raise NotFound(
            'Cannot get the data file from a widget with no context')
    if self.context.form is not None:
        content = aq_inner(self.context.form.getContent())
    else:
        content = aq_inner(self.context.context)
    field = aq_inner(self.context.field)
    dm = getMultiAdapter((
        content,
        field,
    ), IDataManager)
    file_ = dm.get()
    if file_ is None:
        raise NotFound(self, self.filename, self.request)
    if not self.filename:
        self.filename = getattr(file_, 'filename', None)
    set_headers(file_, self.request.response, filename=self.filename)
    # BUG FIX: was `IBlobby.providedBy(file)` — `file` is the Python 2
    # builtin (a NameError on Python 3), so the blob branch never ran.
    # The value fetched from the data manager is `file_`.
    if IBlobby.providedBy(file_):
        zodb_blob = file_._blob
    else:
        zodb_blob = file_
    if set_xsendfile_header(self.request, self.request.response, zodb_blob):
        # Header was set but no proxy consumed it; surface a hint instead
        # of an empty body.
        return 'collective.xsendfile - proxy missing?'
    else:
        return stream_data(file_)
def get_file_tuple(self, named_file, path_prefix):
    """Return a ``(archive_path, file_like)`` pair for *named_file*.

    The path joins *path_prefix* and the file's own filename; the
    second element is a blob stream when blobs are available, otherwise
    an in-memory BytesIO over the raw data.
    """
    archive_path = u'{0}/{1}'.format(
        safe_unicode(path_prefix),
        safe_unicode(named_file.filename),
    )
    if HAVE_BLOBS and IBlobby.providedBy(named_file):
        stream = named_file.open()
    else:
        stream = BytesIO(named_file.data)
    return (archive_path, stream)
def monkeypatch_plone_formwidget_namedfile_widget_download__call__(self):
    """ Patches to plone.formwidget.namedfile.widget.Download.__call__

    Looks up the stored file via IDataManager, sets download headers,
    then tries to delegate delivery to the proxy via X-Sendfile before
    falling back to direct streaming.

    Raises NotFound when the widget has no context or no stored file.
    """
    if self.context.ignoreContext:
        raise NotFound('Cannot get the data file from a widget with no context')
    if self.context.form is not None:
        content = aq_inner(self.context.form.getContent())
    else:
        content = aq_inner(self.context.context)
    field = aq_inner(self.context.field)
    dm = getMultiAdapter((content, field,), IDataManager)
    file_ = dm.get()
    if file_ is None:
        raise NotFound(self, self.filename, self.request)
    if not self.filename:
        self.filename = getattr(file_, 'filename', None)
    set_headers(file_, self.request.response, filename=self.filename)
    # BUG FIX: was `IBlobby.providedBy(file)` — `file` is the Python 2
    # builtin (a NameError on Python 3), so the blob branch never ran.
    # The value fetched from the data manager is `file_`.
    if IBlobby.providedBy(file_):
        zodb_blob = file_._blob
    else:
        zodb_blob = file_
    if set_xsendfile_header(self.request, self.request.response, zodb_blob):
        # Header was set but no proxy consumed it; surface a hint instead
        # of an empty body.
        return 'collective.xsendfile - proxy missing?'
    else:
        return stream_data(file_)
def stream_data(file, start=0, end=None):
    """Return the given file as a stream if possible.

    Non-blob files and uncommitted blobs are sliced in memory; a
    committed blob is served through a range-aware filestream iterator.
    """
    if not IBlobby.providedBy(file):
        return file.data[start:end]
    if file._blob._p_blob_uncommitted:
        # Blob has pending, uncommitted changes — serve from memory.
        return file.data[start:end]
    return filestream_range_iterator(
        file._blob.committed(), 'rb', start=start, end=end)
def stream_data(self, named_file):
    """Stream *named_file*, converting line endings for blob-backed files.

    Falls back to the raw ``data`` attribute when blobs are unavailable
    or the file is not blobby.
    """
    if HAVE_BLOBS and IBlobby.providedBy(named_file):
        blob = named_file._blob
        # Prefer the uncommitted temp file when one exists, otherwise
        # the committed blob path.
        path = blob._p_blob_uncommitted or blob.committed()
        return self.convert_line_endings(path)
    return named_file.data
def monkeypatch_plone_namedfile_browser_Download__call__(self):
    """Serve the file via X-Sendfile when possible, else stream it."""
    file = self._getFile()
    self.set_headers(file)
    # Hand the raw ZODB blob to the proxy when the file is blob-backed.
    if HAS_NAMEDFILE and IBlobby.providedBy(file):
        zodb_blob = file._blob
    else:
        zodb_blob = file
    if not set_xsendfile_header(self.request, self.request.response, zodb_blob):
        return stream_data(file)
    # Header was set but nothing consumed it — hint at a misconfigured proxy.
    return 'collective.xsendfile - proxy missing?'
def _expand_binary_data(self, obj, data):
    # Inline binary field values into `data` as base64, for ElasticSearch
    # indexing. Handles both Archetypes and Dexterity content; the original
    # (non-encoded) value is preserved under '<fieldname>_meta'. Fields that
    # are missing, empty, or larger than `es_config.max_blobsize` are set
    # to None.
    # NOTE(review): SOURCE was collapsed to one line; the indentation below
    # is a reconstruction — confirm branch nesting against upstream.
    max_size = es_config.max_blobsize
    is_archetype = False
    if HAS_ARCHETYPES and IBaseContent.providedBy(obj):
        is_archetype = True
        schema = obj.Schema()
    for fieldname in self._iterate_binary_fields(obj, data):
        if fieldname not in data:
            data[fieldname] = None
            continue
        if is_archetype:
            # Archetypes: read the value through the schema field.
            field = schema[fieldname]
            value = field.get(obj)
            if value is None:
                data[fieldname] = None
                continue
            data[fieldname + '_meta'] = data[fieldname]
            if IBlobWrapper.providedBy(value):
                # Size check BEFORE reading the blob, to avoid loading
                # oversized files into memory.
                if max_size and value.get_size() > max_size:
                    data[fieldname] = None
                    del data[fieldname + '_meta']
                    msg = 'File too big for ElasticSearch Indexing: {0}'
                    logger.info(msg.format(obj.absolute_url(), ), )
                    continue
                with value.getBlob().open() as fh:
                    data[fieldname] = base64.b64encode(fh.read())
            elif ITextField.providedBy(field):
                data[fieldname] = base64.b64encode(
                    data[fieldname + '_meta']['data'].encode('utf8'))
        else:
            # Dexterity: the value lives directly on the object.
            field = getattr(obj, fieldname, None)
            if field is None:
                data[fieldname] = None
                continue
            data[fieldname + '_meta'] = data[fieldname]
            if IBlobby.providedBy(field):
                if max_size and field.getSize() > max_size:
                    data[fieldname] = None
                    del data[fieldname + '_meta']
                    msg = 'File too big for ElasticSearch Indexing: {0}'
                    logger.info(msg.format(obj.absolute_url(), ), )
                    continue
                with field.open() as fh:
                    data[fieldname] = base64.b64encode(fh.read())
            elif IRichTextValue.providedBy(field):
                data[fieldname] = base64.b64encode(
                    data[fieldname + '_meta']['data'].encode('utf8'),
                )
        # Post-encoding size guard; drops values whose base64 form exceeds
        # the limit. NOTE(review): loop-level placement assumed — verify.
        if max_size and len(data[fieldname]) > max_size:
            data[fieldname] = None
            del data[fieldname + '_meta']
            logger.info(
                'File too big for ElasticSearch Indexing: {0}'.format(
                    obj.absolute_url(),
                ),
            )
def stream_data(file):
    """Return the given file as a stream if possible.

    Committed blobs are served through ``filestream_iterator`` when it is
    available; everything else falls back to the in-memory ``data``.
    """
    if (IBlobby.providedBy(file)
            and not file._blob._p_blob_uncommitted
            and filestream_iterator is not None):
        return filestream_iterator(file._blob.committed(), 'rb')
    return file.data
def monkeypatch_plone_namedfile_browser_Download__call__(self):
    """Serve the download via X-Sendfile when possible, else stream it."""
    file = self._getFile()
    self.set_headers(file)
    # Pass the underlying ZODB blob to the proxy for blob-backed files.
    zodb_blob = file._blob if IBlobby.providedBy(file) else file
    if not set_xsendfile_header(self.request, self.request.response, zodb_blob):
        return stream_data(file)
    # Header was set but nothing consumed it — hint at a misconfigured proxy.
    return 'collective.xsendfile - proxy missing?'
def stream_data(file):
    """Return the given file as a stream if possible."""
    if IBlobby.providedBy(file) and filestream_iterator is not None:
        blob = file._blob
        # XXX: we may want to use blob.committed() alone, which would raise
        # an error in case of uncommitted changes; for now prefer the
        # uncommitted temp file when one exists.
        path = blob._p_blob_uncommitted or blob.committed()
        return filestream_iterator(path, 'rb')
    return file.data
def _expand_binary_data(self, obj, data):
    """Base64-encode binary field values in *data* for indexing.

    The original value is preserved under ``<fieldname>_meta``; fields
    absent from *data* or from the object are skipped.
    """
    for name in self._iterate_binary_fields(obj, data):
        if name not in data:
            continue
        value = getattr(obj, name, None)
        if value is None:
            continue
        meta_key = name + '_meta'
        # Keep the un-encoded payload alongside the base64 version.
        data[meta_key] = data[name]
        if IBlobby.providedBy(value):
            with value.open() as fh:
                data[name] = base64.b64encode(fh.read())
        elif IRichTextValue.providedBy(value):
            data[name] = base64.b64encode(
                data[meta_key]['data'].encode('utf8'),
            )
def stream_data(self, named_file):
    """Stream *named_file* with converted line endings.

    The 'original_message' field is delegated unchanged to the base
    class implementation.
    """
    if self.fieldname == IOGMail['original_message'].getName():
        return super(MailDownload, self).stream_data(named_file)
    if HAVE_BLOBS and IBlobby.providedBy(named_file):
        blob = named_file._blob
        # Prefer the uncommitted temp file when present, otherwise the
        # committed blob path.
        path = blob._p_blob_uncommitted or blob.committed()
        return self.convert_line_endings(path)
    return named_file.data
def plone_namedfile_scaling_ImageScale_index_html(self):
    """ download the image """
    self.validate_access()
    set_headers(self.data, self.request.response)
    zodb_blob = self.data._blob if IBlobby.providedBy(self.data) else self.data
    try:
        # The very first time a scale is requested, it is created, so a
        # blob will not exist. So do not serve it with xsendfile until the
        # transaction is commited.
        zodb_blob.committed()
    except BlobError:
        return stream_data(self.data)
    if not set_xsendfile_header(self.request, self.request.response, zodb_blob):
        return stream_data(self.data)
    return 'collective.xsendfile - proxy missing?'
def _expand_binary_data(self, obj, data):
    # Inline binary field values into `data` as base64 for ElasticSearch
    # indexing, preserving the original payload under '<fieldname>_meta'.
    # Missing/empty fields become None; encoded values larger than
    # `es_config.max_blobsize` are dropped and logged.
    # NOTE(review): SOURCE was collapsed to one line; indentation below is
    # a reconstruction — confirm nesting against upstream.
    max_size = es_config.max_blobsize
    for fieldname in self._iterate_binary_fields(obj, data):
        if fieldname not in data:
            data[fieldname] = None
            continue
        field = getattr(obj, fieldname, None)
        if field is None:
            data[fieldname] = None
            continue
        # Keep the un-encoded payload alongside the base64 version.
        data[fieldname + '_meta'] = data[fieldname]
        if IBlobby.providedBy(field):
            with field.open() as fh:
                data[fieldname] = base64.b64encode(fh.read())
        elif IRichTextValue.providedBy(field):
            data[fieldname] = base64.b64encode(
                data[fieldname + '_meta']['data'].encode('utf8'),
            )
        # Post-encoding size guard. NOTE(review): loop-level placement
        # assumed (matches the sibling implementation) — verify.
        if max_size and len(data[fieldname]) > max_size:
            data[fieldname] = None
            del data[fieldname + '_meta']
            logger.info(
                'File too big for ElasticSearch Indexing: {0}'.format(
                    obj.absolute_url(),
                ),
            )