def put_attachment(self, content, name=None, content_type=None,
                   content_length=None, domain=None, type_code=None):
    """Put attachment in blob database

    See `get_short_identifier()` for restrictions on the upper bound
    for number of attachments per object.

    :param content: String or file object.
    """
    db = get_blob_db()
    if name is None:
        name = getattr(content, "name", None)
    if name is None:
        raise InvalidAttachment("cannot save attachment without name")
    if self._id is None:
        raise ResourceNotFound(
            "cannot put attachment on unidentified document")
    if hasattr(self, "domain"):
        if domain is not None and self.domain != domain:
            raise ValueError("domain mismatch: %s != %s" % (self.domain, domain))
        domain = self.domain
    elif domain is None:
        raise ValueError("domain attribute or argument is required")
    old_meta = self.blobs.get(name)

    if isinstance(content, str):
        content = BytesIO(content.encode("utf-8"))
    elif isinstance(content, bytes):
        content = BytesIO(content)

    # do we need to worry about BlobDB reading beyond content_length?
    meta = db.put(
        content,
        domain=domain or self.domain,
        parent_id=self._id,
        name=name,
        type_code=(self._blobdb_type_code if type_code is None else type_code),
        content_type=content_type,
    )
    self.external_blobs[name] = BlobMetaRef(
        key=meta.key,
        blobmeta_id=meta.id,
        content_type=content_type,
        content_length=meta.content_length,
    )
    if self._migrating_blobs_from_couch and self._attachments:
        self._attachments.pop(name, None)
    if self._atomic_blobs is None:
        self.save()
        if old_meta and old_meta.key:
            db.delete(key=old_meta.key)
    elif old_meta and old_meta.key:
        self._atomic_blobs[name].append(old_meta.key)
    return True
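

# Usage sketch (illustrative, not from the source): calling the metadata-based
# put_attachment() above from a document that mixes in these blob helpers.
# `ReportDocument` and the attachment details are hypothetical; only the
# keyword arguments mirror the signature defined above.
#
#     doc = ReportDocument(domain="example-domain")
#     doc.save()  # the document needs an _id before an attachment can be put
#     with open("report.pdf", "rb") as f:
#         doc.put_attachment(f, name="report.pdf",
#                            content_type="application/pdf")
#
#     # A plain string also works; it is encoded as UTF-8 and wrapped in BytesIO:
#     doc.put_attachment("a,b,c\n1,2,3\n", name="data.csv",
#                        content_type="text/csv")
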
def put_attachment(self, content, name=None, content_type=None,
                   content_length=None):
    """Put attachment in blob database

    See `get_short_identifier()` for restrictions on the upper bound
    for number of attachments per object.

    :param content: String or file object.
    """
    db = get_blob_db()
    if name is None:
        name = getattr(content, "name", None)
    if name is None:
        raise InvalidAttachment("cannot save attachment without name")
    old_meta = self.blobs.get(name)

    if isinstance(content, six.text_type):
        content = BytesIO(content.encode("utf-8"))
    elif isinstance(content, six.binary_type):
        content = BytesIO(content)

    bucket = self._blobdb_bucket()
    # do we need to worry about BlobDB reading beyond content_length?
    info = db.put(content, get_short_identifier(), bucket=bucket)
    self.external_blobs[name] = BlobMeta(
        id=info.identifier,
        content_type=content_type,
        content_length=info.length,
        digest=info.digest,
    )
    if self._migrating_blobs_from_couch and self._attachments:
        self._attachments.pop(name, None)
    if self._atomic_blobs is None:
        self.save()
        if old_meta and old_meta.id:
            db.delete(old_meta.id, bucket)
    elif old_meta and old_meta.id:
        self._atomic_blobs[name].append(old_meta)
    return True
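

# Usage sketch (illustrative, not from the source): the legacy bucket-based
# put_attachment() above takes no domain or type_code arguments; the blob is
# stored under a short random identifier in the bucket returned by
# self._blobdb_bucket(). `ReportDocument` and the payload are hypothetical.
#
#     doc = ReportDocument()
#     doc.put_attachment(b"%PDF-1.4 ...", name="report.pdf",
#                        content_type="application/pdf")
#     # bytes input is wrapped in BytesIO; text input is UTF-8 encoded first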