def fetch_attachment(self, name, stream=False):
    """Get named attachment

    :param stream: When true, return a file-like object that can be read
    at least once (streamers should not expect to seek within or read
    the contents of the returned file more than once).
    """
    db = get_blob_db()
    try:
        try:
            key = self.external_blobs[name].key
        except KeyError:
            if self._migrating_blobs_from_couch:
                return super(BlobMixin, self) \
                    .fetch_attachment(name, stream=stream)
            raise NotFound(name)
        blob = db.get(key=key)
    except NotFound:
        raise ResourceNotFound(
            "{model} {model_id} attachment: {name!r}".format(
                model=type(self).__name__,
                model_id=self._id,
                name=name,
            ))
    if stream:
        return blob
    with blob:
        return blob.read()

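# A minimal standalone sketch (not CommCare HQ code) of the stream=True/False
# contract described in the docstring above; the in-memory store and the
# attachment name are hypothetical, for illustration only.
import io

_fake_blob_store = {"form.xml": b"<data/>"}  # hypothetical in-memory store

def _fetch(name, stream=False):
    blob = io.BytesIO(_fake_blob_store[name])  # file-like; one read suffices
    if stream:
        return blob          # caller is responsible for reading and closing
    with blob:
        return blob.read()   # non-streaming: return the whole payload

assert _fetch("form.xml") == b"<data/>"
with _fetch("form.xml", stream=True) as fileobj:
    assert fileobj.read() == b"<data/>"
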
def size(self, identifier=None, bucket=DEFAULT_BUCKET, key=None):
    if not (identifier is None and bucket == DEFAULT_BUCKET):
        # legacy: can be removed with old API
        assert key is None, key
        key = self.get_path(identifier, bucket)
    check_safe_key(key)
    with maybe_not_found(throw=NotFound(key)), self.report_timing('size', key):
        return self._s3_bucket().Object(key).content_length

def exists(self, identifier, bucket=DEFAULT_BUCKET):
    path = self.get_path(identifier, bucket)
    try:
        with maybe_not_found(throw=NotFound(identifier, bucket)):
            self._s3_bucket().Object(path).load()
        return True
    except NotFound:
        return False

def get(self, identifier=None, bucket=DEFAULT_BUCKET, key=None):
    if not (identifier is None and bucket == DEFAULT_BUCKET):
        # legacy: can be removed with old API
        assert key is None, key
        key = self.get_path(identifier, bucket)
    check_safe_key(key)
    with maybe_not_found(throw=NotFound(key)), self.report_timing('get', key):
        resp = self._s3_bucket().Object(key).get()
    return BlobStream(resp["Body"], self, key)

def exists(self, key):
    check_safe_key(key)
    try:
        with maybe_not_found(throw=NotFound(key)), self.report_timing(
                'exists', key):
            self._s3_bucket().Object(key).load()
        return True
    except NotFound:
        return False

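# The S3 variants above rely on maybe_not_found() to translate the backend's
# "no such key" error into NotFound. A minimal standalone sketch of that
# pattern (assumed, not the corehq implementation), using stand-in error
# types in place of the real S3 client error and blob-DB NotFound:
from contextlib import contextmanager

class BackendKeyError(Exception):
    """Stand-in for the storage backend's missing-key error."""

class SketchNotFound(Exception):
    """Stand-in for the blob DB's NotFound."""

@contextmanager
def sketch_maybe_not_found(throw):
    try:
        yield
    except BackendKeyError:
        raise throw  # surface a uniform not-found error to callers

try:
    with sketch_maybe_not_found(throw=SketchNotFound("some/key")):
        raise BackendKeyError("404")
except SketchNotFound as err:
    print("converted:", err)
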
def size(self, identifier=None, bucket=DEFAULT_BUCKET, key=None):
    if identifier is None and bucket == DEFAULT_BUCKET:
        path = self.get_path(key=key)
    else:
        # legacy: can be removed with old API
        assert key is None, key
        key = join(bucket, identifier)
        path = self.get_path(identifier, bucket)
    if not exists(path):
        datadog_counter('commcare.blobdb.notfound')
        raise NotFound(key)
    return _count_size(path).size

def exists(self, identifier=None, bucket=DEFAULT_BUCKET, key=None):
    if not (identifier is None and bucket == DEFAULT_BUCKET):
        # legacy: can be removed with old API
        assert key is None, key
        key = self.get_path(identifier, bucket)
    check_safe_key(key)
    try:
        with maybe_not_found(throw=NotFound(key)), self.report_timing('exists', key):
            self._s3_bucket().Object(key).load()
        return True
    except NotFound:
        return False

def get(self, identifier=None, bucket=DEFAULT_BUCKET, key=None):
    if identifier is None and bucket == DEFAULT_BUCKET:
        path = self.get_path(key=key)
    else:
        # legacy: can be removed with old API
        assert key is None, key
        key = join(bucket, identifier)
        path = self.get_path(identifier, bucket)
    if not exists(path):
        datadog_counter('commcare.blobdb.notfound')
        raise NotFound(key)
    return open(path, "rb")

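# Several variants above accept both the legacy (identifier, bucket) pair and
# the newer single key. A standalone sketch of that calling-convention shim
# (function name assumed, for illustration only):
from os.path import join

DEFAULT_BUCKET = "_default"

def resolve_key(identifier=None, bucket=DEFAULT_BUCKET, key=None):
    if identifier is None and bucket == DEFAULT_BUCKET:
        return key                   # new API: key was passed directly
    assert key is None, key          # legacy API: derive key from the pair
    return join(bucket, identifier)

assert resolve_key(key="abc/def") == "abc/def"
assert resolve_key("def", "abc") == join("abc", "def")
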
def get(self, key=None, type_code=None, meta=None):
    key = self._validate_get_args(key, type_code, meta)
    path = self.get_path(key)
    if not exists(path):
        metrics_counter('commcare.blobdb.notfound')
        raise NotFound(key)
    file_obj = open(path, "rb")
    if meta and meta.is_compressed:
        content_length, compressed_length = meta.content_length, meta.compressed_length
        file_obj = GzipFile(fileobj=file_obj, mode='rb')
    else:
        content_length, compressed_length = self.size(key), None
    return BlobStream(file_obj, self, key, content_length, compressed_length)

def get(self, key=None, type_code=None, meta=None):
    key = self._validate_get_args(key, type_code, meta)
    check_safe_key(key)
    with maybe_not_found(throw=NotFound(key)), self.report_timing(
            'get', key):
        resp = self._s3_bucket().Object(key).get()
    reported_content_length = resp['ContentLength']
    body = resp["Body"]
    if meta and meta.is_compressed:
        content_length, compressed_length = meta.content_length, meta.compressed_length
        body = GzipFile(key, mode='rb', fileobj=body)
    else:
        content_length, compressed_length = reported_content_length, None
    return BlobStream(body, self, key, content_length, compressed_length)

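# Both compressed-aware get() variants above wrap the raw stream in GzipFile
# so callers read decompressed bytes transparently. A standalone stdlib demo
# of that technique (payload and lengths here are illustrative):
import gzip
import io

payload = b"x" * 1000
compressed = gzip.compress(payload)

body = io.BytesIO(compressed)                # stands in for the raw stream
reader = gzip.GzipFile(mode='rb', fileobj=body)
assert reader.read() == payload              # decompressed on the fly
print(len(payload), len(compressed))         # content_length vs compressed_length
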
def fetch_attachment(self, name, stream=False, return_bytes=False):
    """Get named attachment

    :param stream: When true, return a file-like object that can be read
    at least once (streamers should not expect to seek within or read
    the contents of the returned file more than once).
    :param return_bytes: When true, return bytes without attempting to
    decode them.
    """
    db = get_blob_db()
    try:
        try:
            key = self.external_blobs[name].key
        except KeyError:
            if self._migrating_blobs_from_couch:
                return super(BlobMixin, self) \
                    .fetch_attachment(name, stream=stream)
            raise NotFound(name)
        blob = db.get(key=key)
    except NotFound:
        raise ResourceNotFound(
            "{model} {model_id} attachment: {name!r}".format(
                model=type(self).__name__,
                model_id=self._id,
                name=name,
            ))
    if stream:
        return blob
    with blob:
        body = blob.read()
    if return_bytes:
        return body
    try:
        body = body.decode("utf-8", "strict")
    except UnicodeDecodeError:
        # Return bytes on decode failure, otherwise unicode.
        # Ugly, but consistent with restkit.wrappers.Response.body_string
        pass
    return body

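# A standalone demo of the decode-or-return-bytes fallback used at the end of
# fetch_attachment above (helper name assumed, for illustration only):
def to_text_or_bytes(body):
    try:
        return body.decode("utf-8", "strict")  # unicode when cleanly decodable
    except UnicodeDecodeError:
        return body                            # otherwise fall back to bytes

assert to_text_or_bytes(b"hello") == "hello"
assert isinstance(to_text_or_bytes(b"\xff\xfe"), bytes)
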
def get(self, identifier, bucket=DEFAULT_BUCKET):
    path = self.get_path(identifier, bucket)
    if not exists(path):
        raise NotFound(identifier, bucket)
    return open(path, "rb")

def size(self, key):
    path = self.get_path(key)
    if not exists(path):
        metrics_counter('commcare.blobdb.notfound')
        raise NotFound(key)
    return _count_size(path).size

def get(self, key):
    path = self.get_path(key)
    if not exists(path):
        metrics_counter('commcare.blobdb.notfound')
        raise NotFound(key)
    return open(path, "rb")

def size(self, key):
    check_safe_key(key)
    with maybe_not_found(throw=NotFound(key)), self.report_timing(
            'size', key):
        return self._s3_bucket().Object(key).content_length

def get(self, identifier, bucket=DEFAULT_BUCKET):
    path = self.get_path(identifier, bucket)
    with maybe_not_found(throw=NotFound(identifier, bucket)):
        resp = self._s3_bucket().Object(path).get()
    return BlobStream(resp["Body"], self, path)

def size(self, identifier, bucket=DEFAULT_BUCKET):
    path = self.get_path(identifier, bucket)
    if not exists(path):
        datadog_counter('commcare.blobdb.notfound')
        raise NotFound(identifier, bucket)
    return os.path.getsize(path)

def get(self, key):
    check_safe_key(key)
    with maybe_not_found(throw=NotFound(key)), self.report_timing(
            'get', key):
        resp = self._s3_bucket().Object(key).get()
    return BlobStream(resp["Body"], self, key)

def get(self, identifier, bucket=DEFAULT_BUCKET):
    path = self.get_path(identifier, bucket)
    with maybe_not_found(throw=NotFound(identifier, bucket)):
        resp = self._s3_bucket().Object(path).get()
    return ClosingContextProxy(resp["Body"])  # body stream

def size(self, identifier, bucket=DEFAULT_BUCKET):
    path = self.get_path(identifier, bucket)
    with maybe_not_found(throw=NotFound(identifier, bucket)):
        return self._s3_bucket().Object(path).content_length