import io
import json
import typing

# BlobStore, S3BlobStore, BlobNotFoundError, BlobAlreadyExistsError, test_object_exists,
# and multipart_parallel_upload are assumed to be provided by the surrounding package.


def idempotent_save(blobstore: BlobStore, bucket: str, key: str, data: bytes) -> typing.Tuple[bool, bool]:
    """
    idempotent_save attempts to save an object to the BlobStore. Its return values indicate whether the
    save was made successfully and whether the operation could be completed idempotently. If the data in
    the blobstore does not match the data parameter, the data in the blobstore is _not_ overwritten.

    :param blobstore: the blobstore to save the data to
    :param bucket: the bucket in the blobstore to save the data to
    :param key: the key of the object to save
    :param data: the data to save
    :return: a tuple of booleans (was the data saved?, was the save idempotent?)
    """
    if test_object_exists(blobstore, bucket, key):
        # fetch the existing object and compare it to what we have.
        existing_data = blobstore.get(bucket, key)
        return False, existing_data == data
    else:
        # the object doesn't exist yet; write it to the persistent store. large S3
        # uploads are split into parts and uploaded in parallel.
        part_size = 16 * 1024 * 1024
        if isinstance(blobstore, S3BlobStore) and len(data) > part_size:
            with io.BytesIO(data) as fh:
                multipart_parallel_upload(
                    blobstore.s3_client,
                    bucket,
                    key,
                    fh,
                    part_size=part_size,
                    parallelization_factor=20,
                )
        else:
            blobstore.upload_file_handle(bucket, key, io.BytesIO(data))
        return True, True
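

# A minimal usage sketch of idempotent_save. Hypothetical: `store` and `bucket` are assumed
# to be a configured BlobStore instance and an existing bucket name; the key and payload
# below are illustrative only.
def _example_interpret_save(store: BlobStore, bucket: str) -> None:
    saved, idempotent = idempotent_save(store, bucket, "manifests/example", b"payload")
    if saved:
        pass  # first write: the object was created.
    elif idempotent:
        pass  # the object already existed with identical contents; a retry is safe.
    else:
        pass  # conflicting contents at this key; the stored object was left untouched.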
def _idempotent_save(blobstore: BlobStore, bucket: str, key: str, data: dict) -> typing.Tuple[bool, bool]:
    """
    _idempotent_save attempts to save an object to the BlobStore. Its return values indicate whether the
    save was made successfully and whether the operation could be completed idempotently. If the data in
    the blobstore does not match the data parameter, the data in the blobstore is _not_ overwritten.

    :param blobstore: the blobstore to save the data to
    :param bucket: the bucket in the blobstore to save the data to
    :param key: the key of the object to save
    :param data: the data to save
    :return: a tuple of booleans (was the data saved?, was the save idempotent?)
    """
    if test_object_exists(blobstore, bucket, key):
        # fetch the file metadata, compare it to what we have.
        existing_data = json.loads(blobstore.get(bucket, key).decode("utf-8"))
        return False, existing_data == data
    else:
        # write manifest to persistent store
        blobstore.upload_file_handle(
            bucket,
            key,
            io.BytesIO(json.dumps(data).encode("utf-8")),
        )
        return True, True
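

# Because _idempotent_save compares parsed JSON rather than raw bytes, a retry whose
# serialization differs only in key order or whitespace is still reported as idempotent.
# A minimal sketch, assuming `store` and `bucket` exist as above:
def _example_json_idempotency(store: BlobStore, bucket: str) -> None:
    _idempotent_save(store, bucket, "manifests/m1", {"a": 1, "b": 2})
    # a second save of the same logical content reports (saved=False, idempotent=True).
    saved, idempotent = _idempotent_save(store, bucket, "manifests/m1", {"b": 2, "a": 1})
    assert not saved and idempotent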
def write_file_metadata(handle: BlobStore, dst_bucket: str, file_uuid: str, file_version: str, document: str):
    # what's the target object name for the file metadata?
    metadata_key = f"files/{file_uuid}.{file_version}"

    # if it already exists, then it's a failure.
    try:
        handle.get_user_metadata(dst_bucket, metadata_key)
    except BlobNotFoundError:
        pass
    else:
        raise BlobAlreadyExistsError()

    handle.upload_file_handle(dst_bucket, metadata_key, io.BytesIO(document.encode("utf-8")))
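

# A hypothetical illustration of the write-once behavior: writing metadata for the same
# file uuid/version twice raises BlobAlreadyExistsError instead of overwriting. The uuid,
# version, and document values are made up for the example.
def _example_write_once(store: BlobStore, bucket: str) -> None:
    uuid, version = "ce55fd51-7833-469b-be0b-5da88ebebfcd", "2017-06-16T193604.240704Z"
    write_file_metadata(store, bucket, uuid, version, '{"name": "example"}')
    try:
        write_file_metadata(store, bucket, uuid, version, '{"name": "example"}')
    except BlobAlreadyExistsError:
        pass  # expected: files/<uuid>.<version> already exists.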