def delete_batch(self, request):
    """Deletes a batch of S3 objects from a single bucket.

    Args:
      request: a batch delete request carrying a ``bucket`` name and an
        ``objects`` list of object names to delete within that bucket.

    Returns:
      messages.DeleteBatchResponse listing, in order of attempt, the
      object names successfully deleted, the object names that failed,
      and the S3ClientError raised for each failure.
    """
    deleted, failed, errors = [], [], []
    for object_path in request.objects:
        # Construct outside the try block: only the delete call itself
        # is expected to raise S3ClientError.
        delete_request = messages.DeleteRequest(request.bucket, object_path)
        try:
            self.delete(delete_request)
        except messages.S3ClientError as e:
            # Best-effort semantics: record the failure and keep going
            # so one bad object doesn't abort the whole batch.
            failed.append(object_path)
            errors.append(e)
        else:
            deleted.append(object_path)
    return messages.DeleteBatchResponse(deleted, failed, errors)
def delete(self, path):
    """Deletes a single S3 object.

    Args:
      path: S3 file path in the form s3://<bucket>/<name>.

    Returns:
      None. A 404 (object not found) is silently ignored, matching GCS
      behavior; any other client error is logged and re-raised.

    Raises:
      messages.S3ClientError: if the delete fails for any reason other
        than the object not existing.
    """
    bucket, object_path = parse_s3_path(path)
    request = messages.DeleteRequest(bucket, object_path)
    try:
        self.client.delete(request)
    except messages.S3ClientError as e:
        if e.code == 404:
            # Same behavior as GCS - don't surface a 404 error
            return
        # Bug fix: previously logged the literal 3 instead of the
        # exception, discarding the actual failure reason.
        logging.error('HTTP error while deleting file %s: %s', path, e)
        raise