def _get_bucket(self, bucket_name: str) -> definitions.Bucket:
    """Get a Minio bucket by name.

    :param bucket_name: The Bucket's name identifier.
    :type bucket_name: str

    :return: Bucket resource object.
    :rtype: :class:`minio.definitions.Bucket`

    :raises NotFoundError: If the bucket does not exist.
    """
    # Minio offers no direct single-bucket lookup; scan the account's
    # bucket list for the first name match.
    matches = (b for b in self.client.list_buckets() if b.name == bucket_name)
    found = next(matches, None)
    if found is None:
        raise NotFoundError(messages.CONTAINER_NOT_FOUND % bucket_name)
    return found
def _get_container(self, container_name: str):
    """Get Rackspace container by name.

    :param container_name: Container name to get.
    :type container_name: str

    :return: Openstack object store container.
    :rtype: :class:`openstack.object_store.v1.container.Container`

    :raises NotFoundError: If container does not exist.
    """
    try:
        container = self.object_store.get_container_metadata(container_name)
    except NotFoundException:
        # Translate the SDK's lookup failure into the driver's exception.
        raise NotFoundError(messages.CONTAINER_NOT_FOUND % container_name)
    return container
def copy_blob(self, container: Container, blob_name: str,
              destination: Container, dest_blob_name: str) -> Blob:
    """Copy a blob into another Azure container via its CDN URL.

    :param container: Source container holding the blob.
    :type container: :class:`.Container`

    :param blob_name: Name of the blob to copy.
    :type blob_name: str

    :param destination: Destination container.
    :type destination: :class:`.Container`

    :param dest_blob_name: Name for the copied blob.
    :type dest_blob_name: str

    :return: The copied blob in the destination container.
    :rtype: :class:`.Blob`

    :raises NotFoundError: If the source blob or a container is missing.
    :raises CloudStorageError: On any other Azure copy failure.
    """
    source_blob = self.get_blob(container, blob_name)
    source_blob_url = self.blob_cdn_url(source_blob)
    try:
        self.service.copy_blob(destination.name, dest_blob_name,
                               source_blob_url)
    except AzureMissingResourceHttpError as err:
        logger.debug(err)
        # BUG FIX: the original referenced the undefined name
        # `container_name` here (the parameter is `container`), so this
        # path raised NameError instead of NotFoundError.
        raise NotFoundError(messages.CONTAINER_NOT_FOUND % container.name)
    except AzureException as err:
        logger.debug(err)
        raise CloudStorageError("Error while copy " + blob_name +
                                " into " + dest_blob_name)
    return self.get_blob(destination, dest_blob_name)
def _get_azure_container(self, container_name: str) -> AzureContainer:
    """Get Azure Storage container by name.

    :param container_name: The name of the container to get.
    :type container_name: str

    :return: The container matching the name provided.
    :rtype: :class:`azure.storage.blob.models.Container`

    :raises NotFoundError: If the container does not exist.
    """
    try:
        # Return straight from the service call; no intermediate needed.
        return self.service.get_container_properties(container_name)
    except AzureMissingResourceHttpError as err:
        logger.debug(err)
        raise NotFoundError(messages.CONTAINER_NOT_FOUND % container_name)
def copy_blob(self, container: Container, blob_name: str,
              destination: Container, dest_blob_name: str) -> Blob:
    """Copy a blob from one S3 bucket to another.

    :param container: Source container (bucket) holding the blob.
    :type container: :class:`.Container`

    :param blob_name: Name of the blob to copy.
    :type blob_name: str

    :param destination: Destination container (bucket).
    :type destination: :class:`.Container`

    :param dest_blob_name: Name for the copied blob.
    :type dest_blob_name: str

    :return: The copied blob in the destination container.
    :rtype: :class:`.Blob`

    :raises NotFoundError: If the source blob does not exist.
    :raises CloudStorageError: On any other S3 client error.
    """
    source = {'Bucket': container.name, 'Key': blob_name}
    try:
        bucket = self.s3.Bucket(destination.name)
        bucket.copy(source, dest_blob_name)
    except ClientError as err:
        error_code = int(err.response['Error']['Code'])
        if error_code == 404:
            # BUG FIX: arguments were swapped; BLOB_NOT_FOUND is filled
            # with (blob_name, container_name) everywhere else in this
            # code base.
            raise NotFoundError(messages.BLOB_NOT_FOUND % (blob_name,
                                                           container.name))
        raise CloudStorageError('%s: %s' % (err.response['Error']['Code'],
                                            err.response['Error']['Message']))
    return self.get_blob(destination, dest_blob_name)
def delete_blob(self, blob: Blob) -> None:
    """Delete a blob from its S3 bucket.

    :param blob: The blob to delete.
    :type blob: :class:`.Blob`

    :return: None

    :raises NotFoundError: If the blob does not exist.
    """
    # Required parameters
    params = {
        "Bucket": blob.container.name,
        "Key": blob.name,
    }
    logger.debug("params=%s", params)
    try:
        response = self.s3.meta.client.delete_object(**params)
        logger.debug("response=%s", response)
    except ClientError as err:
        error_code = int(err.response["Error"]["Code"])
        # BUG FIX: the original tested `!= 200 or != 204`, which is always
        # true, so the bare `raise` below was unreachable dead code.
        if error_code not in (200, 204):
            raise NotFoundError(messages.BLOB_NOT_FOUND %
                                (blob.name, blob.container.name))
        raise
def delete_blob(self, blob: Blob) -> None:
    """Delete a blob from its S3 bucket.

    :param blob: The blob to delete.
    :type blob: :class:`.Blob`

    :return: None

    :raises NotFoundError: If the blob does not exist.
    """
    # Required parameters
    params = {
        'Bucket': blob.container.name,
        'Key': blob.name,
    }
    logger.debug('params=%s' % params)
    try:
        response = self.s3.meta.client.delete_object(**params)
        logger.debug('response=%s' % response)
    except ClientError as e:
        error_code = int(e.response['Error']['Code'])
        # BUG FIX: the original tested `!= 200 or != 204`, which is always
        # true, so the bare `raise` below was unreachable dead code.
        if error_code not in (200, 204):
            raise NotFoundError(blob_not_found % (blob.name,
                                                  blob.container.name))
        raise
def _get_blob(self, bucket_name: str, blob_name: str) -> GoogleBlob:
    """Get a blob object by name.

    :param bucket_name: The name of the container that contains the blob.
    :type bucket_name: str

    :param blob_name: The name of the blob to get.
    :type blob_name: str

    :return: The blob object if it exists.
    :rtype: :class:`google.client.storage.blob.Blob`

    :raises NotFoundError: If the blob does not exist.
    """
    # `get_blob` returns None rather than raising when the key is absent.
    found = self._get_bucket(bucket_name).get_blob(blob_name)
    if not found:
        raise NotFoundError(messages.BLOB_NOT_FOUND % (blob_name,
                                                       bucket_name))
    return found
def _get_folder_path(self, container: Container,
                     validate: bool = True) -> str:
    """Get the container's full folder path.

    :param container: A container instance.
    :type container: :class:`.Container`

    :param validate: If True, verify that the folder exists.
    :type validate: bool

    :return: Full folder path to the container.
    :rtype: str

    :raises NotFoundError: If the container doesn't exist.
    """
    folder_path = os.path.join(self.base_path, container.name)
    # Optionally confirm the backing directory is really there.
    if validate:
        if not os.path.isdir(folder_path):
            raise NotFoundError(container_not_found % container.name)
    return folder_path
def _get_object(self, container_name: str, object_name: str):
    """Get Rackspace object by container and object name.

    :param container_name: Container name that contains the object.
    :type container_name: str

    :param object_name: Object name to get.
    :type object_name: str

    :return: Openstack object store object.
    :rtype: :class:`openstack.object_store.v1.obj.Object`

    :raises NotFoundError: If object does not exist.
    """
    try:
        # Return straight from the SDK call; no intermediate needed.
        return self.object_store.get_object_metadata(
            obj=object_name, container=container_name)
    except (ResourceNotFound, NotFoundException):
        raise NotFoundError(blob_not_found % (object_name, container_name))
def _make_container(self, folder_name: str) -> Container:
    """Convert a folder name to a Cloud Storage Container.

    :param folder_name: The folder name to convert.
    :type folder_name: str

    :return: A container instance.
    :rtype: :class:`.Container`

    :raises NotFoundError: If the folder does not exist.
    """
    folder_path = os.path.join(self.base_path, folder_name)
    try:
        folder_stat = os.stat(folder_path)
    except FileNotFoundError:
        raise NotFoundError(container_not_found % folder_name)
    # Use the folder's ctime (UTC) as the container creation timestamp.
    created = datetime.fromtimestamp(folder_stat.st_ctime, timezone.utc)
    return Container(name=folder_name, driver=self, meta_data=None,
                     created_at=created)
def _get_azure_blob(self, container_name: str, blob_name: str) -> AzureBlob:
    """Get Azure Storage blob by container and blob name.

    :param container_name: The name of the container that contains the blob.
    :type container_name: str

    :param blob_name: The name of the blob to get.
    :type blob_name: str

    :return: The blob object if it exists.
    :rtype: :class:`azure.storage.blob.models.Blob`

    :raises NotFoundError: If the blob does not exist.
    """
    try:
        # Return straight from the service call; no intermediate needed.
        return self.service.get_blob_properties(container_name, blob_name)
    except AzureMissingResourceHttpError as err:
        logger.debug(err)
        raise NotFoundError(messages.BLOB_NOT_FOUND % (blob_name,
                                                       container_name))
def _get_bucket(self, bucket_name: str, validate: bool = True):
    """Get a S3 bucket.

    :param bucket_name: The Bucket's name identifier.
    :type bucket_name: str

    :param validate: If True, verify that the bucket exists.
    :type validate: bool

    :return: S3 bucket resource object.
    :rtype: :class:`boto3.s3.Bucket`

    :raises NotFoundError: If the bucket does not exist.
    :raises CloudStorageError: Boto 3 client error.
    """
    bucket = self.s3.Bucket(bucket_name)
    if not validate:
        # Skip the existence checks entirely when not requested.
        return bucket
    try:
        response = self.s3.meta.client.head_bucket(Bucket=bucket_name)
        logger.debug("response=%s", response)
    except ClientError as err:
        error = err.response["Error"]
        if int(error["Code"]) == 404:
            raise NotFoundError(messages.CONTAINER_NOT_FOUND % bucket_name)
        raise CloudStorageError("%s: %s" % (error["Code"], error["Message"]))
    try:
        bucket.wait_until_exists()
    except WaiterError as err:
        # Best-effort wait; log and return the bucket anyway.
        logger.error(err)
    return bucket
def _get_bucket(self, bucket_name: str, validate: bool = True):
    """Get a S3 bucket.

    :param bucket_name: The Bucket's name identifier.
    :type bucket_name: str

    :param validate: If True, verify that the bucket exists.
    :type validate: bool

    :return: S3 bucket resource object.
    :rtype: :class:`boto3.s3.Bucket`

    :raises NotFoundError: If the bucket does not exist.
    :raises CloudStorageError: Boto 3 client error.
    """
    bucket = self.s3.Bucket(bucket_name)
    if validate:
        try:
            response = self.s3.meta.client.head_bucket(Bucket=bucket_name)
            logger.debug('response=%s' % response)
        except ClientError as e:
            error = e.response['Error']
            if int(error['Code']) == 404:
                raise NotFoundError(container_not_found % bucket_name)
            raise CloudStorageError('%s: %s' % (error['Code'],
                                                error['Message']))
        try:
            bucket.wait_until_exists()
        except WaiterError as e:
            # Best-effort wait; log and fall through to the return.
            logger.error(e)
    return bucket
def get_blob(self, container: Container, blob_name: str) -> Blob:
    """Get a blob object by name from a Minio container.

    :param container: The container that holds the blob.
    :type container: :class:`.Container`

    :param blob_name: The name of the blob to retrieve.
    :type blob_name: str

    :return: The blob object if it exists.
    :rtype: :class:`.Blob`

    :raises NotFoundError: If the blob does not exist.
    """
    try:
        stat = self.client.stat_object(container.name, blob_name)
    except NoSuchKey:
        raise NotFoundError(messages.BLOB_NOT_FOUND % (blob_name,
                                                       container.name))
    return self._make_obj(container, stat)
def _make_blob(self, container: Container, object_name: str) -> Blob:
    """Convert local file name to a Cloud Storage Blob.

    :param container: Container instance.
    :type container: :class:`.Container`

    :param object_name: Filename.
    :type object_name: str

    :return: Blob instance.
    :rtype: :class:`.Blob`

    :raises NotFoundError: If the file is missing or inaccessible.
    """
    full_path = os.path.join(self.base_path, container.name, object_name)

    if not self._check_path_accessible(full_path):
        raise NotFoundError(messages.BLOB_NOT_FOUND % (object_name,
                                                       container.name))

    object_path = pathlib.Path(full_path)
    try:
        stat = os.stat(str(object_path))
    except FileNotFoundError:
        raise NotFoundError(messages.BLOB_NOT_FOUND % (object_name,
                                                       container.name))

    # Metadata and content headers live in the file's extended attributes.
    meta_data, content_type, content_disposition, cache_control = \
        self._read_object_attributes(full_path)

    # TODO: QUESTION: Option to disable checksum for large files?
    # TODO: QUESTION: Save a .hash file for each file?
    file_hash = file_checksum(full_path, hash_type=self.hash_type)
    checksum = file_hash.hexdigest()

    # The etag derives from the path, not the contents, so it stays
    # stable when the same file is rewritten in place.
    etag = hashlib.sha1(full_path.encode("utf-8")).hexdigest()

    created_at = datetime.fromtimestamp(stat.st_ctime, timezone.utc)
    modified_at = datetime.fromtimestamp(stat.st_mtime, timezone.utc)

    return Blob(
        name=object_name,
        checksum=checksum,
        etag=etag,
        size=stat.st_size,
        container=container,
        driver=self,
        acl=None,
        meta_data=meta_data,
        content_disposition=content_disposition,
        content_type=content_type,
        cache_control=cache_control,
        created_at=created_at,
        modified_at=modified_at,
    )


def _read_object_attributes(self, full_path: str):
    """Read blob metadata stored in a file's extended attributes.

    Helper for :meth:`_make_blob`; isolates the xattr parsing loop.

    :param full_path: Absolute path of the file to inspect.
    :type full_path: str

    :return: Tuple of ``(meta_data, content_type, content_disposition,
        cache_control)``; values are ``None`` / empty when not stored.
    :rtype: tuple
    """
    meta_data = {}
    content_type = None
    content_disposition = None
    cache_control = None

    try:
        attributes = self._make_xattr(full_path)
        for attr_key, attr_value in attributes.items():
            value_str = None
            try:
                value_str = attr_value.decode("utf-8")
            except UnicodeDecodeError:
                # Leave None for values that are not valid UTF-8.
                pass

            if attr_key.startswith(self._OBJECT_META_PREFIX + "metadata"):
                meta_key = attr_key.split(".")[-1]
                meta_data[meta_key] = value_str
            elif attr_key.endswith("content_type"):
                content_type = value_str
            elif attr_key.endswith("content_disposition"):
                content_disposition = value_str
            elif attr_key.endswith("cache_control"):
                cache_control = value_str
            else:
                logger.warning("Unknown file attribute '%s'", attr_key)
    except OSError:
        # Filesystem without xattr support: metadata is best-effort only.
        logger.warning(messages.LOCAL_NO_ATTRIBUTES)

    return meta_data, content_type, content_disposition, cache_control