def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a new S3 bucket and wrap it as a :class:`.Container`.

    ``meta_data`` is not supported by this driver and is only logged.

    :raises CloudStorageError: If the bucket name fails validation.
    """
    if meta_data:
        logger.info(messages.OPTION_NOT_SUPPORTED, "meta_data")

    # "Bucket" is the only required argument; ACL is optional.
    params = {"Bucket": container_name}  # type: Dict[Any, Any]
    if acl:
        params["ACL"] = acl.lower()
    params = self._create_bucket_params(params)
    logger.debug("params=%s", params)

    try:
        bucket = self.s3.create_bucket(**params)
    except ParamValidationError as err:
        # boto reports validation details under "report" when available.
        raise CloudStorageError(
            err.kwargs.get("report", messages.CONTAINER_NAME_INVALID))

    try:
        bucket.wait_until_exists()
    except WaiterError as err:
        logger.error(err)

    return self._make_container(bucket)
def _set_container_meta(self, container: OpenStackContainer,
                        meta_data: MetaData) -> None:
    """Store *meta_data* on a container via a raw Swift POST request.

    :param container: Container to set metadata on.
    :type container: :class:`openstack.object_store.v1.container.Container`

    :param meta_data: A map of metadata to store with the container.
    :type meta_data: dict

    :return: NoneType
    :rtype: None

    :raises CloudStorageError: If setting the metadata failed.
    """
    object_url = '/'.join([
        self._get_server_public_url('cloudFiles'),
        quote(container.id),
    ])

    # Prefix every user key with the container metadata header prefix.
    headers = {
        self._CONTAINER_META_PREFIX + key: value
        for key, value in meta_data.items()
    }
    headers['X-Auth-Token'] = self._token

    response = requests.post(object_url, headers=headers)
    if response.status_code != HTTPStatus.NO_CONTENT:
        raise CloudStorageError(response.text)
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a new OpenStack container.

    ``acl`` is not supported by this driver and is only logged.

    :raises CloudStorageError: If the container could not be created.
    """
    if acl:
        logger.info(messages.OPTION_NOT_SUPPORTED, 'acl')

    try:
        # IDIOM FIX: pass the keyword argument directly instead of the
        # redundant `**dict(name=container_name)` wrapper.
        cont = self.object_store.create_container(name=container_name)
    except HttpException as err:
        raise CloudStorageError(err.details)

    meta_data = meta_data if meta_data is not None else {}
    self._set_container_meta(cont, meta_data)

    # Re-fetch so the returned container reflects the stored metadata.
    cont = self._get_container(cont.name)
    container = self._make_container(cont)

    # TODO: QUESTION: Automatically enable CDN for public-read?
    # if acl == 'public-read':
    #     self.enable_container_cdn(container)
    # else:
    #     logger.info(option_not_supported % 'acl')

    return container
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a new Azure container.

    Supported ``acl`` values: 'container-public-access' and
    'blob-public-access'; anything else leaves the container private.

    :raises CloudStorageError: If the Azure API call fails.
    """
    meta_data = {} if meta_data is None else meta_data

    # Review options: Off, Blob, Container
    acl_to_access = {
        'container-public-access': PublicAccess.Container,
        'blob-public-access': PublicAccess.Blob,
    }
    public_access = acl_to_access.get(acl)

    try:
        self.service.create_container(container_name,
                                      metadata=meta_data,
                                      public_access=public_access,
                                      fail_on_exist=False)
    except AzureConflictHttpError:
        # Existing container is treated as success.
        logger.debug(messages.CONTAINER_EXISTS, container_name)
    except AzureHttpError as err:
        logger.debug(err)
        raise CloudStorageError(str(err))

    azure_container = self._get_azure_container(container_name)
    return self._convert_azure_container(azure_container)
def lock_local_file(path: str) -> filelock.FileLock:
    """Platform dependent file lock.

    :param path: File or directory path to lock.
    :type path: str

    :yield: File lock context manager.
    :yield type: :class:`filelock.FileLock`

    :raise CloudStorageError: If lock could not be acquired.
    """
    lock = filelock.FileLock(path + ".lock")

    try:
        lock.acquire(timeout=0.1)
    except filelock.Timeout:
        raise CloudStorageError("Lock timeout")

    # BUG FIX: cleanup was previously skipped whenever the caller's
    # `with` body raised, leaving the lock held and the lock file on
    # disk. try/finally guarantees release on every exit path.
    try:
        yield lock
    finally:
        if lock.is_locked:
            lock.release()
        if os.path.exists(lock.lock_file):
            os.remove(lock.lock_file)
def __init__(self, key: str, secret: str = None, region: str = 'IAD',
             **kwargs: Dict) -> None:
    """Initialize the Rackspace driver.

    :param key: API username.
    :param secret: API key.
    :param region: Rackspace region, e.g. 'IAD' (case-insensitive).
    :param kwargs: Extra driver options, forwarded to the base class.

    :raises CloudStorageError: If the region is not recognized.
    """
    region = region.upper()
    if region not in self.regions:
        raise CloudStorageError(region_not_found % region)

    # BUG FIX: **kwargs were accepted but silently discarded; forward
    # them to the base class as the sibling driver __init__ does.
    super().__init__(key=key, secret=secret, region=region, **kwargs)

    self._conn = connection.Connection(username=key, api_key=secret,
                                       region=region)
def delete_container(self, container: Container) -> None:
    """Delete an OpenStack container.

    :raises NotFoundError: If the container does not exist.
    :raises IsNotEmptyError: If the container still holds objects.
    :raises CloudStorageError: For any other API failure.
    """
    try:
        self.object_store.delete_container(container.name)
    except ResourceNotFound:
        raise NotFoundError(container_not_found % container.name)
    except HttpException as e:
        # A CONFLICT status means Swift refused because objects remain.
        if e.http_status != HTTPStatus.CONFLICT:
            raise CloudStorageError(e.details)
        raise IsNotEmptyError(container_not_empty % container.name)
def _set_object_meta(self, obj, meta_data: MetaData) -> None:
    """Set object meta data.

    .. note:: Swift's metadata POST replaces *all* object metadata at
        once: any header omitted from the request is removed. That is
        why the extra content headers below are re-sent alongside the
        user metadata.

        References:

        * `Create or update object metadata
          <https://developer.rackspace.com/docs/cloud-files/v1/
          storage-api-reference/object-services-operations/
          #create-or-update-object-metadata>`_

    .. todo:: Use conn.object_store.set_object_metadata when OpenStack
        SDK fixes meta dictionary usage of `iteritems()` to `items()`.

    :param obj: Openstack object instance.
    :type obj: :class:`openstack.object_store.v1.obj.Object`

    :param meta_data: A map of metadata to store with the object.
    :type meta_data: dict

    :return: NoneType
    :rtype: None

    :raises CloudStorageError: If setting the metadata failed.
    """
    # TODO: BUG: Throws error due to legacy iteritems().
    # self.object_store.set_object_metadata(
    #     obj=obj, container=container.name, **meta_data)
    object_url = '/'.join([
        self._get_server_public_url('cloudFiles'),
        quote(obj.container),
        quote(obj.name),
    ])

    # Add header prefix to user meta data, X-Object-Meta-
    headers = {
        self._OBJECT_META_PREFIX + key: value
        for key, value in meta_data.items()
    }
    headers['X-Auth-Token'] = self._token

    # Include extra header params or they get deleted
    headers.update({
        'X-Delete-At': obj.delete_at,
        'X-Delete-After': obj.delete_after,
        'Content-Type': obj.content_type,
        'Content-Disposition': obj.content_disposition,
        'Content-Encoding': obj.content_encoding,
    })

    response = requests.post(object_url, headers=headers)
    if response.status_code != HTTPStatus.ACCEPTED:
        raise CloudStorageError(response.text)
def delete_container(self, container: Container) -> None:
    """Delete an OpenStack container.

    :raises NotFoundError: If the container does not exist.
    :raises IsNotEmptyError: If the container still holds objects.
    :raises CloudStorageError: For any other API failure.
    """
    try:
        self.object_store.delete_container(container.name)
    except ResourceNotFound:
        raise NotFoundError(messages.CONTAINER_NOT_FOUND % container.name)
    except HttpException as err:
        # CONFLICT means Swift refused because objects remain inside.
        if err.status_code != HTTPStatus.CONFLICT:
            raise CloudStorageError(err.details)
        raise IsNotEmptyError(messages.CONTAINER_NOT_EMPTY % container.name)
def _make_blob(self, container: Container, object_summary) -> Blob:
    """Convert S3 Object Summary to Blob instance.

    :param container: The container that holds the blob.
    :type container: :class:`.Container`

    :param object_summary: S3 object summary.
    :type object_summary: :class:`boto3.s3.ObjectSummary`

    :return: A blob object.
    :rtype: :class:`.Blob`

    :raise NotFoundError: If the blob object doesn't exist.
    """
    try:
        name = object_summary.key
        # The etag comes back wrapped in double quotes; strip them.
        checksum = etag = object_summary.e_tag.replace('"', "")
        size = object_summary.size
        acl = object_summary.Acl()

        attrs = object_summary.meta.data
        meta_data = attrs.get("Metadata", {})
        content_disposition = attrs.get("ContentDisposition", None)
        content_type = attrs.get("ContentType", None)
        cache_control = attrs.get("CacheControl", None)

        modified_at = object_summary.last_modified
        created_at = None
        expires_at = None  # TODO: FEATURE: Delete at / expires at
    except ClientError as err:
        error = err.response["Error"]
        if int(error["Code"]) == 404:
            raise NotFoundError(messages.BLOB_NOT_FOUND %
                                (object_summary.key, container.name))
        raise CloudStorageError("%s: %s" % (error["Code"], error["Message"]))

    return Blob(
        name=name,
        checksum=checksum,
        etag=etag,
        size=size,
        container=container,
        driver=self,
        acl=acl,
        meta_data=meta_data,
        content_disposition=content_disposition,
        content_type=content_type,
        cache_control=cache_control,
        created_at=created_at,
        modified_at=modified_at,
        expires_at=expires_at,
    )
def delete_container(self, container: Container) -> None:
    """Delete a local container (folder); it must be empty.

    :raises IsNotEmptyError: If the container still holds blobs.
    :raises CloudStorageError: If removing the folder fails.
    """
    # Refuse to delete as soon as a single blob is found.
    if any(True for _ in self.get_blobs(container)):
        raise IsNotEmptyError(container_not_empty % container.name)

    path = self._get_folder_path(container, validate=True)
    with lock_local_file(path):
        try:
            shutil.rmtree(path)
        except shutil.Error as e:
            raise CloudStorageError(e.strerror)
def container_cdn_url(self, container: Container) -> str:
    """Return the SSL CDN URL for *container*.

    :raises CloudStorageError: If CDN is not enabled on the container.
    """
    endpoint_url = '%s/%s' % (
        self._get_server_public_url('cloudFilesCDN'), container.name)
    response = requests.head(endpoint_url,
                             headers={'X-Auth-Token': self._token})

    # The CDN endpoint advertises the SSL URI only when CDN is enabled.
    uri = response.headers.get('x-cdn-ssl-uri')
    if uri:
        return uri
    raise CloudStorageError(messages.CDN_NOT_ENABLED % container.name)
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a local container (folder).

    Neither ``acl`` nor ``meta_data`` is supported by this driver; both
    are only logged.

    :raises CloudStorageError: If the name is not a usable folder path.
    """
    for option, value in (("acl", acl), ("meta_data", meta_data)):
        if value:
            logger.info(messages.OPTION_NOT_SUPPORTED, option)

    full_path = os.path.join(self.base_path, container_name)
    if not self._check_path_accessible(full_path):
        raise CloudStorageError(messages.CONTAINER_NAME_INVALID)

    # The folder must exist before a lock file can live inside it.
    self._make_path(full_path, ignore_existing=True)
    try:
        with lock_local_file(full_path):
            self._make_path(full_path, ignore_existing=True)
    except FileNotFoundError:
        raise CloudStorageError(messages.CONTAINER_NAME_INVALID)

    return self._make_container(container_name)
def __init__(self, key: str, secret: str = None, region: str = "IAD",
             **kwargs: Dict) -> None:
    """Initialize the Rackspace driver.

    :param key: API username.
    :param secret: API key.
    :param region: Rackspace region, e.g. "IAD" (case-insensitive).
    :param kwargs: Extra driver options, forwarded to the base class.

    :raises CloudStorageError: If the region is not recognized.
    """
    normalized_region = region.upper()
    if normalized_region not in self.regions:
        raise CloudStorageError(messages.REGION_NOT_FOUND %
                                normalized_region)

    super().__init__(key=key, secret=secret, region=normalized_region,
                     **kwargs)
    self._conn = connection.Connection(username=key, api_key=secret,
                                       region=normalized_region)
def _make_blob(self, container: Container, object_summary) -> Blob:
    """Convert S3 Object Summary to Blob instance.

    :param container: The container that holds the blob.
    :type container: :class:`.Container`

    :param object_summary: S3 object summary.
    :type object_summary: :class:`boto3.s3.ObjectSummary`

    :return: A blob object.
    :rtype: :class:`.Blob`

    :raise NotFoundError: If the blob object doesn't exist.
    """
    try:
        name = object_summary.key
        #: etag wrapped in quotes
        checksum = etag = object_summary.e_tag.replace('"', '')
        size = object_summary.size
        acl = object_summary.Acl()
        meta_data = object_summary.meta.data.get('Metadata', {})
        content_disposition = object_summary.meta.data.get(
            'ContentDisposition', None)
        content_type = object_summary.meta.data.get('ContentType', None)
        modified_at = object_summary.last_modified
        created_at = None
        expires_at = None
        # TODO: FEATURE: Delete at / expires at
    except ClientError as e:
        error_code = int(e.response['Error']['Code'])
        if error_code == 404:
            # BUG FIX: message template expects (blob name, container
            # name); the arguments were previously reversed.
            raise NotFoundError(blob_not_found %
                                (object_summary.key, container.name))
        raise CloudStorageError(
            '%s: %s' % (e.response['Error']['Code'],
                        e.response['Error']['Message']))

    return Blob(name=name, checksum=checksum, etag=etag, size=size,
                container=container, driver=self, acl=acl,
                meta_data=meta_data,
                content_disposition=content_disposition,
                content_type=content_type, created_at=created_at,
                modified_at=modified_at, expires_at=expires_at)
def __init__(self, key: str, secret: str = None, region: str = "us-east-1",
             **kwargs: Dict) -> None:
    """Initialize the Amazon S3 driver.

    :param key: AWS access key id.
    :param secret: AWS secret access key.
    :param region: AWS region name, e.g. "us-east-1" (case-insensitive).
    :param kwargs: Extra driver options, forwarded to the base class.

    :raises CloudStorageError: If the region is not recognized.
    """
    region_name = region.lower()
    super().__init__(key=key, secret=secret, region=region_name, **kwargs)

    self._session = boto3.Session(aws_access_key_id=key,
                                  aws_secret_access_key=secret,
                                  region_name=region_name)
    # The session must exist before the region list can be loaded.
    if region_name not in self.regions:
        raise CloudStorageError(messages.REGION_NOT_FOUND % region_name)
def _get_temp_url_key(self) -> str: """Get one of the account metadata keys for signing URLs. :return: Account metadata key. :rtype: str :raises CloudStorageError: If both account metadata keys are empty. """ keys = self.get_account_temp_url_keys() try: return next(item for item in keys if item is not None) except StopIteration: raise CloudStorageError( "Please set a temporary URL key on the driver: " "'storage.set_account_temp_url_keys'")
def copy_blob(self, container: Container, blob_name: str,
              destination: Container, dest_blob_name: str) -> Blob:
    """Copy a blob into *destination* under *dest_blob_name*.

    :raises NotFoundError: If the source container/blob does not exist.
    :raises CloudStorageError: If the copy operation fails.
    """
    source_blob = self.get_blob(container, blob_name)
    source_blob_url = self.blob_cdn_url(source_blob)

    try:
        self.service.copy_blob(destination.name, dest_blob_name,
                               source_blob_url)
    except AzureMissingResourceHttpError as err:
        logger.debug(err)
        # BUG FIX: `container_name` was undefined here (NameError when
        # the resource is missing); use the `container` parameter.
        raise NotFoundError(messages.CONTAINER_NOT_FOUND % container.name)
    except AzureException as err:
        logger.debug(err)
        raise CloudStorageError("Error while copy " + blob_name +
                                " into " + dest_blob_name)

    return self.get_blob(destination, dest_blob_name)
def __init__(self, key: str = None, **kwargs: Dict) -> None:
    """Initialize the Google Cloud Storage driver.

    Credentials are resolved from the ``GOOGLE_APPLICATION_CREDENTIALS``
    environment variable, or from the service account JSON file path
    given as *key* (which is then exported into the environment).

    :raises CloudStorageError: If no usable credentials source exists.
    """
    super().__init__(key=key)

    google_application_credentials = os.getenv(self._CREDENTIALS_ENV_NAME)
    if not google_application_credentials:
        # BUG FIX: guard against key=None before os.path.isfile, which
        # would otherwise raise TypeError instead of a clear error.
        # Also fixes the "provider"/"provide" typo in the message.
        if not key or not os.path.isfile(key):
            raise CloudStorageError(
                "Please set environment variable "
                "'GOOGLE_APPLICATION_CREDENTIALS' or provide file path "
                "to Google service account key json file.")

        # Set environment variable using credentials json file path.
        os.environ[self._CREDENTIALS_ENV_NAME] = key

    self._client = storage.Client()
def copy_blob(self, container: Container, blob_name: str,
              destination: Container, dest_blob_name: str) -> Blob:
    """Copy an S3 object into *destination* under *dest_blob_name*.

    :raises NotFoundError: If the source blob does not exist.
    :raises CloudStorageError: For any other client error.
    """
    source = {'Bucket': container.name, 'Key': blob_name}

    try:
        bucket = self.s3.Bucket(destination.name)
        bucket.copy(source, dest_blob_name)
    except ClientError as err:
        error_code = int(err.response['Error']['Code'])
        if error_code == 404:
            # BUG FIX: message template expects (blob name, container
            # name); the arguments were previously reversed.
            raise NotFoundError(messages.BLOB_NOT_FOUND %
                                (blob_name, container.name))
        raise CloudStorageError('%s: %s' %
                                (err.response['Error']['Code'],
                                 err.response['Error']['Message']))

    return self.get_blob(destination, dest_blob_name)
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a GCS bucket, reusing it if it already exists.

    ``meta_data`` is not supported by this driver and is only logged.

    :raises CloudStorageError: If the bucket name is rejected.
    """
    if meta_data:
        logger.warning(option_not_supported % 'meta_data')

    try:
        bucket = self.client.create_bucket(container_name)
    except ValueError as e:
        raise CloudStorageError(str(e))
    except Conflict:
        # Bucket already exists: fetch and reuse it.
        logger.debug(container_exists % container_name)
        bucket = self._get_bucket(container_name)

    if acl:
        bucket.acl.save_predefined(acl)

    return self._make_container(bucket)
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a GCS bucket, reusing it if it already exists.

    ``meta_data`` is not supported by this driver and is only logged.

    :raises CloudStorageError: If the bucket name is rejected.
    """
    if meta_data:
        logger.warning(messages.OPTION_NOT_SUPPORTED, 'meta_data')

    try:
        bucket = self.client.create_bucket(container_name)
    except ValueError as err:
        raise CloudStorageError(str(err))
    except Conflict:
        # Bucket already exists: fetch and reuse it.
        logger.debug(messages.CONTAINER_EXISTS, container_name)
        bucket = self._get_bucket(container_name)

    if acl:
        bucket.acl.save_predefined(acl)

    return self._make_container(bucket)
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a Minio bucket.

    Neither ``meta_data`` nor ``acl`` is supported by this driver; both
    are only logged.

    :raises CloudStorageError: If the bucket name is invalid or the
        request fails.
    """
    if meta_data:
        logger.info(messages.OPTION_NOT_SUPPORTED, 'meta_data')
    if acl:
        logger.info(messages.OPTION_NOT_SUPPORTED, 'acl')

    try:
        self.client.make_bucket(container_name)
    except (BucketAlreadyExists, BucketAlreadyOwnedByYou):
        # An existing bucket is treated as success.
        pass
    except (InvalidBucketName, InvalidBucketError, ResponseError) as err:
        raise CloudStorageError(err.message)

    return self._make_container(self._get_bucket(container_name))
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a local container (folder).

    Neither ``acl`` nor ``meta_data`` is supported by this driver; both
    are only logged.

    :raises CloudStorageError: If the name is not a usable folder path.
    """
    for option, value in (('acl', acl), ('meta_data', meta_data)):
        if value:
            logger.info(option_not_supported % option)

    full_path = os.path.join(self.base_path, container_name)

    # The folder must exist before a lock file can live inside it.
    self._make_path(full_path, ignore_existing=True)
    try:
        with lock_local_file(full_path):
            self._make_path(full_path, ignore_existing=True)
    except FileNotFoundError:
        raise CloudStorageError(container_name_invalid)

    return self._make_container(container_name)
def download_blob(self, blob: Blob, destination: FileLike) -> None:
    """Download *blob* to a path or a writable file-like object.

    If *destination* is a directory path, the blob's own name is used
    as the file name inside that directory.

    :raises CloudStorageError: If a directory destination does not
        exist.
    """
    blob_path = self._get_file_path(blob)

    if not isinstance(destination, str):
        # File-like object: stream the blob in chunks.
        with open(blob_path, "rb") as blob_file:
            for data in read_in_chunks(blob_file):
                destination.write(data)
        return

    base_name = os.path.basename(destination)
    if not base_name and not os.path.exists(destination):
        raise CloudStorageError("Path %s does not exist." % destination)

    if base_name:
        file_path = destination
    else:
        file_path = os.path.join(destination, blob.name)
    shutil.copy(blob_path, file_path)
def get_driver(driver: DriverName) -> Drivers:
    """Get driver class by DriverName enumeration member.

    .. code-block:: python

        >>> from cloudstorage import DriverName, get_driver
        >>> driver_cls = get_driver(DriverName.LOCAL)
        <class 'cloudstorage.drivers.local.LocalDriver'>

    :param driver: DriverName member.
    :type driver: :class:`.DriverName`

    :return: DriverName driver class.
    :rtype: :class:`.CloudDriver`
    """
    # Guard clause: unknown members fail fast.
    if driver not in _DRIVER_IMPORTS:
        raise CloudStorageError("Driver '%s' does not exist." % driver)

    mod_name, driver_name = _DRIVER_IMPORTS[driver]
    module = __import__(mod_name, globals(), locals(), [driver_name])
    return getattr(module, driver_name)
def _make_path(path: str, ignore_existing: bool = True) -> None: """Create a folder. :param path: Folder path to create. :type path: str :param ignore_existing: If True, ignore existing folder. :type ignore_existing: bool :return: NoneType :rtype: None :raises CloudStorageError: If folder exists and `ignore_existing` is False. """ try: os.makedirs(path) except OSError: logger.debug(messages.CONTAINER_EXISTS, path) exp = sys.exc_info()[1] if exp.errno == errno.EEXIST and not ignore_existing: raise CloudStorageError(exp.strerror)
def _get_server_public_url(self, service_name: str) -> str: """Return the public endpoint URL for a particular service region. `https://storage101.iad3.clouddrive.com/v1/MossoCloudFS_XXXXX` :param service_name: Service name: `cloudFiles` or `cloudFilesCDN`. :type service_name: str :return: Public URL for the requested service. :rtype: str :raises CloudStorageError: If service name is not found in catalog. """ service_catalog = self.conn.session.auth.auth_ref.service_catalog.catalog for service in service_catalog: if service["name"] == service_name: for endpoint in service["endpoints"]: if endpoint["region"] == self.region: return endpoint["publicURL"] raise CloudStorageError( "Could not determine the public URL for '%s'." % service_name)
def _get_bucket(self, bucket_name: str, validate: bool = True):
    """Get a S3 bucket.

    :param bucket_name: The Bucket's name identifier.
    :type bucket_name: str

    :param validate: If True, verify that the bucket exists.
    :type validate: bool

    :return: S3 bucket resource object.
    :rtype: :class:`boto3.s3.Bucket`

    :raises NotFoundError: If the bucket does not exist.
    :raises CloudStorageError: Boto 3 client error.
    """
    bucket = self.s3.Bucket(bucket_name)
    if not validate:
        return bucket

    try:
        response = self.s3.meta.client.head_bucket(Bucket=bucket_name)
        logger.debug("response=%s", response)
    except ClientError as err:
        error = err.response["Error"]
        if int(error["Code"]) == 404:
            raise NotFoundError(messages.CONTAINER_NOT_FOUND % bucket_name)
        raise CloudStorageError("%s: %s" % (error["Code"], error["Message"]))

    try:
        bucket.wait_until_exists()
    except WaiterError as err:
        logger.error(err)

    return bucket
def create_container(self, container_name: str, acl: str = None,
                     meta_data: MetaData = None) -> Container:
    """Create a new S3 bucket.

    ``meta_data`` is not supported by this driver and is only logged.

    :raises CloudStorageError: If the bucket name fails validation.
    """
    if meta_data:
        logger.info(messages.OPTION_NOT_SUPPORTED, "meta_data")

    # Required parameters
    params = {"Bucket": container_name}  # type: Dict[Any, Any]
    if acl:
        params["ACL"] = acl.lower()

    # TODO: BUG: Creating S3 bucket in us-east-1
    # See https://github.com/boto/boto3/issues/125
    if self.region != "us-east-1":
        params["CreateBucketConfiguration"] = {
            "LocationConstraint": self.region,
        }

    logger.debug("params=%s", params)

    try:
        bucket = self.s3.create_bucket(**params)
    except ParamValidationError as err:
        raise CloudStorageError(
            err.kwargs.get("report", messages.CONTAINER_NAME_INVALID))

    try:
        bucket.wait_until_exists()
    except WaiterError as err:
        logger.error(err)

    return self._make_container(bucket)