async def _get_container(container_name, store_name, **kwargs) -> Container:
    """Resolve a :class:`Container` from kwargs or configured defaults.

    :param container_name: Container name; falls back to the configured
        ``default_container`` when falsy.
    :param store_name: Store configuration name; falls back to the
        configured ``default_store`` when falsy.
    :param kwargs: May carry a ready-made ``container``, which is returned
        as-is after a type check.
    :return: Container obtained from the configured driver.
    :raises CloudStorageError: If no container/store name can be resolved,
        or the store is not configured.
    """
    container = kwargs.get('container', None)
    if container is not None:
        # Caller supplied a pre-built container; validate its type and
        # return it untouched.
        assert isinstance(container, Container), ("Invalid container type")
        return container

    # Fall back to the configured default, raising when neither an
    # explicit value nor a default exists.  (The original code defaulted
    # these twice through redundant branches; behavior is unchanged.)
    if not container_name:
        container_name = _m['default_container']
        if not container_name:
            raise CloudStorageError(
                "container_name must be provided. No default container configured"
            )
    if store_name and store_name not in _m['confs']:
        raise CloudStorageError("store name %s not configured" % (store_name))
    if not store_name:
        if _m['default_store'] is None:
            raise CloudStorageError(
                "store_name must be provided. No default store configured")
        store_name = _m['default_store']
    if not store_name or not container_name:
        # Defensive catch-all kept from the original implementation.
        raise CloudStorageError(
            "Unknown error occured in getting container")

    conf = _m['confs'][store_name]
    driver = conf['klass'](**conf)
    return await driver.get_container(container_name)
async def _get_bucket(self, bucket_name: str, validate: bool = True):
    """Get a Minio bucket.

    :param bucket_name: The Bucket's name identifier.
    :type bucket_name: str
    :param validate: If True, verify that the bucket exists.
    :type validate: bool
    :return: Minio bucket resource object.
    :rtype: :class:`boto3.s3.Bucket`
    :raises NotFoundError: If the bucket does not exist.
    :raises CloudStorageError: Boto 3 client error.
    """
    if validate:
        try:
            async with self.s3() as s3:
                response = await s3.head_bucket(Bucket=bucket_name)
                logger.debug('response=%s', response)
        except ClientError as err:
            # The error code may be a non-numeric string such as
            # 'NoSuchBucket'; the previous int() conversion raised
            # ValueError for those and masked the real error.
            error_code = err.response['Error']['Code']
            if str(error_code) == '404':
                raise NotFoundError(messages.CONTAINER_NOT_FOUND % bucket_name)
            raise CloudStorageError('%s: %s' % (
                err.response['Error']['Code'],
                err.response['Error']['Message']))
    return Bucket(bucket_name)
def _make_blob(self, container: Container, object_summary) -> Blob:
    """Convert Minio Object Summary to Blob instance.

    :param container: The container that holds the blob.
    :type container: :class:`.Container`
    :param object_summary: Minio object summary (response dict).
    :type object_summary: :class:`boto3.s3.ObjectSummary`
    :return: A blob object.
    :rtype: :class:`.Blob`
    :raise CloudStorageError: If the object summary is malformed.
    """
    try:
        name = object_summary['Key']
        # ETag comes back wrapped in double quotes; strip them.
        checksum = etag = object_summary['ETag'].replace('"', '')
        # list_objects responses carry 'Size'; head_object responses
        # carry 'ContentLength'.
        if 'Size' in object_summary:
            size = object_summary['Size']
        elif 'ContentLength' in object_summary:
            size = object_summary['ContentLength']
        else:
            raise Exception('No size key in response')
        # ACL retrieval is not implemented for this driver.
        acl = None
        meta_data = object_summary.get('Metadata', {})
        content_disposition = object_summary.get('ContentDisposition', None)
        content_type = object_summary.get('ContentType', None)
        cache_control = object_summary.get('CacheControl', None)
        modified_at = object_summary.get('LastModified', None)
        created_at = None
        expires_at = None  # TODO: FEATURE: Delete at / expires at
    except Exception as err:
        # Any malformed summary is surfaced uniformly as a storage error.
        raise CloudStorageError('Invalid object summary %s' % (str(err), ))
    return Blob(name=name, checksum=checksum, etag=etag, size=size,
                container=container, driver=self, acl=acl,
                meta_data=meta_data,
                content_disposition=content_disposition,
                content_type=content_type,
                cache_control=cache_control,
                created_at=created_at,
                modified_at=modified_at,
                expires_at=expires_at)
async def _check_driver_valid(conf): klass = conf['klass'] driver = klass(**conf) try: async for container in driver.get_containers(): break except Exception as err: raise CloudStorageError("Error Connecting to driver %s : %s" % (conf['driver'], str(err)))
async def get_blobs(self, container: Container):
    """Yield every blob in *container*.

    :param container: Container whose objects are listed.
    :type container: :class:`.Container`
    :yield: :class:`.Blob` instances built from each object summary.
    :raises CloudStorageError: On any S3 client error.
    """
    continuation_token = None
    while True:
        kwargs = {'Bucket': container.name}
        if continuation_token:
            kwargs['ContinuationToken'] = continuation_token
        try:
            async with self.s3() as s3:
                resp = await s3.list_objects_v2(**kwargs)
        except ClientError as err:
            raise CloudStorageError('%s: %s' % (
                err.response['Error']['Code'],
                err.response['Error']['Message']))
        for obj_summary in resp.get('Contents', []):
            blob = self._make_blob(container, obj_summary)
            yield blob
        # list_objects_v2 returns at most 1000 keys per call; without
        # following the continuation token larger buckets were silently
        # truncated.
        if not resp.get('IsTruncated'):
            break
        continuation_token = resp.get('NextContinuationToken')
        if not continuation_token:
            break
async def _object_summary(self, bucket_name: str, blob_name: str) -> Dict:
    """Fetch an object's metadata via a HEAD request.

    :param bucket_name: Bucket holding the object.
    :type bucket_name: str
    :param blob_name: Object key.
    :type blob_name: str
    :return: head_object response dict with transport metadata removed.
    :rtype: Dict
    :raises NotFoundError: If the object does not exist (404).
    :raises CloudStorageError: On any other client error.
    """
    try:
        async with self.s3() as s3:
            resp = await s3.head_object(Bucket=bucket_name, Key=blob_name)
    except ClientError as err:
        # The error code may be non-numeric (e.g. 'NoSuchKey'); the
        # previous int() conversion raised ValueError for those.
        error_code = err.response['Error']['Code']
        if str(error_code) == '404':
            raise NotFoundError(messages.BLOB_NOT_FOUND % (blob_name,
                                                           bucket_name))
        raise CloudStorageError('%s: %s' % (
            err.response['Error']['Code'],
            err.response['Error']['Message']))
    # Strip transport metadata; a missing key must not raise.
    resp.pop('ResponseMetadata', None)
    return resp
async def delete_container(self, container: Container) -> bool:
    """Delete an empty local container (folder).

    :param container: Container to delete.
    :type container: :class:`.Container`
    :return: True when the folder was deleted, False when it was
        not found.  (The original annotation said ``None`` but the
        function always returned a bool.)
    :rtype: bool
    :raises IsNotEmptyError: If the container still holds blobs.
    :raises CloudStorageError: If the folder could not be removed.
    """
    try:
        # Any yielded blob means the container is non-empty.
        async for _ in self.get_blobs(container):
            raise IsNotEmptyError(messages.CONTAINER_NOT_EMPTY %
                                  container.name)
        path = self._get_folder_path(container, validate=True)
    except NotFoundError:
        return False
    with lock_local_file(path):
        try:
            shutil.rmtree(path)
        except shutil.Error as err:
            raise CloudStorageError(err.strerror)
    return True
async def upload(filepath: FileLike, destfilename: str = 'random',
                 destpath: str = '', container_name=None, store_name=None,
                 **kwargs):
    """Upload a file to a container.

    destfilename:
        auto - name generated from filename
        random - random uuid
        <provided> - user provided name
    """
    # _check_storage_enabled() already raises CloudStorageError when
    # storage is disabled; the original repeated the same check inline.
    _check_storage_enabled()
    container = await _get_container(container_name, store_name, **kwargs)
    blob = await container.upload_blob(filepath, destfilename, destpath)
    return blob
async def download_blob(self, blob: Blob, destination: FileLike) -> None:
    """Copy a blob's backing file to a path or into a writable stream.

    :param blob: Blob to download.
    :type blob: :class:`.Blob`
    :param destination: Target file path (str) or writable file-like
        object.
    :raises CloudStorageError: If a directory-style destination path
        does not exist.
    """
    source = self._get_file_path(blob)
    if not isinstance(destination, str):
        # File-like target: stream the blob across in chunks.
        with open(source, 'rb') as src:
            for chunk in read_in_chunks(src):
                destination.write(chunk)
        return
    leaf = os.path.basename(destination)
    if not leaf and not os.path.exists(destination):
        raise CloudStorageError('Path %s does not exist.' % destination)
    # A trailing-separator destination is a directory: keep the blob name.
    target = os.path.join(destination, blob.name) if not leaf else destination
    shutil.copy(source, target)
async def create_container(self, container_name: str, acl: str = None,
                           meta_data: MetaData = None) -> Container:
    """Create a local container (folder).

    :param container_name: Name of the container to create.
    :type container_name: str
    :param acl: Ignored; ACLs are not supported by this driver.
    :param meta_data: Ignored; metadata is not supported by this driver.
    :return: The newly created container.
    :rtype: :class:`.Container`
    :raises CloudStorageError: If the container name is invalid.
    """
    is_valid_bucket_name(container_name, strict=True)
    if acl:
        logger.info(messages.OPTION_NOT_SUPPORTED, 'acl')
    if meta_data:
        logger.info(messages.OPTION_NOT_SUPPORTED, 'meta_data')
    full_path = os.path.join(self.base_path, container_name)
    try:
        # Create the folder only under the file lock.  The original made
        # an extra unguarded _make_path call before the try, whose
        # FileNotFoundError escaped this handler.
        with lock_local_file(full_path):
            self._make_path(full_path, ignore_existing=True)
    except FileNotFoundError:
        raise CloudStorageError(messages.CONTAINER_NAME_INVALID)
    return self._make_container(container_name)
def _make_path(path: str, ignore_existing: bool = True) -> None: """Create a folder. :param path: Folder path to create. :type path: str :param ignore_existing: If True, ignore existing folder. :type ignore_existing: bool :return: NoneType :rtype: None :raises CloudStorageError: If folder exists and `ignore_existing` is False. """ try: os.makedirs(path) except OSError: logger.debug(messages.CONTAINER_EXISTS, path) exp = sys.exc_info()[1] if exp.errno == errno.EEXIST and not ignore_existing: raise CloudStorageError(exp.strerror)
def get_driver(driver: DriverName) -> Driver:
    """Get driver class by DriverName enumeration member.

    .. code-block:: python

        >>> from aiocloudstorage import DriverName, get_driver
        >>> driver_cls = get_driver(DriverName.LOCAL)
        <class 'aiocloudstorage.drivers.local.LocalDriver'>

    :param driver: DriverName member.
    :type driver: :class:`.DriverName`
    :return: DriverName driver class.
    :rtype: :class:`.AzureStorageDriver`, :class:`.CloudFilesDriver`,
      :class:`.GoogleStorageDriver`, :class:`.S3Driver`,
      :class:`.LocalDriver`, :class:`.MinioDriver`
    :raises CloudStorageError: If the driver is not registered.
    """
    if driver not in _DRIVER_IMPORTS:
        raise CloudStorageError("Driver '%s' does not exist." % driver)
    # Lazily import the driver module so optional backends only load
    # when requested.
    module_path, class_name = _DRIVER_IMPORTS[driver]
    module = __import__(module_path, globals(), locals(), [class_name])
    return getattr(module, class_name)
def lock_local_file(path: str) -> filelock.FileLock:
    """Platform dependent file lock.

    :param path: File or directory path to lock.
    :type path: str
    :yield: File lock context manager.
    :yield type: :class:`filelock.FileLock`
    :raise CloudStorageError: If lock could not be acquired.
    """
    # NOTE(review): callers use ``with lock_local_file(...)``, so a
    # @contextmanager decorator presumably wraps this generator at the
    # definition site — confirm against the full module.
    lock = filelock.FileLock(path + '.lock')
    try:
        lock.acquire(timeout=0.1)
    except filelock.Timeout:
        raise CloudStorageError('Lock timeout')
    try:
        yield lock
    finally:
        # Release even when the guarded block raises; previously an
        # exception in the caller leaked both the lock and its file.
        if lock.is_locked:
            lock.release()
        os.remove(lock.lock_file)
def _check_storage_enabled():
    """Raise :class:`CloudStorageError` unless storage is enabled."""
    if _m['storage_enabled']:
        return
    raise CloudStorageError(STORAGE_NOT_ENABLED)