def _ensure_connection(self): """Defines token, endpoint and temp_url_key if they are not already defined :raises ProviderError: If no temp url key is available """ # Must have a temp url key for download and upload # Currently You must have one for everything however if not self.token or not self.endpoint: data = yield from self._get_token() self.token = data['access']['token']['id'] if self.use_public: self.public_endpoint, _ = self._extract_endpoints(data) self.endpoint = self.public_endpoint else: self.public_endpoint, self.endpoint = self._extract_endpoints( data) if not self.temp_url_key: resp = yield from self.make_request('HEAD', self.endpoint, expects=(204, )) try: self.temp_url_key = resp.headers[ 'X-Account-Meta-Temp-URL-Key'].encode() except KeyError: raise exceptions.ProviderError('No temp url key is available', code=503)
async def _ensure_connection(self):
    """Defines token, endpoint and temp_url_key if they are not already defined

    Fetches an auth token/endpoint pair when either is missing, then looks
    up the account's temp URL key, which is required for signed URLs.

    :raises ProviderError: If no temp url key is available
    """
    # Must have a temp url key for download and upload
    # Currently You must have one for everything however
    self.metrics.add('ensure_connection.has_token_and_endpoint', True)
    self.metrics.add('ensure_connection.has_temp_url_key', True)
    if not self.token or not self.endpoint:
        self.metrics.add('ensure_connection.has_token_and_endpoint', False)
        data = await self._get_token()
        self.token = data['access']['token']['id']
        # bool(...) instead of the redundant ``True if ... else False``
        self.metrics.add('ensure_connection.use_public', bool(self.use_public))
        if self.use_public:
            # Only the public endpoint is used; the internal one is discarded
            self.public_endpoint, _ = self._extract_endpoints(data)
            self.endpoint = self.public_endpoint
        else:
            self.public_endpoint, self.endpoint = self._extract_endpoints(
                data)
    if not self.temp_url_key:
        self.metrics.add('ensure_connection.has_temp_url_key', False)
        # The temp URL key lives in the account metadata headers
        async with self.request('HEAD', self.endpoint, expects=(204, )) as resp:
            try:
                self.temp_url_key = resp.headers[
                    'X-Account-Meta-Temp-URL-Key'].encode()
            except KeyError:
                raise exceptions.ProviderError(
                    'No temp url key is available', code=503)
async def prepare(self):
    """Builds an MFR provider instance, to which it passes the ``url``
    query parameter. From that, the file metadata is extracted. Also builds
    cached waterbutler providers.

    :raises ProviderError: 400 when the required ``url`` argument is missing
    """
    # CORS preflight requests carry no ``url`` argument; nothing to prepare.
    if self.request.method == 'OPTIONS':
        return

    try:
        self.url = self.request.query_arguments['url'][0].decode('utf-8')
    except (KeyError, IndexError):
        # IndexError covers the "url given but with no value" edge case,
        # which would otherwise escape as an unhandled 500.
        raise exceptions.ProviderError(
            '"url" is a required argument.',
            provider=settings.PROVIDER_NAME,
            code=400,
        )

    self.provider = utils.make_provider(settings.PROVIDER_NAME, self.request, self.url)
    self.metadata = await self.provider.metadata()
    self.extension_metrics.add('ext', self.metadata.ext)

    # Remote cache provider used to store rendered/exported artifacts
    self.cache_provider = waterbutler.core.utils.make_provider(
        settings.CACHE_PROVIDER_NAME,
        {},  # User information which can be left blank
        settings.CACHE_PROVIDER_CREDENTIALS,
        settings.CACHE_PROVIDER_SETTINGS)
    # Local filesystem provider used as a scratch/staging cache
    self.local_cache_provider = waterbutler.core.utils.make_provider(
        'filesystem', {}, {}, settings.LOCAL_CACHE_PROVIDER_SETTINGS)

    self.source_file_id = uuid.uuid4()
    # Fresh id per request (intentionally distinct from source_file_id)
    self.add_header('X-MFR-REQUEST-ID', str(uuid.uuid4()))
async def _get_file_upload_url(self, article_id, file_id):
    """Request an upload url and partitioning spec from Figshare.

    See: https://docs.figshare.com/api/file_uploader/

    :param str article_id: the id of the parent article
    :param str file_id: the name of the file
    :returns (str, list): the upload url and the parts specification
    """
    # TODO: retry with backoff
    file_url = self.build_url(False, 'articles', article_id, 'files', file_id)
    file_resp = await self.make_request('GET', file_url, expects=(200, 404))
    if file_resp.status == 404:
        # Release the connection before bailing out
        await file_resp.release()
        raise exceptions.ProviderError(
            'Could not get upload_url. File creation may have taken more '
            'than {} seconds to finish.'.format(str(settings.FILE_CREATE_WAIT)))

    upload_url = (await file_resp.json())['upload_url']

    # The upload url itself serves the parts specification
    parts_resp = await self.make_request('GET', upload_url, expects=(200, ))
    parts_json = await parts_resp.json()
    return upload_url, parts_json['parts']  # str, list
def create_folder(self, *args, **kwargs):
    """Create a folder in the current provider.

    returns True if the folder was created; False if it already existed

    :rtype: :class:`waterbutler.core.metadata.BaseFolderMetadata`
    :raises: :class:`waterbutler.core.exceptions.FolderCreationError`
    """
    # This provider does not implement folder creation.
    error_body = {'message': 'Folder creation not supported.'}
    raise exceptions.ProviderError(error_body, code=405)
def create_folder(self, path, **kwargs):
    """Create a folder in the current provider at `path`.

    Returns a `BaseFolderMetadata` object if successful.  May throw a 409
    Conflict if a directory with the same name already exists.

    :param str path: user-supplied path to create. must be a directory.
    :param boolean precheck_folder: flag to check for folder before attempting create
    :rtype: :class:`waterbutler.core.metadata.BaseFolderMetadata`
    :raises: :class:`waterbutler.core.exceptions.FolderCreationError`
    """
    # Default implementation: folder creation is opt-in per provider.
    raise exceptions.ProviderError(
        {'message': 'Folder creation not supported.'},
        code=405,
    )
def _assert_contains_article(self, article_id):
    """Return the listing entry for ``article_id``.

    :raises ProviderError: 404 when the article is not in the listing
    """
    articles_json = yield from self._list_articles()
    # Linear scan of the listing; raise 404 if no entry matches.
    for article in articles_json:
        if article['id'] == int(article_id):
            return article
    raise exceptions.ProviderError(
        'Article {0} not found'.format(article_id),
        code=http.client.NOT_FOUND,
    )
async def create_folder(self, path: wb_path.WaterButlerPath,
                        **kwargs) -> wb_metadata.BaseFolderMetadata:
    """Create a folder in the current provider at `path`. Returns a
    `BaseFolderMetadata` object if successful.  May throw a 409 Conflict if a
    directory with the same name already exists.

    :param path: ( :class:`.WaterButlerPath` ) User-supplied path to create. Must be a directory.
    :rtype: :class:`.BaseFolderMetadata`
    :raises: :class:`.CreateFolderError`
    """
    # Default implementation: providers must override to support folders.
    raise exceptions.ProviderError({'message': 'Folder creation not supported.'}, code=405)
def __new__(cls, auth, credentials, settings):
    """Dispatch to a project- or article-scoped provider based on settings."""
    container_type = settings['container_type']
    if container_type == 'project':
        project_settings = dict(settings, project_id=settings['container_id'])
        return FigshareProjectProvider(auth, credentials, project_settings)
    if container_type in ('article', 'fileset'):
        article_settings = dict(settings, article_id=settings['container_id'])
        return FigshareArticleProvider(auth, credentials, article_settings)
    raise exceptions.ProviderError('Invalid "container_type" {0}'.format(
        container_type))
def __init__(self, auth, credentials, settings):
    """Validate the configured container type and record container metrics."""
    super().__init__(auth, credentials, settings)
    self.token = self.credentials['token']

    given_type = self.settings['container_type']
    if given_type not in self.VALID_CONTAINER_TYPES:
        raise exceptions.ProviderError('{} is not a valid container type.'.format(given_type))
    # 'fileset' is a legacy alias; normalize it to 'article'
    self.container_type = 'article' if given_type == 'fileset' else given_type
    self.container_id = self.settings['container_id']

    self.metrics.add('container', {
        'given_type': given_type,
        'actual_type': self.container_type,
    })
def __new__(cls, auth, credentials, settings):
    """Choose the concrete figshare provider implied by ``container_type``."""
    container_type = settings['container_type']
    scoped_settings = dict(settings, container_id=settings['container_id'])

    if container_type == 'project':
        return FigshareProjectProvider(auth, credentials, scoped_settings)
    if container_type in pd_settings.ARTICLE_CONTAINER_TYPES:
        return FigshareArticleProvider(auth, credentials, scoped_settings)

    raise exceptions.ProviderError(
        'Invalid "container_type" {0}'.format(container_type)
    )
def _fetch_tree(self, sha, recursive=False):
    """Fetch the git tree object for ``sha`` from the GitHub API.

    :param str sha: sha of the tree to fetch
    :param bool recursive: if True, request the full subtree listing
    :rtype: dict
    :raises MetadataError: on a non-200 response from GitHub
    :raises ProviderError: 501 when GitHub truncates the tree listing
    """
    url = furl.furl(self.build_repo_url('git', 'trees', sha))
    if recursive:
        url.args.update({'recursive': 1})
    resp = yield from self.make_request('GET', url.url, expects=(200, ), throws=exceptions.MetadataError)
    tree = yield from resp.json()
    # GitHub truncates very large trees; operating on a partial listing
    # could silently drop files, so refuse rather than risk data loss.
    if tree['truncated']:
        raise exceptions.ProviderError((
            'Some folder operations on large GitHub repositories cannot be supported without'
            ' data loss. To carry out this operation, please perform it in a local git'
            ' repository, then push to the target repository on GitHub.'), code=501)
    return tree
def revisions(self, path, **kwargs):
    """Always fail with a 405: figshare has no revision support."""
    raise exceptions.ProviderError(
        {'message': 'figshare does not support file revisions.'},
        code=405,
    )