def test_metadata(self, provider, folder_object_metadata, folder_list_metadata):
    """Listing the root folder returns one metadata object per entry.

    Registers a mocked Box folder-items listing, then checks that
    ``provider.metadata`` produces a ``BoxFileMetadata`` for each file
    entry and a ``BoxFolderMetadata`` for each folder entry, in fixture
    order.
    """
    path = WaterButlerPath('/', _ids=(provider.folder, ))
    list_url = provider.build_url('folders', provider.folder, 'items', fields='id,name,size,modified_at,etag')
    aiohttpretty.register_json_uri('GET', list_url, body=folder_list_metadata)
    result = yield from provider.metadata(path)
    expected = []
    for x in folder_list_metadata['entries']:
        if x['type'] == 'file':
            expected.append(BoxFileMetadata(x, path.child(x['name'])))
        else:
            # Folder children carry folder=True so the expected path matches
            # the folder-flavored path the provider builds.
            expected.append(
                BoxFolderMetadata(x, path.child(x['name'], folder=True)))
    assert result == expected
def validate_path(self, path, **kwargs):
    """Resolve ``path`` into a :class:`WaterButlerPath` rooted at the project.

    The first segment may be an article title or id.  If it does not
    resolve to an existing article (ValueError, 404, or 401), the segment
    is treated as a plain (possibly not-yet-existing) file name.  Any
    remaining segments are delegated to the article's own sub-provider.
    """
    split = path.rstrip('/').split('/')[1:]
    wbpath = WaterButlerPath('/', _ids=(self.settings['project_id'], ), folder=True)
    if split:
        name_or_id = split.pop(0)
        try:
            article = yield from self._assert_contains_article(name_or_id)
        except ValueError:
            # Segment is not a valid article identifier: treat as a file name.
            return wbpath.child(name_or_id, folder=False)
        except exceptions.ProviderError as e:
            # 404/401 mean "no such / inaccessible article" -- fall back to a
            # file child; anything else is a genuine provider failure.
            if e.code not in (404, 401):
                raise
            return wbpath.child(name_or_id, folder=False)
        wbpath = wbpath.child(article['title'], article['id'], folder=True)
    if split:
        # More segments remain: let the article's provider validate the rest.
        provider = yield from self._make_article_provider(
            article['id'], check_parent=False)
        try:
            return (yield from provider.validate_path('/'.join([''] + split), parent=wbpath))
        except exceptions.ProviderError as e:
            if e.code not in (404, 401):
                raise
            return wbpath.child(split.pop(0), folder=False)
    return wbpath
async def test_metadata(self, provider, root_provider_fixtures):
    """Root-folder metadata listing yields one metadata object per entry."""
    root_path = WaterButlerPath('/', _ids=(provider.folder, ))
    listing = root_provider_fixtures['folder_list_metadata']
    list_url = provider.build_url(
        'folders', provider.folder, 'items',
        fields='id,name,size,modified_at,etag,total_count',
        offset=0, limit=1000)
    aiohttpretty.register_json_uri('GET', list_url, body=listing)

    result = await provider.metadata(root_path)

    expected = []
    for entry in listing['entries']:
        if entry['type'] == 'file':
            child = root_path.child(entry['name'])
            expected.append(BoxFileMetadata(entry, child))
        else:
            child = root_path.child(entry['name'], folder=True)
            expected.append(BoxFolderMetadata(entry, child))
    assert result == expected
def validate_path(self, path, **kwargs):
    """Resolve ``path`` into a :class:`WaterButlerPath` rooted at the project.

    The first segment may be an article title or id.  If it does not
    resolve to an existing article (ValueError, 404, or 401), the segment
    is treated as a plain (possibly not-yet-existing) file name.  Any
    remaining segments are delegated to the article's own sub-provider.
    """
    split = path.rstrip('/').split('/')[1:]
    wbpath = WaterButlerPath('/', _ids=(self.settings['project_id'], ), folder=True)
    if split:
        name_or_id = split.pop(0)
        try:
            article = yield from self._assert_contains_article(name_or_id)
        except ValueError:
            # Segment is not a valid article identifier: treat as a file name.
            return wbpath.child(name_or_id, folder=False)
        except exceptions.ProviderError as e:
            # 404/401 mean "no such / inaccessible article" -- fall back to a
            # file child; anything else is a genuine provider failure.
            if e.code not in (404, 401):
                raise
            return wbpath.child(name_or_id, folder=False)
        wbpath = wbpath.child(article['title'], article['id'], folder=True)
    if split:
        # More segments remain: let the article's provider validate the rest.
        provider = yield from self._make_article_provider(article['id'], check_parent=False)
        try:
            return (yield from provider.validate_path('/'.join([''] + split), parent=wbpath))
        except exceptions.ProviderError as e:
            if e.code not in (404, 401):
                raise
            return wbpath.child(split.pop(0), folder=False)
    return wbpath
async def revalidate_path(self, base: WaterButlerPath, path: str,
                          folder: bool=None) -> WaterButlerPath:
    """Resolve ``path`` as a direct child of ``base`` via Box's folder listing.

    Performs a case-insensitive name match over the folder's entries,
    optionally restricted to files or folders.  When no entry matches, the
    returned path has no id (i.e. a not-yet-existing child).
    """
    # TODO Research the search api endpoint
    async with self.request(
        'GET',
        self.build_url('folders', base.identifier, 'items', fields='id,name,type', limit=1000),
        expects=(200,),
        throws=exceptions.ProviderError
    ) as resp:
        listing = await resp.json()

    target = path.lower()
    _id = None
    for entry in listing['entries']:
        if entry['name'].lower() != target:
            continue
        entry_is_folder = entry['type'] == 'folder'
        if folder is not None and entry_is_folder != folder:
            continue
        _id = entry['id']
        folder = entry_is_folder
        break

    # Keep the caller-supplied casing of ``path`` rather than Box's name.
    return base.child(path, _id=_id, folder=folder)
async def test_intra_move_folder(self, provider, intra_fixtures, root_provider_fixtures):
    """``intra_move`` of a folder PUTs the move then re-lists the children.

    Mocks the folder-update PUT and the destination's items listing, and
    expects a ``BoxFolderMetadata`` with serialized children plus a
    ``created=True`` flag (nothing existed at the destination).
    """
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']
    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id']))
    file_url = provider.build_url('folders', src_path.identifier)
    list_url = provider.build_url(
        'folders', item['id'], 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)
    aiohttpretty.register_json_uri('PUT', file_url, body=item)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)
    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for child_item in list_metadata['entries']:
        child_path = dest_path.child(
            child_item['name'], folder=(child_item['type'] == 'folder'))
        serialized_child = provider._serialize_item(child_item, child_path)
        expected_folder._children.append(serialized_child)
    # Second tuple element is the "created" flag: True, no overwrite happened.
    expected = (expected_folder, True)
    result = await provider.intra_move(provider, src_path, dest_path)
    assert result == expected
async def _folder_metadata(
        self, path: WaterButlerPath,
        raw: bool = False) -> List[Union[BaseGoogleDriveMetadata, dict]]:
    """Return metadata for every child of ``path``, following pagination.

    Drive caps each page at 1000 results; ``nextLink`` is followed until
    the listing is exhausted.

    :param raw: when True, children are returned as raw Drive JSON items
    """
    next_url = self.build_url(
        'files', q=self._build_query(path.identifier), alt='json', maxResults=1000)
    children = []  # type: List[Union[BaseGoogleDriveMetadata, dict]]
    while next_url:
        async with self.request(
            'GET',
            next_url,
            expects=(200, ),
            throws=exceptions.MetadataError,
        ) as resp:
            page = await resp.json()
        for item in page['items']:
            children.append(self._serialize_item(path.child(item['title']), item, raw=raw))
        next_url = page.get('nextLink', None)
    return children
async def revalidate_path(self, base: WaterButlerPath, name: str,
                          folder: bool = None) -> WaterButlerPath:
    """Resolve ``name`` relative to ``base`` by walking Drive ids.

    :param base: the folder to look under
    :param name: the child path, possibly nested and possibly unquoted
    :param folder: whether the result should be treated as a folder
    """
    # TODO Redo the logic here folders names ending in /s
    # Will probably break
    if '/' in name.lstrip('/') and '%' not in name:
        # DAZ and MnC may pass unquoted names which break
        # if the name contains a / in it
        name = parse.quote(name.lstrip('/'), safe='')
    # A trailing slash marks a folder for the id-resolution walk below.
    if not name.endswith('/') and folder:
        name += '/'
    parts = await self._resolve_path_to_ids(name, start_at=[{
        'title': base.name,
        'mimeType': 'folder',
        'id': base.identifier,
    }])
    # The last resolved part is the target item itself.
    _id, name, mime = list(
        map(parts[-1].__getitem__, ('id', 'title', 'mimeType')))
    return base.child(name, _id=_id, folder='folder' in mime)
async def revalidate_path(self, base: wb_path.WaterButlerPath, path: str,
                          folder: bool=None) -> wb_path.WaterButlerPath:
    """Resolve ``path`` as a direct child of ``base`` via Box's folder listing.

    Does a case-insensitive name match against the folder's entries,
    optionally filtered by file/folder type.  An unmatched name returns a
    path with ``_id=None`` (a not-yet-existing child).

    :param base: the folder to look under
    :param path: the child name to resolve
    :param folder: if not ``None``, only match entries of that type
    """
    # TODO Research the search api endpoint
    async with self.request(
        'GET',
        self.build_url('folders', base.identifier, 'items', fields='id,name,type', limit=1000),
        expects=(200,),
        throws=exceptions.ProviderError
    ) as resp:
        data = await resp.json()
    lower_name = path.lower()
    try:
        # First entry whose lowered name matches (and type matches, if asked).
        item = next(
            x for x in data['entries']
            if x['name'].lower() == lower_name and (
                folder is None or (x['type'] == 'folder') == folder
            )
        )
        name = path  # Use path over x['name'] because of casing issues
        _id = item['id']
        folder = item['type'] == 'folder'
    except StopIteration:
        # No matching child: keep the caller's folder hint, no id.
        _id = None
        name = path
    return base.child(name, _id=_id, folder=folder)
async def test_intra_move_folder_replace(self, provider, intra_fixtures, root_provider_fixtures):
    """``intra_move`` over an existing folder deletes it first, then moves.

    Mocks the recursive DELETE of the pre-existing destination, the move
    PUT, and the destination's items listing.  Because an overwrite
    occurred, the "created" flag in the result tuple is False.
    """
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']
    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id'], item['id']))
    file_url = provider.build_url('folders', src_path.identifier)
    delete_url = provider.build_url('folders', dest_path.identifier, recursive=True)
    list_url = provider.build_url(
        'folders', item['id'], 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)
    aiohttpretty.register_json_uri('PUT', file_url, body=item)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)
    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for child_item in list_metadata['entries']:
        child_path = dest_path.child(child_item['name'], folder=(child_item['type'] == 'folder'))
        serialized_child = provider._serialize_item(child_item, child_path)
        expected_folder._children.append(serialized_child)
    expected = (expected_folder, False)
    result = await provider.intra_move(provider, src_path, dest_path)
    assert result == expected
    # The pre-existing destination folder must have been deleted.
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
async def test_intra_copy_folder(self, provider, intra_fixtures, root_provider_fixtures):
    """``intra_copy`` of a folder POSTs the copy then lists the new children.

    Mocks the copy POST and the destination items listing, expecting a
    ``BoxFolderMetadata`` with serialized children and ``created=True``.
    """
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']
    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id']))
    file_url = provider.build_url('folders', src_path.identifier, 'copy')
    list_url = provider.build_url(
        'folders', item['id'], 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)
    aiohttpretty.register_json_uri('POST', file_url, body=item)
    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for child_item in list_metadata['entries']:
        child_path = dest_path.child(child_item['name'], folder=(child_item['type'] == 'folder'))
        serialized_child = provider._serialize_item(child_item, child_path)
        expected_folder._children.append(serialized_child)
    # No overwrite happened, so the "created" flag is True.
    expected = (expected_folder, True)
    result = await provider.intra_copy(provider, src_path, dest_path)
    assert result == expected
def test_metadata(self, provider, folder_object_metadata, folder_list_metadata):
    """Listing the root folder returns one metadata object per entry.

    Folder entries build their expected child path with ``folder=True`` so
    the expected ``BoxFolderMetadata`` paths carry the folder flag, matching
    the paths the provider itself constructs (and matching the sibling
    test that already passes ``folder=True``).
    """
    path = WaterButlerPath("/", _ids=(provider.folder,))
    list_url = provider.build_url("folders", provider.folder, "items", fields="id,name,size,modified_at,etag")
    aiohttpretty.register_json_uri("GET", list_url, body=folder_list_metadata)
    result = yield from provider.metadata(path)
    expected = []
    for x in folder_list_metadata["entries"]:
        if x["type"] == "file":
            expected.append(BoxFileMetadata(x, path.child(x["name"])))
        else:
            # folder=True was missing here: the expected path for a folder
            # child must be folder-flavored like the provider's output.
            expected.append(BoxFolderMetadata(x, path.child(x["name"], folder=True)))
    assert result == expected
def test_metadata(self, provider, folder_object_metadata, folder_list_metadata):
    """Listing the root folder returns one metadata object per entry.

    Folder entries build their expected child path with ``folder=True`` so
    the expected ``BoxFolderMetadata`` paths carry the folder flag, matching
    the paths the provider itself constructs (and matching the sibling
    test that already passes ``folder=True``).
    """
    path = WaterButlerPath('/', _ids=(provider.folder, ))
    list_url = provider.build_url('folders', provider.folder, 'items', fields='id,name,size,modified_at,etag')
    aiohttpretty.register_json_uri('GET', list_url, body=folder_list_metadata)
    result = yield from provider.metadata(path)
    expected = []
    for x in folder_list_metadata['entries']:
        if x['type'] == 'file':
            expected.append(BoxFileMetadata(x, path.child(x['name'])))
        else:
            # folder=True was missing here: the expected path for a folder
            # child must be folder-flavored like the provider's output.
            expected.append(BoxFolderMetadata(x, path.child(x['name'], folder=True)))
    assert result == expected
async def test_metadata(self, provider, root_provider_fixtures):
    """Root-folder metadata listing yields one metadata object per entry.

    Registers a mocked paged items listing (offset 0, limit 1000) and
    checks file entries map to ``BoxFileMetadata`` and folder entries to
    ``BoxFolderMetadata`` with folder-flavored child paths.
    """
    path = WaterButlerPath('/', _ids=(provider.folder, ))
    list_url = provider.build_url(
        'folders', provider.folder, 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)
    list_metadata = root_provider_fixtures['folder_list_metadata']
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)
    result = await provider.metadata(path)
    expected = []
    for x in list_metadata['entries']:
        if x['type'] == 'file':
            expected.append(BoxFileMetadata(x, path.child(x['name'])))
        else:
            expected.append(BoxFolderMetadata(x, path.child(x['name'], folder=True)))
    assert result == expected
async def revalidate_path(self, base: wb_path.WaterButlerPath, path: str,
                          folder: bool=False) -> wb_path.WaterButlerPath:
    """Build the :class:`.WaterButlerPath` for ``path`` under ``base``.

    Default implementation for name-based providers: no id lookup is
    required, so the child path is constructed directly.  Id-based
    providers override this to resolve the child's id.

    :param base: ( :class:`.WaterButlerPath` ) the base folder to look under
    :param path: ( :class:`str` ) the path of a child of ``base``, relative to ``base``
    :param folder: ( :class:`bool` ) whether the returned path represents a folder
    :rtype: :class:`.WaterButlerPath`
    """
    child = base.child(path, folder=folder)
    return child
async def _get_folder_meta(
        self, path: WaterButlerPath, raw: bool = False,
        folder: bool = False) -> Union[dict, List[BoxFolderMetadata]]:
    """Fetch metadata for the folder at ``path``.

    With ``folder=True`` the folder object itself is returned; otherwise
    the folder's children are returned, paging through Box's listing
    endpoint 1000 items at a time.

    :param raw: return Box's raw JSON instead of serialized metadata
    """
    if folder:
        response = await self.make_request(
            'GET',
            self.build_url('folders', path.identifier),
            expects=(200, ),
            throws=exceptions.MetadataError,
        )
        data = await response.json()
        return data if raw else self._serialize_item(data, path)

    # Box maximum limit is 1000
    page_count, page_total, limit = 0, None, 1000
    full_resp = {} if raw else []  # type: ignore
    while page_total is None or page_count < page_total:
        url = self.build_url(
            'folders', path.identifier, 'items',
            fields='id,name,size,modified_at,etag,total_count',
            offset=(page_count * limit), limit=limit)
        response = await self.make_request(
            'GET', url,
            expects=(200, ),
            throws=exceptions.MetadataError,
        )
        resp_json = await response.json()
        if raw:
            full_resp.update(resp_json)  # type: ignore
        else:
            full_resp.extend([  # type: ignore
                self._serialize_item(
                    each, path.child(each['name'], folder=(each['type'] == 'folder')))
                for each in resp_json['entries']
            ])
        page_count += 1
        if page_total is None:
            # total_count is only needed once to size the page loop.
            page_total = (
                (resp_json['total_count'] - 1) // limit) + 1  # ceiling div
    self.metrics.add('metadata.folder.pages', page_total)
    return full_resp
async def _folder_metadata(self, path: WaterButlerPath,
                           raw: bool=False) -> List[Union[BaseGoogleDriveMetadata, dict]]:
    """Return metadata for every child of ``path``, following pagination.

    Drive caps each page at 1000 results; ``nextLink`` is followed until
    the listing is exhausted.

    :param raw: when True, children are returned as raw Drive JSON items
    """
    query = self._build_query(path.identifier)
    built_url = self.build_url('files', q=query, alt='json', maxResults=1000)
    full_resp = []
    while built_url:
        async with self.request(
            'GET',
            built_url,
            expects=(200, ),
            throws=exceptions.MetadataError,
        ) as resp:
            resp_json = await resp.json()
            full_resp.extend([
                self._serialize_item(path.child(item['title']), item, raw=raw)
                for item in resp_json['items']
            ])
            # Absent nextLink ends the pagination loop.
            built_url = resp_json.get('nextLink', None)
    return full_resp
async def revalidate_path(self, base: WaterButlerPath, name: str,
                          folder: bool=None) -> WaterButlerPath:
    """Resolve ``name`` relative to ``base`` by walking Drive ids.

    :param base: the folder to look under
    :param name: the child path, possibly nested and possibly unquoted
    :param folder: whether the result should be treated as a folder
    """
    # TODO Redo the logic here folders names ending in /s
    # Will probably break
    if '/' in name.lstrip('/') and '%' not in name:
        # DAZ and MnC may pass unquoted names which break
        # if the name contains a / in it
        name = parse.quote(name.lstrip('/'), safe='')
    # A trailing slash marks a folder for the id-resolution walk below.
    if not name.endswith('/') and folder:
        name += '/'
    parts = await self._resolve_path_to_ids(name, start_at=[{
        'title': base.name,
        'mimeType': 'folder',
        'id': base.identifier,
    }])
    # The last resolved part is the target item itself.
    _id, name, mime = list(map(parts[-1].__getitem__, ('id', 'title', 'mimeType')))
    return base.child(name, _id=_id, folder='folder' in mime)
async def test_intra_copy_folder_replace(self, provider, intra_fixtures, root_provider_fixtures):
    """``intra_copy`` over an existing folder deletes it first, then copies.

    Mocks the recursive DELETE of the pre-existing destination, the copy
    POST, and the destination's items listing.  Because an overwrite
    occurred, the "created" flag in the result tuple is False.
    """
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']
    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id'], item['id']))
    file_url = provider.build_url('folders', src_path.identifier, 'copy')
    delete_url = provider.build_url('folders', dest_path.identifier, recursive=True)
    list_url = provider.build_url(
        'folders', item['id'], 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)
    aiohttpretty.register_json_uri('POST', file_url, body=item)
    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for child_item in list_metadata['entries']:
        child_path = dest_path.child(
            child_item['name'], folder=(child_item['type'] == 'folder'))
        serialized_child = provider._serialize_item(child_item, child_path)
        expected_folder._children.append(serialized_child)
    expected = (expected_folder, False)
    result = await provider.intra_copy(provider, src_path, dest_path)
    assert result == expected
    # The pre-existing destination folder must have been deleted.
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
async def _get_folder_meta(self, path: WaterButlerPath, raw: bool=False,
                           folder: bool=False) -> Union[dict, List[BoxFolderMetadata]]:
    """Fetch metadata for the folder at ``path``.

    With ``folder=True`` the folder object itself is returned; otherwise
    the folder's children are returned, paging through Box's listing
    endpoint 1000 items at a time.

    :param raw: return Box's raw JSON instead of serialized metadata
    """
    if folder:
        async with self.request(
            'GET',
            self.build_url('folders', path.identifier),
            expects=(200, ),
            throws=exceptions.MetadataError,
        ) as resp:
            data = await resp.json()
            return data if raw else self._serialize_item(data, path)

    # Box maximum limit is 1000
    page_count, page_total, limit = 0, None, 1000
    full_resp = {} if raw else []  # type: ignore
    while page_total is None or page_count < page_total:
        url = self.build_url('folders', path.identifier, 'items',
                             fields='id,name,size,modified_at,etag,total_count',
                             offset=(page_count * limit), limit=limit)
        async with self.request('GET', url, expects=(200, ),
                                throws=exceptions.MetadataError) as response:
            resp_json = await response.json()
            if raw:
                full_resp.update(resp_json)  # type: ignore
            else:
                full_resp.extend([  # type: ignore
                    self._serialize_item(
                        each, path.child(each['name'], folder=(each['type'] == 'folder'))
                    ) for each in resp_json['entries']
                ])
            page_count += 1
            if page_total is None:
                # total_count is only needed once to size the page loop.
                page_total = ((resp_json['total_count'] - 1) // limit) + 1  # ceiling div
    self.metrics.add('metadata.folder.pages', page_total)
    return full_resp
def path_from_metadata(self, parent_path: wb_path.WaterButlerPath,
                       meta_data: wb_metadata.BaseMetadata) -> wb_path.WaterButlerPath:
    """Construct the child path for ``meta_data`` beneath ``parent_path``.

    The child's id is taken from the metadata's path with surrounding
    slashes stripped; the folder flag mirrors the metadata's own.
    """
    item_id = meta_data.path.strip('/')
    return parent_path.child(meta_data.name, _id=item_id, folder=meta_data.is_folder)
def test_child(self):
    """A child built from a folder path takes the given name."""
    parent = WaterButlerPath('/this/is/a/long/')
    child = parent.child('path')
    assert parent.name == 'long'
    assert child.name == 'path'