async def test_ensure_connection(self, provider, auth_json, mock_temp_key):
    """Verify that ``_ensure_connection`` POSTs to the cloud auth endpoint."""
    auth_url = cloud_settings.AUTH_URL
    aiohttpretty.register_json_uri('POST', auth_url, body=auth_json)

    await provider._ensure_connection()

    assert aiohttpretty.has_call(method='POST', uri=auth_url)
async def test_upload_new(self, monkeypatch, provider_and_mock_one, file_stream,
                          upload_response, upload_path, mock_time):
    """A brand-new upload reports created=True and cleans up the temp path."""
    self.patch_uuid(monkeypatch)
    provider, inner_provider = provider_and_mock_one
    children_url = 'https://waterbutler.io/{}/children/'.format(upload_path.parent.identifier)
    aiohttpretty.register_json_uri('POST', children_url, status=201, body=upload_response)
    inner_provider.metadata = utils.MockCoroutine(return_value=utils.MockFileMetadata())

    result, created = await provider.upload(file_stream, upload_path)

    assert created is True
    assert result.name == '[TEST]'
    assert result.extra['version'] == 8
    assert result.provider == 'osfstorage'
    assert result.extra['downloads'] == 0
    assert result.extra['checkout'] is None
    assert upload_path.identifier_path == result.path

    inner_provider.delete.assert_called_once_with(WaterButlerPath('/patched_path'))
    hash_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
    inner_provider.metadata.assert_called_once_with(hash_path)
    inner_provider.upload.assert_called_once_with(
        file_stream, WaterButlerPath('/patched_path'),
        check_created=False, fetch_metadata=False)
def test_upload_existing(monkeypatch, provider_and_mock, file_stream):
    """Re-uploading a known file reports created=False and moves the pending upload."""
    provider, inner_provider = provider_and_mock
    module = 'waterbutler.providers.osfstorage.provider.{}'

    aiohttpretty.register_json_uri('POST', 'https://waterbutler.io', status=200,
                                   body={'downloads': 10, 'version': 8})

    move_future = asyncio.Future()
    move_future.set_result({})
    inner_provider.move.return_value = move_future
    inner_provider.metadata.side_effect = exceptions.ProviderError('Boom!')

    monkeypatch.setattr(module.format('os.rename'), lambda *_: None)
    monkeypatch.setattr(module.format('settings.RUN_TASKS'), False)
    monkeypatch.setattr(module.format('uuid.uuid4'), lambda: 'uniquepath')

    res, created = yield from provider.upload(file_stream, '/foopath')

    assert created is False
    assert res['name'] == 'foopath'
    assert res['extra']['version'] == 8
    assert res['provider'] == 'osfstorage'
    assert res['extra']['downloads'] == 10

    sha_path = '/' + file_stream.writers['sha256'].hexdigest
    inner_provider.upload.assert_called_once_with(file_stream, '/uniquepath',
                                                  check_created=False, fetch_metadata=False)
    inner_provider.metadata.assert_called_once_with(sha_path)
    inner_provider.move.assert_called_once_with(inner_provider, {'path': '/uniquepath'},
                                                {'path': sha_path})
def test_upload_and_tasks(monkeypatch, provider_and_mock, file_stream, credentials, settings):
    """A new upload with RUN_TASKS enabled fires the parity and backup tasks."""
    provider, inner_provider = provider_and_mock
    module = 'waterbutler.providers.osfstorage.provider.{}'

    mock_parity = mock.Mock()
    mock_backup = mock.Mock()
    move_future = asyncio.Future()
    move_future.set_result({})

    aiohttpretty.register_json_uri('POST', 'https://waterbutler.io', status=201,
                                   body={'version': 42, 'downloads': 30})
    inner_provider.metadata.side_effect = exceptions.ProviderError('Boom!')
    inner_provider.move.return_value = move_future

    monkeypatch.setattr(module.format('backup.main'), mock_backup)
    monkeypatch.setattr(module.format('parity.main'), mock_parity)
    monkeypatch.setattr(module.format('settings.RUN_TASKS'), True)
    monkeypatch.setattr(module.format('os.rename'), lambda *_: None)
    monkeypatch.setattr(module.format('uuid.uuid4'), lambda: 'uniquepath')

    res, created = yield from provider.upload(file_stream, '/foopath')

    assert created is True
    assert res['name'] == 'foopath'
    assert res['extra']['version'] == 42
    assert res['provider'] == 'osfstorage'
    assert res['extra']['downloads'] == 30

    inner_provider.upload.assert_called_once_with(file_stream, '/uniquepath',
                                                  check_created=False, fetch_metadata=False)

    complete_path = os.path.join(FILE_PATH_COMPLETE, file_stream.writers['sha256'].hexdigest)
    mock_parity.assert_called_once_with(complete_path, credentials['parity'],
                                        settings['parity'])
    mock_backup.assert_called_once_with(complete_path, 42, 'https://waterbutler.io',
                                        credentials['archive'], settings['parity'])

    sha_path = '/' + file_stream.writers['sha256'].hexdigest
    inner_provider.metadata.assert_called_once_with(sha_path)
    inner_provider.move.assert_called_once_with(inner_provider, {'path': '/uniquepath'},
                                                {'path': sha_path})
def test_provider_metadata(monkeypatch, provider):
    """Listing metadata returns a serialized entry for each remote item.

    Fixes: the trailing ``has_call`` check used a misspelled ``parmas``
    kwarg, pointed at the wrong uri (missing ``/metadata``), and its
    boolean result was never asserted, so it silently checked nothing.
    """
    items = [
        {'name': 'foo', 'path': '/foo', 'kind': 'file', 'version': 10, 'downloads': 1},
        {'name': 'bar', 'path': '/bar', 'kind': 'file', 'version': 10, 'downloads': 1},
        {'name': 'baz', 'path': '/baz', 'kind': 'folder'},
    ]
    metadata_url = 'https://waterbutler.io/metadata'
    aiohttpretty.register_json_uri('GET', metadata_url, status=200, body=items)

    res = yield from provider.metadata(path='/unrelatedpath')

    assert isinstance(res, list)
    for item in res:
        assert isinstance(item, dict)
        assert item['name'] is not None
        assert item['path'] is not None
        assert item['provider'] == 'osfstorage'

    assert aiohttpretty.has_call(method='GET', uri=metadata_url)
def test_upload_create_nested(self, provider, file_stream):
    """Uploading into a nested, partially-missing path triggers a resumable upload."""
    path = '/ed/sullivan/show.mp3'
    upload_id = '7'
    parts = path.split('/')

    urls, bodies = [], []
    for idx, part in enumerate(parts[:-1]):
        query = provider._build_query(idx or provider.folder['id'], title=parts[idx + 1])
        # 'sullivan' is the missing intermediate folder in this scenario
        body = {'items': []} if part == 'sullivan' else fixtures.generate_list(idx + 1)
        url = provider.build_url('files', q=query, alt='json')
        aiohttpretty.register_json_uri('GET', url, body=body)
        urls.append(url)
        bodies.append(body)

    item = fixtures.list_file['items'][0]
    start_upload_url = provider._build_upload_url('files', uploadType='resumable')
    finish_upload_url = provider._build_upload_url('files', uploadType='resumable',
                                                   upload_id=upload_id)
    aiohttpretty.register_uri(
        'POST', start_upload_url,
        headers={'LOCATION': 'http://waterbutler.io?upload_id={}'.format(upload_id)})
    aiohttpretty.register_json_uri('PUT', finish_upload_url, body=item)

    result, created = yield from provider.upload(file_stream, path)

    assert aiohttpretty.has_call(method='POST', uri=start_upload_url)
    assert aiohttpretty.has_call(method='PUT', uri=finish_upload_url)
    assert created is True
    assert result == GoogleDriveFileMetadata(item, '/ed/sullivan').serialized()
async def test_register_json_uri(self):
    """A registered JSON body is stored encoded and round-trips through json."""
    url = 'http://example.com/'
    payload = {'test_key': 'test_value'}
    aiohttpretty.register_json_uri('GET', url, body=payload)

    options = aiohttpretty.registry[('GET', url)]
    assert json.loads(options['body'].decode('utf-8')) == payload
async def test_article_file_contents(self, article_provider, folder_article_metadata,
                                     folder_file_metadata):
    """Metadata for a file inside an article resolves ids, paths, and privacy.

    Fixes: removed a leftover debug ``print`` of the file metadata url.
    """
    root_parts = article_provider.root_path_parts
    article_id = str(folder_article_metadata['id'])
    article_name = folder_article_metadata['title']
    file_id = str(folder_file_metadata['id'])
    file_name = folder_file_metadata['name']

    folder_article_metadata_url = article_provider.build_url(False, *root_parts)
    file_metadata_url = article_provider.build_url(False, *root_parts, 'files', file_id)
    aiohttpretty.register_json_uri('GET', folder_article_metadata_url,
                                   body=folder_article_metadata)
    aiohttpretty.register_json_uri('GET', file_metadata_url, body=folder_file_metadata)

    path = await article_provider.validate_path('/{}'.format(file_id))
    result = await article_provider.metadata(path)

    assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
    assert aiohttpretty.has_call(method='GET', uri=file_metadata_url)

    expected = metadata.FigshareFileMetadata(folder_article_metadata, folder_file_metadata)
    assert result == expected
    assert str(result.id) == file_id
    assert result.name == file_name
    assert result.path == '/{}/{}'.format(article_id, file_id)
    assert result.materialized_path == '/{}/{}'.format(article_name, file_name)
    assert result.article_name == article_name
    assert result.size == folder_file_metadata['size']
    assert result.is_public == (PRIVATE_IDENTIFIER not in folder_article_metadata['url'])
async def test_metadata_no_files(self, provider, empty_native_dataset_metadata):
    """An empty dataset yields an empty metadata listing."""
    url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                             key=provider.token)
    aiohttpretty.register_json_uri('GET', url, status=200,
                                   body=empty_native_dataset_metadata)

    path = await provider.validate_path('/')
    result = await provider.metadata(path, version='latest')

    assert result == []
async def test_validate_v1_path_file(self, provider, native_dataset_metadata):
    """v1 path validation accepts a file id and rejects a trailing slash."""
    draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                   key=provider.token)
    published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)
    aiohttpretty.register_json_uri('GET', draft_url, status=200,
                                   body=native_dataset_metadata)
    aiohttpretty.register_json_uri('GET', published_url, status=200,
                                   body=native_dataset_metadata)

    path = '/21'
    try:
        wb_path_v1 = await provider.validate_v1_path(path)
    except Exception as exc:
        pytest.fail(str(exc))

    # A folder-style path for a file id must 404.
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path(path + '/')
    assert exc.value.code == client.NOT_FOUND

    wb_path_v0 = await provider.validate_path(path)
    assert wb_path_v1 == wb_path_v0
async def test_download(self, provider, native_dataset_metadata):
    """Downloading a validated file path streams the registered bytes."""
    file_id = '/21'
    download_url = provider.build_url(dvs.DOWN_BASE_URL, file_id, key=provider.token)
    draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                   key=provider.token)
    published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)

    aiohttpretty.register_uri('GET', download_url, body=b'better', auto_length=True)
    aiohttpretty.register_json_uri('GET', draft_url, status=200,
                                   body=native_dataset_metadata)
    aiohttpretty.register_json_uri('GET', published_url, status=200,
                                   body=native_dataset_metadata)

    path = await provider.validate_path(file_id)
    result = await provider.download(path)
    content = await result.read()

    assert content == b'better'
async def test_validate_v1_path(self, provider, path, kind):
    """v1 validation resolves against the default branch and rejects the wrong kind."""
    test_fixtures = fixtures.validate_path

    default_branch_body = test_fixtures['default_branch']
    default_branch = default_branch_body['name']
    default_branch_url = provider._build_v1_repo_url('main-branch')
    aiohttpretty.register_json_uri('GET', default_branch_url, body=default_branch_body)

    dir_listing_body = test_fixtures['root_dir_listing']
    dir_listing_url = provider._build_v1_repo_url('src', default_branch) + '/'
    aiohttpretty.register_json_uri('GET', dir_listing_url, body=dir_listing_body)

    try:
        wb_path_v1 = await provider.validate_v1_path(path)
    except Exception as exc:
        pytest.fail(str(exc))

    wb_path_v0 = await provider.validate_path(path)
    assert wb_path_v1 == wb_path_v0
    assert wb_path_v1.branch_name == default_branch
    # TODO: assert commitSha

    # Flipping the folder/file suffix must not validate.
    bad_path = path.rstrip('/') if kind == 'folder' else path + '/'
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path(bad_path)
def test_metadata_file(self, provider, content_repo_metadata_root_file_txt):
    """File metadata fetched from the contents endpoint serializes as expected."""
    path = GitHubPath('/file.txt')
    url = provider.build_repo_url('contents', path.path)
    aiohttpretty.register_json_uri('GET', url, body=content_repo_metadata_root_file_txt)

    result = yield from provider.metadata(str(path))

    expected = GitHubFileContentMetadata(content_repo_metadata_root_file_txt).serialized()
    assert result == expected
def test_project_upload(self, project_provider, list_project_articles,
                        base_article_metadata, article_metadata, upload_metadata,
                        file_content, file_stream):
    """Uploading to a project creates an article, uploads the file, and links it."""
    article_id = str(list_project_articles[0]['id'])

    list_articles_url = project_provider.build_url(
        'projects', project_provider.project_id, 'articles')
    article_metadata_url = project_provider.build_url('articles', article_id)
    article_upload_url = project_provider.build_url('articles', article_id, 'files')
    create_article_url = project_provider.build_url('articles')
    add_article_url = project_provider.build_url(
        'projects', project_provider.project_id, 'articles')

    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)
    aiohttpretty.register_json_uri('PUT', article_upload_url, body=upload_metadata)
    aiohttpretty.register_json_uri('POST', create_article_url, body=base_article_metadata)
    aiohttpretty.register_json_uri('PUT', add_article_url)

    file_name = 'barricade.gif'
    path = yield from project_provider.validate_path('/' + file_name)
    result, created = yield from project_provider.upload(file_stream, path)

    expected = metadata.FigshareFileMetadata(
        upload_metadata,
        parent=base_article_metadata,
        child=True,
    ).serialized()

    assert aiohttpretty.has_call(
        method='POST',
        uri=create_article_url,
        data=json.dumps({
            'title': 'barricade.gif',
            'defined_type': 'dataset',
        })
    )
    assert aiohttpretty.has_call(method='PUT', uri=article_upload_url)
    assert aiohttpretty.has_call(
        method='PUT',
        uri=add_article_url,
        data=json.dumps({'article_id': int(article_id)})
    )
    assert result == expected
def test_upload_existing(self, monkeypatch, provider_and_mock, file_stream):
    """Uploading over an existing identifier reports created=False."""
    self.patch_tasks(monkeypatch)
    provider, inner_provider = provider_and_mock

    path = WaterButlerPath('/foopath', _ids=('Test', 'OtherTest'))
    children_url = 'https://waterbutler.io/{}/children/'.format(path.parent.identifier)

    inner_provider.move.return_value = (utils.MockFileMetadata(), True)
    inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=404)
    aiohttpretty.register_json_uri('POST', children_url, status=200, body={
        'data': {'downloads': 10, 'version': 8, 'path': '/24601',
                 'checkout': 'hmoco', 'md5': '1234', 'sha256': '2345'}})

    res, created = yield from provider.upload(file_stream, path)

    assert created is False
    assert res.name == 'foopath'
    assert res.path == '/24601'
    assert res.extra['version'] == 8
    assert res.provider == 'osfstorage'
    assert res.extra['downloads'] == 10
    assert res.extra['checkout'] == 'hmoco'

    sha_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
    inner_provider.metadata.assert_called_once_with(sha_path)
    inner_provider.upload.assert_called_once_with(
        file_stream, WaterButlerPath('/uniquepath'),
        check_created=False, fetch_metadata=False)
    inner_provider.move.assert_called_once_with(
        inner_provider, WaterButlerPath('/uniquepath'), sha_path)
def test_validate_v1_path_folder(self, provider, search_for_folder_response,
                                 actual_folder_response):
    """A folder path validates with a trailing slash and 404s without one."""
    folder_name = 'foofolder'
    folder_id = 'whyis6afraidof7'

    query_url = provider.build_url(
        'files',
        provider.folder['id'],
        'children',
        q="title = '{}'".format(folder_name),
        fields='items(id)'
    )
    specific_url = provider.build_url('files', folder_id, fields='id,title,mimeType')
    aiohttpretty.register_json_uri('GET', query_url, body=search_for_folder_response)
    aiohttpretty.register_json_uri('GET', specific_url, body=actual_folder_response)

    try:
        wb_path_v1 = yield from provider.validate_v1_path('/' + folder_name + '/')
    except Exception as exc:
        pytest.fail(str(exc))

    with pytest.raises(exceptions.NotFoundError) as exc:
        yield from provider.validate_v1_path('/' + folder_name)
    assert exc.value.code == client.NOT_FOUND

    wb_path_v0 = yield from provider.validate_path('/' + folder_name + '/')
    assert wb_path_v1 == wb_path_v0
def test_upload_and_tasks(self, monkeypatch, provider_and_mock, file_stream,
                          credentials, settings):
    """A new upload with RUN_TASKS enabled schedules parity and backup jobs."""
    provider, inner_provider = provider_and_mock
    module = 'waterbutler.providers.osfstorage.provider.{}'
    path = WaterButlerPath('/foopath', _ids=('Test', 'OtherTest'))
    children_url = 'https://waterbutler.io/{}/children/'.format(path.parent.identifier)

    mock_parity = mock.Mock()
    mock_backup = mock.Mock()
    inner_provider.move.return_value = (utils.MockFileMetadata(), True)
    inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=404)
    aiohttpretty.register_json_uri('POST', children_url, status=201, body={
        'version': 'versionpk',
        'data': {'version': 42, 'downloads': 30, 'path': '/alkjdaslke09',
                 'checkout': None, 'md5': 'abcd', 'sha256': 'bcde'}})

    monkeypatch.setattr(module.format('backup.main'), mock_backup)
    monkeypatch.setattr(module.format('parity.main'), mock_parity)
    monkeypatch.setattr(module.format('settings.RUN_TASKS'), True)
    monkeypatch.setattr(module.format('os.rename'), lambda *_: None)
    monkeypatch.setattr(module.format('uuid.uuid4'), lambda: 'uniquepath')

    res, created = yield from provider.upload(file_stream, path)

    assert created is True
    assert res.name == 'foopath'
    assert res.extra['version'] == 42
    assert res.provider == 'osfstorage'
    assert res.extra['downloads'] == 30
    assert res.extra['checkout'] is None

    inner_provider.upload.assert_called_once_with(
        file_stream, WaterButlerPath('/uniquepath'),
        check_created=False, fetch_metadata=False)

    complete_path = os.path.join(FILE_PATH_COMPLETE, file_stream.writers['sha256'].hexdigest)
    mock_parity.assert_called_once_with(complete_path, credentials['parity'],
                                        settings['parity'])
    mock_backup.assert_called_once_with(complete_path, 'versionpk',
                                        'https://waterbutler.io/hooks/metadata/',
                                        credentials['archive'], settings['parity'])

    sha_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
    inner_provider.metadata.assert_called_once_with(sha_path)
    inner_provider.move.assert_called_once_with(
        inner_provider, WaterButlerPath('/uniquepath'), sha_path)
async def test_validate_v1_path_commit_sha(self, provider, arg_name, arg_val, attr_name):
    """Validating with a commit-sha or branch kwarg sets ref/extra consistently."""
    test_fixtures = fixtures.validate_path
    dir_listing_body = test_fixtures['root_dir_listing']
    base_commit = dir_listing_body['node']

    dir_listing_url = provider._build_v1_repo_url('src', arg_val) + '/'
    aiohttpretty.register_json_uri('GET', dir_listing_url, body=dir_listing_body)

    path = '/foo-file.txt'
    kwargs = {arg_name: arg_val}
    try:
        wb_path_v1 = await provider.validate_v1_path(path, **kwargs)
    except Exception as exc:
        pytest.fail(str(exc))

    # Work out what the provider should have resolved the ref to.
    expected_ref = arg_val
    if attr_name == 'commit_sha' and len(arg_val) < len(base_commit):
        # short sha: the provider expands it to the full commit hash
        arg_val = base_commit
        expected_ref = base_commit
    if attr_name != 'commit_sha':
        expected_ref = base_commit
    commit_sha = expected_ref
    branch_name = None if attr_name == 'commit_sha' else arg_val

    assert getattr(wb_path_v1, attr_name) == arg_val
    assert wb_path_v1.ref == expected_ref
    assert wb_path_v1.extra == {
        'commitSha': commit_sha,
        'branchName': branch_name,
    }

    wb_path_v0 = await provider.validate_path(path, **kwargs)
    assert wb_path_v1 == wb_path_v0
async def test_upload_existing(self, monkeypatch, provider_and_mock_one, file_stream,
                               upload_path, upload_response, mock_time):
    """Uploading to an already-known path updates in place (created=False)."""
    self.patch_uuid(monkeypatch)
    provider, inner_provider = provider_and_mock_one

    children_url = 'https://waterbutler.io/{}/children/'.format(upload_path.parent.identifier)
    inner_provider.move.return_value = (utils.MockFileMetadata(), True)
    inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=404)
    aiohttpretty.register_json_uri('POST', children_url, status=200, body=upload_response)

    res, created = await provider.upload(file_stream, upload_path)

    assert created is False
    assert res.name == '[TEST]'
    assert res.extra['version'] == 8
    assert res.provider == 'osfstorage'
    assert res.extra['downloads'] == 0
    assert res.extra['checkout'] is None
    assert upload_path.identifier_path == res.path

    sha_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
    inner_provider.metadata.assert_called_once_with(sha_path)
    inner_provider.upload.assert_called_once_with(
        file_stream, WaterButlerPath('/patched_path'),
        check_created=False, fetch_metadata=False)
    inner_provider.move.assert_called_once_with(
        inner_provider, WaterButlerPath('/patched_path'), sha_path)
async def test_intra_move_folder_replace(self, provider, intra_fixtures,
                                         root_provider_fixtures):
    """Moving a folder over an existing one deletes the target then moves."""
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']

    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id'], item['id']))

    file_url = provider.build_url('folders', src_path.identifier)
    delete_url = provider.build_url('folders', dest_path.identifier, recursive=True)
    list_url = provider.build_url(
        'folders', item['id'], 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)

    aiohttpretty.register_json_uri('PUT', file_url, body=item)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)

    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for entry in list_metadata['entries']:
        entry_path = dest_path.child(entry['name'], folder=(entry['type'] == 'folder'))
        expected_folder._children.append(provider._serialize_item(entry, entry_path))
    expected = (expected_folder, False)

    result = await provider.intra_move(provider, src_path, dest_path)

    assert result == expected
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
async def test_intra_copy_folder(self, provider, intra_fixtures, root_provider_fixtures):
    """Copying a folder within the provider serializes the new folder's children."""
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']

    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id']))

    copy_url = provider.build_url('folders', src_path.identifier, 'copy')
    list_url = provider.build_url(
        'folders', item['id'], 'items',
        fields='id,name,size,modified_at,etag,total_count', offset=0, limit=1000)

    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)
    aiohttpretty.register_json_uri('POST', copy_url, body=item)

    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for entry in list_metadata['entries']:
        entry_path = dest_path.child(entry['name'], folder=(entry['type'] == 'folder'))
        expected_folder._children.append(provider._serialize_item(entry, entry_path))
    expected = (expected_folder, True)

    result = await provider.intra_copy(provider, src_path, dest_path)
    assert result == expected
async def test_upload_create(self, provider, file_stream, native_file_metadata,
                             empty_native_dataset_metadata, native_dataset_metadata):
    """A first-time upload POSTs the file and re-fetches the draft metadata."""
    path = '/thefile.txt'
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)

    latest_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                    key=provider.token)
    latest_published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)

    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
    # First GET: dataset is empty; second GET (after upload): the file is present.
    aiohttpretty.register_uri('GET', latest_url, responses=[
        {
            'status': 200,
            'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
        {
            'status': 200,
            'body': json.dumps(native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
    ])

    path = await provider.validate_path(path)
    metadata, created = await provider.upload(file_stream, path)

    expected = DataverseFileMetadata(native_file_metadata['datafile'], 'latest')

    assert created is True
    assert metadata == expected
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=latest_url)
    assert aiohttpretty.has_call(method='GET', uri=latest_published_url)
def test_provider_metadata_empty(monkeypatch, provider):
    """An empty remote listing comes back as an empty list.

    Fixes: ``register_json_uri`` takes ``status`` (not ``status_code``),
    matching every other registration in this file; the trailing
    ``has_call`` used a misspelled ``parmas`` kwarg, pointed at the wrong
    uri (missing ``/metadata``), and its result was never asserted.
    """
    metadata_url = 'https://waterbutler.io/metadata'
    aiohttpretty.register_json_uri('GET', metadata_url, status=200, body=[])

    res = yield from provider.metadata(path='/unrelatedpath')

    assert res == []
    assert aiohttpretty.has_call(method='GET', uri=metadata_url)
async def test_draft_metadata_missing(self, provider):
    """A 404 from the draft endpoint surfaces as a MetadataError."""
    url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                             key=provider.token)
    aiohttpretty.register_json_uri('GET', url, status=404)

    path = await provider.validate_path('/')

    with pytest.raises(exceptions.MetadataError):
        await provider.metadata(path, version='latest')
def test_metadata_file_root_not_found(self, provider):
    """Asking for a nonexistent root-level file raises a 404 MetadataError."""
    path = '/birdie.jpg'
    query = provider._build_query(provider.folder['id'], title=path.lstrip('/'))
    list_file_url = provider.build_url('files', q=query, alt='json')
    aiohttpretty.register_json_uri('GET', list_file_url, body=fixtures.list_file_empty)

    with pytest.raises(exceptions.MetadataError) as exc_info:
        yield from provider.metadata(path)
    assert exc_info.value.code == 404
def test_metadata_file_nested_not_child(self, provider):
    """A nested path whose first segment is missing raises a 404 MetadataError."""
    path = '/ed/sullivan/show.mp3'
    query = provider._build_query(provider.folder['id'], title='ed')
    url = provider.build_url('files', q=query, alt='json')
    aiohttpretty.register_json_uri('GET', url, body={'items': []})

    with pytest.raises(exceptions.MetadataError) as exc_info:
        yield from provider.metadata(path)
    assert exc_info.value.code == 404
def test_metadata_root_folder(self, provider):
    """Listing the root folder serializes each returned item."""
    query = provider._build_query(provider.folder['id'])
    list_file_url = provider.build_url('files', q=query, alt='json')
    aiohttpretty.register_json_uri('GET', list_file_url, body=fixtures.list_file)

    result = yield from provider.metadata('/')

    expected = GoogleDriveFileMetadata(fixtures.list_file['items'][0], '/').serialized()
    assert result == [expected]
def test_metadata_not_child(self, provider, folder_object_metadata):
    """A folder that is not a child of the configured root raises a 404."""
    provider.folder += 'yourenotmydad'
    path = BoxPath('/' + provider.folder + '/')
    object_url = provider.build_url('folders', provider.folder)
    aiohttpretty.register_json_uri('GET', object_url, body=folder_object_metadata)

    with pytest.raises(exceptions.MetadataError) as exc_info:
        yield from provider.metadata(str(path))
    assert exc_info.value.code == 404
async def test_no_such_repository(self, provider):
    """A 404 on the project lookup surfaces as NotFoundError.

    Fixes: dropped the unused ``root_path`` binding inside ``pytest.raises``
    (the call raises, so the name was never assigned or read).
    """
    provider.repo_id = '456'
    default_branch_url = 'http://base.url/api/v4/projects/456'
    aiohttpretty.register_json_uri('GET', default_branch_url, body={}, status=404)

    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path('/')
    assert exc.value.code == 404
async def test_uninitialized_repository(self, provider):
    """A repository with no default branch raises UninitializedRepositoryError.

    Fixes: dropped the unused ``root_path`` binding inside ``pytest.raises``
    (the call raises, so the name was never assigned or read).
    """
    provider.repo_id = '456'
    default_branch_url = 'http://base.url/api/v4/projects/456'
    aiohttpretty.register_json_uri('GET', default_branch_url,
                                   body={"default_branch": None})

    with pytest.raises(exceptions.UninitializedRepositoryError) as exc:
        await provider.validate_v1_path('/')
    assert exc.value.code == 400
async def test_get_path_metadata_without_commit_sha(self, provider, path_metadata_file):
    """Fetching path metadata without a commit sha first resolves the branch head."""
    path = BitbucketPath('/file0001.20bytes.txt',
                         _ids=[(None, 'develop'), (None, 'develop')])
    provider._fetch_branch_commit_sha = MockCoroutine(return_value=COMMIT_SHA)

    file_metadata = json.loads(path_metadata_file)['root']
    query_params = {
        'format': 'meta',
        'fields': 'commit.hash,commit.date,path,size,links.history.href'
    }
    path_meta_url = '{}/?{}'.format(
        provider._build_v2_repo_url('src', COMMIT_SHA, *path.path_tuple()),
        urlencode(query_params))
    aiohttpretty.register_json_uri('GET', path_meta_url, body=file_metadata)

    await provider._fetch_path_metadata(path)

    assert provider._fetch_branch_commit_sha.called
    assert path.commit_sha == COMMIT_SHA
async def test_revalidate_path_file(self, root_provider, root_provider_fixtures):
    """Revalidating a child file resolves its id; folder lookup for it 404s."""
    file_name = 'toes.txt'
    file_id = root_provider_fixtures['file_id']
    root_id = 'root'

    parent_path = OneDrivePath('/', _ids=[root_id])
    expected_path = OneDrivePath('/{}'.format(file_name), _ids=[root_id, file_id])

    parent_url = root_provider._build_drive_url(*parent_path.api_identifier,
                                                expand='children')
    aiohttpretty.register_json_uri('GET', parent_url,
                                   body=root_provider_fixtures['root_metadata'],
                                   status=200)

    actual_path = await root_provider.revalidate_path(parent_path, file_name, False)
    assert actual_path == expected_path

    with pytest.raises(exceptions.NotFoundError) as exc:
        await root_provider.revalidate_path(parent_path, file_name, True)
async def test_revisions(self, provider):
    """File revisions map commits onto serialized revision metadata."""
    gl_path = GitLabPath('/folder1/folder2/file', _ids=([('a1b2c3d4', 'master')] * 4))
    url = ('http://base.url/api/v4/projects/123/repository/commits'
           '?path=folder1/folder2/file&ref_name=a1b2c3d4')
    aiohttpretty.register_json_uri('GET', url, body=fixtures.revisions_for_file())

    revisions = await provider.revisions(gl_path)

    assert len(revisions) == 3
    assert revisions[0].serialized() == {
        'version': '931aece9275c0d084dfa7f6e0b3b2bb250e4b089',
        'modified': '2017-07-24T16:02:17.000-04:00',
        'modified_utc': '2017-07-24T20:02:17+00:00',
        'versionIdentifier': 'commitSha',
        'extra': {
            'user': {
                'name': 'Fitz Elliott',
            },
        },
    }
async def test_revalidate_path(self, provider, native_dataset_metadata):
    """``revalidate_path`` resolves both existing and brand-new child names."""
    draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                   key=provider.token)
    published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)
    aiohttpretty.register_json_uri('GET', draft_url, status=200,
                                   body=native_dataset_metadata)
    aiohttpretty.register_json_uri('GET', published_url, status=200,
                                   body=native_dataset_metadata)

    base = await provider.validate_v1_path('/')

    wb_path = await provider.revalidate_path(base, '/thefile.txt')
    assert wb_path.name == 'thefile.txt'

    wb_path = await provider.revalidate_path(base, '/new_path')
    assert wb_path.name == 'new_path'
async def test_validate_v1_path_folder(self, provider, provider_fixtures):
    """A folder validates with a trailing slash and 404s without one.

    Fixes: removed the unreachable ``wb_path_v1 = None`` after
    ``pytest.fail`` — ``fail`` raises, so the assignment was dead code
    that would otherwise have broken the final equality assertion.
    """
    folder_path = '/Photos'
    data = {"path": folder_path}
    metadata_url = provider.build_url('files', 'get_metadata')
    aiohttpretty.register_json_uri('POST', metadata_url, data=data,
                                   body=provider_fixtures['folder_metadata'])

    try:
        wb_path_v1 = await provider.validate_v1_path(folder_path + '/')
    except Exception as exc:
        pytest.fail(str(exc))

    with pytest.raises(core_exceptions.NotFoundError) as exc:
        await provider.validate_v1_path(folder_path)
    assert exc.value.code == HTTPStatus.NOT_FOUND

    wb_path_v0 = await provider.validate_path(folder_path + '/')
    assert wb_path_v1 == wb_path_v0
async def test_delete_root(self, provider, provider_fixtures):
    """Deleting the root with confirmation issues deletes for its children."""
    list_url = provider.build_url('files', 'list_folder')
    path = await provider.validate_path('/')
    aiohttpretty.register_json_uri('POST', list_url, data={'path': path.full_path},
                                   body=provider_fixtures['folder_children'],
                                   status=HTTPStatus.OK)

    path2 = await provider.validate_path('/photos/flower.jpg')
    delete_url = provider.build_url('files', 'delete_v2')
    delete_data = {
        'path': provider.folder.rstrip('/') + '/' + path2.path.rstrip('/')
    }
    aiohttpretty.register_json_uri('POST', delete_url, data=delete_data,
                                   status=HTTPStatus.OK)

    await provider.delete(path, 1)

    assert aiohttpretty.has_call(method='POST', uri=delete_url)
def test_project_article_download_not_found(self, project_provider, list_project_articles,
                                            article_metadata, file_metadata):
    """Downloading a file id absent from the article raises NotFoundError."""
    file_id = str(file_metadata['id'])[::-1]  # reversed id guaranteed not to match
    article_id = str(list_project_articles[0]['id'])

    article_metadata_url = project_provider.build_url('articles', article_id)
    list_articles_url = project_provider.build_url(
        'projects', project_provider.project_id, 'articles')
    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)

    path = yield from project_provider.validate_path('/{}/{}'.format(article_id, file_id))

    with pytest.raises(exceptions.NotFoundError) as exc:
        yield from project_provider.download(path)
    assert exc.value.code == 404
async def test_delete_folder_contents(self, provider_one, file_path, folder_path,
                                      folder_children_metadata, mock_time):
    """Deleting a folder's contents issues one delete per child entry."""
    provider_one.validate_path = utils.MockCoroutine(return_value=file_path)
    provider_one.delete = utils.MockCoroutine()

    children_url, params = build_signed_url_without_auth(
        provider_one, 'GET', folder_path.identifier, 'children',
        user_id=provider_one.auth['id'])
    aiohttpretty.register_json_uri('GET', children_url, params=params, status=200,
                                   body=folder_children_metadata)

    await provider_one._delete_folder_contents(folder_path)

    provider_one.delete.assert_called_with(file_path)
    assert provider_one.delete.call_count == 4
async def test_get_revisions_no_revisions(self, provider):
    """With an empty revisions list, a single stub revision is synthesized from file metadata."""
    file_item = fixtures.list_file['items'][0]
    file_id = file_item['id']

    aiohttpretty.register_json_uri(
        'GET', provider.build_url('files', file_id), body=file_item)
    aiohttpretty.register_json_uri(
        'GET', provider.build_url('files', file_id, 'revisions'),
        body=fixtures.revisions_list_empty)

    path = WaterButlerPath('/birdie.jpg', _ids=('doesntmatter', file_id))
    result = await provider.revisions(path)

    stub_revision = GoogleDriveRevision({
        'modifiedDate': file_item['modifiedDate'],
        'id': fixtures.revisions_list_empty['etag'] + ds.DRIVE_IGNORE_VERSION,
    })
    assert result == [stub_revision]
async def test_validate_v1_path_folder(self, provider, branch_metadata, repo_tree_metadata_root):
    """v1 validation accepts the folder path with a trailing slash and 404s without it."""
    tree_sha = branch_metadata['commit']['commit']['tree']['sha']
    aiohttpretty.register_json_uri(
        'GET', provider.build_repo_url('branches', provider.default_branch),
        body=branch_metadata)
    aiohttpretty.register_json_uri(
        'GET', provider.build_repo_url('git', 'trees', tree_sha, recursive=1),
        body=repo_tree_metadata_root)

    folder = 'level1'

    try:
        v1_path = await provider.validate_v1_path('/' + folder + '/')
    except Exception as exc:
        pytest.fail(str(exc))

    # Same name without the trailing slash must be rejected as a file lookup.
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path('/' + folder)
    assert exc.value.code == client.NOT_FOUND

    v0_path = await provider.validate_path('/' + folder + '/')
    assert v1_path == v0_path
async def test_upload_new(self, monkeypatch, provider_and_mock_one, file_stream, upload_response, upload_path, mock_time):
    """Uploading a brand-new file reports created=True and cleans up the pending path."""
    self.patch_uuid(monkeypatch)
    provider, inner_provider = provider_and_mock_one
    inner_provider.metadata = utils.MockCoroutine(
        return_value=utils.MockFileMetadata())

    children_url = 'https://waterbutler.io/{}/children/'.format(
        upload_path.parent.identifier)
    aiohttpretty.register_json_uri('POST', children_url, status=201, body=upload_response)

    metadata, created = await provider.upload(file_stream, upload_path)

    assert created is True
    assert metadata.name == '[TEST]'
    assert metadata.provider == 'osfstorage'
    assert metadata.extra['version'] == 8
    assert metadata.extra['downloads'] == 0
    assert metadata.extra['checkout'] is None
    assert upload_path.identifier_path == metadata.path

    sha_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
    inner_provider.delete.assert_called_once_with(WaterButlerPath('/patched_path'))
    inner_provider.metadata.assert_called_once_with(sha_path)
    inner_provider.upload.assert_called_once_with(
        file_stream, WaterButlerPath('/patched_path'),
        check_created=False, fetch_metadata=False)
async def test_download(monkeypatch, provider_and_mock, osf_response, mock_path, mock_time):
    """download() resolves the signed OSF url, then delegates to the inner provider."""
    provider, inner_provider = provider_and_mock

    unsigned_url = provider.build_url(mock_path.identifier, 'download',
                                      version=None, mode=None)
    signed_url, _, signed_params = provider.build_signed_url('GET', unsigned_url)
    aiohttpretty.register_json_uri('GET', signed_url, params=signed_params,
                                   body=osf_response)

    await provider.download(mock_path)

    assert aiohttpretty.has_call(method='GET', uri=signed_url, params=signed_params)
    assert provider.make_provider.called
    provider.make_provider.assert_called_once_with(osf_response['settings'])
    assert inner_provider.download.called
    inner_provider.download.assert_called_once_with(
        path=WaterButlerPath('/test/path'), displayName='unrelatedpath')
async def test_validate_path_nested(self, provider_one, file_lineage, folder_lineage, mock_time):
    """validate_path on 'New Folder/<file_id>' resolves the lineage to a 3-part path."""
    file_id = file_lineage['data'][0]['id']

    file_url, file_params = build_signed_url_without_auth(
        provider_one, 'GET', file_id, 'lineage')
    aiohttpretty.register_json_uri('GET', file_url, params=file_params,
                                   status=200, body=file_lineage)

    folder_url, folder_params = build_signed_url_without_auth(
        provider_one, 'GET', 'New%20Folder', 'lineage')
    aiohttpretty.register_json_uri('GET', folder_url, params=folder_params,
                                   status=200, body=folder_lineage)

    path = await provider_one.validate_path('New Folder/' + file_id)

    assert len(path._parts) == 3
    assert path.name == '59a9b628b7d1c903ab5a8f52'
async def test_validate_v1_path_folder(self, provider, subfolder_tree):
    """A trailing-slash path validates as a folder; sha/branch propagate up the ancestry."""
    tree_url = ('http://base.url/api/v4/projects/123/repository/tree'
                '?path=files/&page=1&per_page={}&ref=a1b2c3d4'.format(provider.MAX_PAGE_SIZE))
    aiohttpretty.register_json_uri('GET', tree_url, body=subfolder_tree)

    try:
        folder_path = await provider.validate_v1_path(
            '/files/lfs/', commitSha='a1b2c3d4', branch='master')
    except Exception as exc:
        pytest.fail(str(exc))

    assert folder_path.is_folder
    assert not folder_path.is_root

    # The commit sha and branch must be carried by the folder, its parent,
    # and the root.
    ancestor = folder_path
    for _ in range(3):
        assert ancestor.commit_sha == 'a1b2c3d4'
        assert ancestor.branch_name == 'master'
        ancestor = ancestor.parent
async def test_intra_move_replace_folder(self, provider, provider_fixtures, error_fixtures):
    """An intra-provider folder move that hits a name conflict deletes the
    conflicting destination and retries, returning the moved folder's metadata.

    ``move_v2`` is registered with two queued responses — a 409 CONFLICT
    followed by a success — to exercise the delete-and-retry branch.
    """
    # Mock deleting the conflicting destination folder.
    url = provider.build_url('files', 'delete_v2')
    path = await provider.validate_path('/newfolder/')
    data = {'path': path.full_path}
    aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK)

    # Mock listing the folder's children after the move succeeds.
    url = provider.build_url('files', 'list_folder')
    data = {'path': path.full_path}
    aiohttpretty.register_json_uri(
        'POST', url, data=data, body=provider_fixtures['folder_children'],
        status=HTTPStatus.OK
    )

    src_path = WaterButlerPath('/pfile/', prepend=provider.folder)
    dest_path = WaterButlerPath('/pfile_renamed/', prepend=provider.folder)

    # move_v2: first call conflicts, second call succeeds.
    url = provider.build_url('files', 'move_v2')
    data = {
        'from_path': src_path.full_path.rstrip('/'),
        'to_path': dest_path.full_path.rstrip('/')
    }
    aiohttpretty.register_json_uri('POST', url, **{
        "responses": [
            {
                'headers': {'Content-Type': 'application/json'},
                'data': data,
                'body': json.dumps(error_fixtures['rename_conflict_folder_metadata']).encode('utf-8'),
                'status': HTTPStatus.CONFLICT
            },
            {
                'headers': {'Content-Type': 'application/json'},
                'data': data,
                'body': json.dumps(
                    provider_fixtures['intra_move_copy_folder_metadata_v2']).encode('utf-8')
            },
        ]
    })

    result = await provider.intra_move(provider, src_path, dest_path)

    expected = DropboxFolderMetadata(
        provider_fixtures['intra_move_copy_folder_metadata_v2']['metadata'],
        provider.folder
    )
    expected.children = [
        DropboxFileMetadata(item, provider.folder)
        for item in provider_fixtures['folder_children']['entries']
    ]
    assert expected == result[0]
def test_delete_file(self, provider, native_dataset_metadata):
    """delete() issues a DELETE against the edit-media endpoint for the file id."""
    file_id = '21'
    delete_url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', file_id)
    aiohttpretty.register_json_uri('DELETE', delete_url, status=204)

    # validate_path consults both the draft and the published dataset listings.
    for state in ('latest', 'latest-published'):
        listing_url = provider.build_url(
            dvs.JSON_BASE_URL.format(provider._id, state), key=provider.token)
        aiohttpretty.register_json_uri('GET', listing_url, status=200,
                                       body=native_dataset_metadata)

    path = yield from provider.validate_path(file_id)
    yield from provider.delete(path)

    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
async def test_get_metadata_for_folder(
        self,
        provider,
        path_metadata_folder,
        folder_contents_page_1,
        folder_contents_page_2,
):
    """Folder metadata for the repo root follows pagination across two
    `src` listing pages and returns one entry per child (15 total)."""
    # Root path has no commit sha yet, so the provider must look it up
    # from the branch name.
    path = BitbucketPath('/', _ids=[(None, 'develop')], folder=True)
    provider._fetch_branch_commit_sha = MockCoroutine(
        return_value=COMMIT_SHA)

    # Mock the single-path meta lookup for the folder itself.
    folder_metadata = json.loads(path_metadata_folder)['root']
    query_params = {
        'format': 'meta',
        'fields': 'commit.hash,commit.date,path,size,links.history.href'
    }
    path_meta_url = '{}/?{}'.format(
        provider._build_v2_repo_url('src', COMMIT_SHA, *path.path_tuple()),
        urlencode(query_params))
    aiohttpretty.register_json_uri('GET', path_meta_url, body=folder_metadata)

    # Mock page 1 of the directory listing ...
    dir_contents_first_page = json.loads(folder_contents_page_1)
    query_params = {
        'pagelen': provider.RESP_PAGE_LEN,
        'fields': 'values.path,values.size,values.type,next',
    }
    dir_list_base_url = provider._build_v2_repo_url(
        'src', COMMIT_SHA, *path.path_tuple())
    dir_list_first_url = '{}/?{}'.format(dir_list_base_url, urlencode(query_params))
    aiohttpretty.register_json_uri('GET', dir_list_first_url, body=dir_contents_first_page)

    # ... and page 2, reachable via page 1's 'next' link.
    dir_contents_next_page = json.loads(folder_contents_page_2)
    dir_list_next_url = dir_contents_first_page['next']
    aiohttpretty.register_json_uri('GET', dir_list_next_url, body=dir_contents_next_page)

    result = await provider.metadata(path)

    # The branch-to-sha lookup must have happened for the sha-less root path.
    assert provider._fetch_branch_commit_sha.called
    assert len(result) == 15
async def test_validate_v1_path_folder(self, provider, search_for_folder_response, actual_folder_response, no_file_response):
    """v1 validation finds the folder via a title search; the file-form query 404s."""
    folder_name = 'foofolder'
    folder_id = 'whyis6afraidof7'

    folder_query_url = provider.build_url(
        'files', provider.folder['id'], 'children',
        q=_build_title_search_query(provider, folder_name, True),
        fields='items(id)')
    file_query_url = provider.build_url(
        'files', provider.folder['id'], 'children',
        q=_build_title_search_query(provider, folder_name, False),
        fields='items(id)')
    item_url = provider.build_url('files', folder_id, fields='id,title,mimeType')

    aiohttpretty.register_json_uri('GET', folder_query_url, body=search_for_folder_response)
    aiohttpretty.register_json_uri('GET', file_query_url, body=no_file_response)
    aiohttpretty.register_json_uri('GET', item_url, body=actual_folder_response)

    try:
        v1_path = await provider.validate_v1_path('/' + folder_name + '/')
    except Exception as exc:
        pytest.fail(str(exc))

    # Without the trailing slash the lookup runs as a file search and must fail.
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path('/' + folder_name)
    assert exc.value.code == client.NOT_FOUND

    v0_path = await provider.validate_path('/' + folder_name + '/')
    assert v1_path == v0_path
def test_upload_updates(self, provider, file_stream, native_file_metadata, native_dataset_metadata):
    """Uploading over an existing draft file POSTs the new content, DELETEs
    the old file id, and reports ``created`` as False."""
    path = '/20'
    # Study-level edit-media endpoint receives the new file content.
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)
    # Draft ('latest') dataset listing; the fixture contains the existing file.
    published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token)
    aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata)
    delete_url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', '/20')  # Old file id
    aiohttpretty.register_json_uri('DELETE', delete_url, status=204)
    # Published listing is empty — only the draft contains the file.
    latest_published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)
    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})

    path = yield from provider.validate_path(path)
    metadata, created = yield from provider.upload(file_stream, path)

    entry = native_file_metadata['datafile']
    expected = DataverseFileMetadata(entry, 'latest')

    assert metadata == expected
    assert created is False
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=published_url)
async def test_upload_conflict_keep(self, provider, root_provider_fixtures, file_stream):
    """conflict='keep' uploads a renamed copy and still reports created=True."""
    upload_metadata = root_provider_fixtures['upload_metadata']
    entry = upload_metadata['entries'][0]
    path = WaterButlerPath('/newfile', _ids=(provider.folder, entry['id']))

    upload_url = provider._build_upload_url('files', 'content')
    aiohttpretty.register_json_uri('POST', upload_url, status=201, body=upload_metadata)
    aiohttpretty.register_json_uri(
        'GET', provider.build_url('files', path.identifier), body=upload_metadata)

    parent_folder_id = entry['path_collection']['entries'][1]['id']
    aiohttpretty.register_json_uri(
        'GET',
        provider.build_url('folders', parent_folder_id, 'items',
                           fields='id,name,type', limit=1000),
        body=root_provider_fixtures['folder_list_metadata'])

    metadata, created = await provider.upload(file_stream, path, conflict='keep')

    # since the metadata for the renamed conflict file isn't actually saved, this one is odd to
    # test.
    assert metadata.serialized() == BoxFileMetadata(entry, path).serialized()
    assert created is True
    assert path.identifier_path == metadata.path
    assert aiohttpretty.has_call(method='POST', uri=upload_url)
async def test_project_delete(self, project_provider, list_project_articles, article_metadata):
    """Deleting an article path DELETEs against the project's articles endpoint."""
    article_id = str(list_project_articles[0]['id'])
    articles_url = project_provider.build_url(
        'projects', project_provider.project_id, 'articles')

    aiohttpretty.register_json_uri(
        'GET', project_provider.build_url('articles', article_id),
        body=article_metadata)
    aiohttpretty.register_json_uri('GET', articles_url, body=list_project_articles)
    aiohttpretty.register_json_uri('DELETE', articles_url,
                                   body={'article_id': article_id})

    path = await project_provider.validate_path('/{}'.format(article_id))
    result = await project_provider.delete(path)

    assert result is None
    assert aiohttpretty.has_call(method='DELETE', uri=articles_url)
def patch_url_method(context, uri, status, method):
    """Register a mocked JSON response for *uri* using the step's HTTP *method*.

    The response body comes from the step's text payload, defaulting to an
    empty JSON object when no text is supplied.
    """
    payload = json.loads(context.text or '{}')
    aiohttpretty.register_json_uri(method.upper(), uri, status=status, body=payload)
def patch_url(context, uri, status):
    """Register a mocked GET JSON response for *uri* from the step's text payload."""
    payload = json.loads(context.text or '{}')
    aiohttpretty.register_json_uri('GET', uri, status=status, body=payload)
def mock_auth(auth_json):
    """Stub the auth endpoint so POSTs to AUTH_URL return *auth_json*."""
    aiohttpretty.register_json_uri('POST', settings.AUTH_URL, body=auth_json)
async def test_get_metadata_for_file(self, provider, path_metadata_file, file_history_page_1, file_history_page_2):
    """File metadata is assembled from the path-meta lookup plus a paginated
    commit-history walk; the branch-to-sha lookup must be skipped because the
    path already carries a commit sha."""
    # Path already has a commit sha, so _fetch_branch_commit_sha should NOT run.
    path = BitbucketPath('/file0001.20bytes.txt',
                         _ids=[(COMMIT_SHA, 'develop'), (COMMIT_SHA, 'develop')])
    provider._fetch_branch_commit_sha = MockCoroutine(
        return_value=COMMIT_SHA)

    # Mock the single-path meta lookup for the file.
    file_metadata = json.loads(path_metadata_file)['root']
    query_params = {
        'format': 'meta',
        'fields': 'commit.hash,commit.date,path,size,links.history.href'
    }
    path_meta_url = '{}/?{}'.format(
        provider._build_v2_repo_url('src', COMMIT_SHA, *path.path_tuple()),
        urlencode(query_params))
    aiohttpretty.register_json_uri('GET', path_meta_url, body=file_metadata)

    # Mock page 1 of the file's commit history ...
    file_history_page_1 = json.loads(file_history_page_1)
    query_params = {
        'pagelen': provider.RESP_PAGE_LEN,
        'fields': 'values.commit.hash,values.commit.date,values.commit.author.raw,'
                  'values.size,values.path,values.type,next'
    }
    file_history_first_url = '{}?{}'.format(
        file_metadata['links']['history']['href'],
        urlencode(query_params))
    aiohttpretty.register_json_uri('GET', file_history_first_url, body=file_history_page_1)

    # ... and page 2, reachable via page 1's 'next' link.
    file_history_page_2 = json.loads(file_history_page_2)
    file_history_next_url = file_history_page_1['next']
    aiohttpretty.register_json_uri('GET', file_history_next_url, body=file_history_page_2)

    metadata = await provider.metadata(path)

    assert not provider._fetch_branch_commit_sha.called
    assert metadata is not None
    assert metadata.name == 'file0001.20bytes.txt'
    assert metadata.path == '/file0001.20bytes.txt'
    assert metadata.kind == 'file'
    assert metadata.modified == '2019-04-25T11:58:30+00:00'
    assert metadata.modified_utc == '2019-04-25T11:58:30+00:00'
    assert metadata.created_utc == '2019-04-24T12:18:21+00:00'
    assert metadata.content_type is None
    assert metadata.size == 20
    assert metadata.size_as_int == 20
    # etag combines the materialized path with the commit sha.
    assert metadata.etag == '{}::{}'.format('/file0001.20bytes.txt', COMMIT_SHA)
    assert metadata.provider == 'bitbucket'
    assert metadata.last_commit_sha == 'ad0412ab6f8e'
    assert metadata.commit_sha == COMMIT_SHA
    assert metadata.branch_name == BRANCH
async def test_intra_foo_folder_overwrite(self, provider_one, provider_two, folder_children_metadata, mock_time, action, method_name):
    """An intra-provider folder move/copy onto an existing destination folder
    deletes the destination, performs the hook action, and rebuilds the
    folder metadata via the DESTINATION provider only.

    ``action``/``method_name`` select intra_move vs intra_copy — presumably
    supplied by a parametrize decorator not visible in this chunk.
    """
    src_provider = provider_one
    src_provider.delete = utils.MockCoroutine()
    src_provider.validate_v1_path = utils.MockCoroutine()
    src_provider._children_metadata = utils.MockCoroutine()

    dest_provider = provider_two
    dest_provider.delete = utils.MockCoroutine()
    dest_provider.validate_v1_path = utils.MockCoroutine(
        return_value=WaterButlerPath('/folder1/', _ids=('RootId', 'folder1')))
    dest_provider._children_metadata = utils.MockCoroutine(
        return_value=folder_children_metadata)

    src_path = WaterButlerPath('/folder1/', _ids=['RootId', 'folder1'], folder=True)
    # Destination already exists ('doomedFolder') and must be overwritten.
    dest_path = WaterButlerPath('/folder1/', _ids=['RootId', 'doomedFolder'], folder=True)

    # The signed hook request body describing the move/copy.
    data = json.dumps({
        'user': src_provider.auth['id'],
        'source': src_path.identifier,
        'destination': {
            'name': dest_path.name,
            'node': dest_provider.nid,
            'parent': dest_path.parent.identifier
        }
    })
    url, params = build_signed_url_without_auth(src_provider, 'POST', 'hooks', action, data=data)

    body = {
        'path': '/folder1/',
        'id': 'folder1',
        'kind': 'folder',
        'name': 'folder1'
    }
    aiohttpretty.register_json_uri('POST', url, params=params, status=201, body=body)

    method = getattr(src_provider, method_name)
    folder_meta, created = await method(dest_provider, src_path, dest_path)

    # Overwriting an existing folder means created is False.
    assert not created
    assert isinstance(folder_meta, OsfStorageFolderMetadata)
    assert len(folder_meta.children) == 4

    # these should be called on dest_provider (if at all), not src_provider
    src_provider.delete.assert_not_called()
    src_provider.validate_v1_path.assert_not_called()
    src_provider._children_metadata.assert_not_called()

    dest_provider.delete.assert_called_once_with(WaterButlerPath('/folder1/'))
    dest_provider.validate_v1_path.assert_called_once_with('/folder1/')
    dest_provider._children_metadata.assert_called_once_with(
        WaterButlerPath('/folder1/'))
async def test_upload_and_tasks(self, monkeypatch, provider_and_mock, file_stream, credentials, settings, mock_time):
    """With RUN_TASKS enabled, a successful upload kicks off the parity and
    backup tasks with the completed file path and version key."""
    provider, inner_provider = provider_and_mock
    basepath = 'waterbutler.providers.osfstorage.provider.{}'
    path = WaterButlerPath('/foopath', _ids=('Test', 'OtherTest'))
    url = 'https://waterbutler.io/{}/children/'.format(
        path.parent.identifier)

    mock_parity = mock.Mock()
    mock_backup = mock.Mock()
    inner_provider.move.return_value = (utils.MockFileMetadata(), True)
    # 404 from metadata means the content-addressed blob does not exist yet,
    # forcing the pending file to be moved into place.
    inner_provider.metadata.side_effect = exceptions.MetadataError(
        'Boom!', code=404)
    aiohttpretty.register_json_uri('POST', url, status=201, body={
        'version': 'versionpk',
        'data': {
            'version': 42,
            'downloads': 30,
            'path': '/alkjdaslke09',
            'checkout': None,
            'md5': 'abcd',
            'sha256': 'bcde'
        }
    })
    # Enable the task pipeline and pin the temp-file name for assertions.
    monkeypatch.setattr(basepath.format('backup.main'), mock_backup)
    monkeypatch.setattr(basepath.format('parity.main'), mock_parity)
    monkeypatch.setattr(basepath.format('settings.RUN_TASKS'), True)
    monkeypatch.setattr(basepath.format('os.rename'), lambda *_: None)
    monkeypatch.setattr(basepath.format('uuid.uuid4'), lambda: 'uniquepath')

    res, created = await provider.upload(file_stream, path)

    assert created is True
    assert res.name == 'foopath'
    assert res.extra['version'] == 42
    assert res.provider == 'osfstorage'
    assert res.extra['downloads'] == 30
    assert res.extra['checkout'] is None

    inner_provider.upload.assert_called_once_with(
        file_stream, WaterButlerPath('/uniquepath'),
        check_created=False, fetch_metadata=False)
    # Tasks receive the sha256-named path in the completed-files directory.
    complete_path = os.path.join(FILE_PATH_COMPLETE, file_stream.writers['sha256'].hexdigest)
    mock_parity.assert_called_once_with(complete_path, credentials['parity'], settings['parity'])
    mock_backup.assert_called_once_with(
        complete_path, 'versionpk', 'https://waterbutler.io/hooks/metadata/',
        credentials['archive'], settings['parity'])
    inner_provider.metadata.assert_called_once_with(
        WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest))
    inner_provider.move.assert_called_once_with(
        inner_provider,
        WaterButlerPath('/uniquepath'),
        WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest))
def test_push_complete_error(self, event_loop):
    """_push_archive_complete must raise when the metadata callback returns HTTP 500."""
    hook_url = 'https://fakeosf.io/guidz/osfstorage/hooks/metadata/'
    aiohttpretty.register_json_uri('PUT', hook_url, status=500)
    with pytest.raises(Exception):
        backup._push_archive_complete(123, hook_url, {'some': 'metadata'})
async def test_upload_parts(self, provider, root_provider_fixtures):
    """_upload_parts splits a 20-byte stream into two 10-byte chunked-upload
    PUTs against the Box session url and returns both parts' metadata."""
    # Queue one response per expected chunk upload.
    responses = [{
        'body': json.dumps(root_provider_fixtures['upload_part_one']),
        'status': 201,
        'headers': {
            'Content-Type': 'application/json'
        },
    }, {
        'body': json.dumps(root_provider_fixtures['upload_part_two']),
        'status': 201,
        'headers': {
            'Content-Type': 'application/json'
        },
    }]
    session_url = 'https://upload.box.com/api/2.0/files/upload_sessions/fake_session_id'
    aiohttpretty.register_json_uri('PUT', session_url, status=HTTPStatus.CREATED,
                                   responses=responses)

    session_metadata = root_provider_fixtures['create_session_metadata']
    stream = streams.StringStream('tenbytestr'.encode() * 2)
    parts_metadata = await provider._upload_parts(stream, session_metadata)

    expected_response = [{
        'offset': 10,
        'part_id': '37B0FB1B',
        'sha1': '3ff00d99585b8da363f9f9955e791ed763e111c1',
        'size': 10
    }, {
        'offset': 20,
        'part_id': '1872DEDA',
        'sha1': '0ae5fc290c5c5414cdda245ab712a8440376284a',
        'size': 10
    }]
    assert parts_metadata == expected_response

    # Both chunks go to the same session url via PUT.
    assert len(aiohttpretty.calls) == 2
    for call in aiohttpretty.calls:
        assert call['method'] == 'PUT'
        assert call['uri'] == session_url

    # Each chunk carries its own Content-Range; the Digest is identical
    # because both chunks contain the same ten bytes.
    # NOTE(review): the 'Content-Type:' key below has a trailing colon —
    # it mirrors what the provider actually sends, but looks like a typo
    # in the provider's header constant; confirm upstream.
    call_one = aiohttpretty.calls[0]
    assert call_one['headers'] == {
        'Authorization': 'Bearer wrote harry potter',
        'Content-Length': '10',
        'Content-Range': 'bytes 0-9/20',
        'Content-Type:': 'application/octet-stream',
        'Digest': 'sha={}'.format('pz4mZbOEOesBeUhR1THUF1Oq1bI=')
    }
    call_two = aiohttpretty.calls[1]
    assert call_two['headers'] == {
        'Authorization': 'Bearer wrote harry potter',
        'Content-Length': '10',
        'Content-Range': 'bytes 10-19/20',
        'Content-Type:': 'application/octet-stream',
        'Digest': 'sha={}'.format('pz4mZbOEOesBeUhR1THUF1Oq1bI=')
    }