def test_delete(monkeypatch, provider, mock_path):
    path = WaterButlerPath('/unrelatedpath', _ids=('Doesntmatter', 'another'))
    aiohttpretty.register_uri('DELETE', 'https://waterbutler.io/another/', status_code=200)

    yield from provider.delete(path)

    aiohttpretty.has_call(method='DELETE', uri='https://waterbutler.io/another/',
                          params={'user': '******'})

async def test_article_upload(self, file_stream, article_provider, folder_article_metadata,
                              get_file_metadata, create_file_metadata, get_upload_metadata,
                              upload_folder_article_metadata):
    file_name = 'barricade.gif'
    file_id = str(get_file_metadata['id'])
    root_parts = article_provider.root_path_parts

    validate_file_url = article_provider.build_url(False, *root_parts, 'files', file_name)
    aiohttpretty.register_uri('GET', validate_file_url, status=404)
    path = await article_provider.validate_path('/' + file_name)

    create_file_url = article_provider.build_url(False, *root_parts, 'files')
    file_url = article_provider.build_url(False, *root_parts, 'files', file_id)
    get_article_url = article_provider.build_url(False, *root_parts)
    upload_url = get_file_metadata['upload_url']

    aiohttpretty.register_json_uri('POST', create_file_url, body=create_file_metadata, status=201)
    aiohttpretty.register_json_uri('GET', file_url, body=get_file_metadata)
    aiohttpretty.register_json_uri('GET', get_file_metadata['upload_url'],
                                   body=get_upload_metadata)
    aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
    aiohttpretty.register_uri('POST', file_url, status=202)
    aiohttpretty.register_json_uri('GET', get_article_url, body=upload_folder_article_metadata)

    result, created = await article_provider.upload(file_stream, path)
    expected = metadata.FigshareFileMetadata(
        upload_folder_article_metadata,
        upload_folder_article_metadata['files'][0],
    )

    assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
    assert aiohttpretty.has_call(method='POST', uri=create_file_url)
    assert result == expected

async def test_project_contents(self, project_provider, list_project_articles,
                                file_article_metadata, folder_article_metadata):
    root_parts = project_provider.root_path_parts

    list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
    file_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
                                                   str(list_project_articles[0]['id']))
    folder_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
                                                     str(list_project_articles[1]['id']))

    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles,
                                   params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
    aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
                                   params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
    aiohttpretty.register_json_uri('GET', file_metadata_url, body=file_article_metadata)
    aiohttpretty.register_json_uri('GET', folder_metadata_url, body=folder_article_metadata)

    path = await project_provider.validate_path('/')
    result = await project_provider.metadata(path)

    assert aiohttpretty.has_call(method='GET', uri=list_articles_url,
                                 params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
    assert aiohttpretty.has_call(method='GET', uri=file_metadata_url)
    assert aiohttpretty.has_call(method='GET', uri=folder_metadata_url)
    assert result == [
        metadata.FigshareFileMetadata(file_article_metadata, file_article_metadata['files'][0]),
        metadata.FigshareFolderMetadata(folder_article_metadata),
    ]

def test_provider_metadata(monkeypatch, provider):
    items = [
        {
            'name': 'foo',
            'path': '/foo',
            'kind': 'file',
            'version': 10,
            'downloads': 1,
        },
        {
            'name': 'bar',
            'path': '/bar',
            'kind': 'file',
            'version': 10,
            'downloads': 1,
        },
        {
            'name': 'baz',
            'path': '/baz',
            'kind': 'folder'
        }
    ]
    aiohttpretty.register_json_uri('GET', 'https://waterbutler.io/metadata', status=200,
                                   body=items)

    res = yield from provider.metadata(path='/unrelatedpath')

    assert isinstance(res, list)
    for item in res:
        assert isinstance(item, dict)
        assert item['name'] is not None
        assert item['path'] is not None
        assert item['provider'] == 'osfstorage'

    aiohttpretty.has_call(method='GET', uri='https://waterbutler.io',
                          params={'path': 'unrelatedpath'})

async def test_upload_file_checksum_mismatch(self, mock_time, mock_provider, file_wb_path,
                                             meta_file_raw, meta_file_upload_raw,
                                             file_stream_file):
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)

    signed_url_upload = mock_provider._build_and_sign_url('PUT', file_obj_name, **{})
    # There is no need to use `MultiDict` since the hashes are not used
    resp_headers_dict = dict(json.loads(meta_file_upload_raw))
    resp_headers_dict.update({'etag': '"9e780e1c4ee28c44642160b349b3aab0"'})
    resp_headers = utils.get_multi_dict_from_python_dict(resp_headers_dict)
    aiohttpretty.register_uri(
        'PUT',
        signed_url_upload,
        headers=resp_headers,
        status=HTTPStatus.OK
    )

    signed_url_metadata = mock_provider._build_and_sign_url('HEAD', file_obj_name, **{})
    # There is no need to use `MultiDict` since the hashes are not used
    resp_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_raw)))
    aiohttpretty.register_uri(
        'HEAD',
        signed_url_metadata,
        headers=resp_headers,
        status=HTTPStatus.OK
    )

    with pytest.raises(exceptions.UploadChecksumMismatchError) as exc:
        await mock_provider.upload(file_stream_file, file_wb_path)

    assert exc.value.code == HTTPStatus.INTERNAL_SERVER_ERROR
    assert aiohttpretty.has_call(method='HEAD', uri=signed_url_metadata)
    assert aiohttpretty.has_call(method='PUT', uri=signed_url_upload)

def test_upload_encrypted(self, provider, file_content, file_stream, file_metadata, mock_time):
    # Set trigger for encrypt_key=True in s3.provider.upload
    provider.encrypt_uploads = True

    path = WaterButlerPath('/foobah')
    content_md5 = hashlib.md5(file_content).hexdigest()
    url = provider.bucket.new_key(path.path).generate_url(100, 'PUT', encrypt_key=True)
    metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
    aiohttpretty.register_uri(
        'HEAD',
        metadata_url,
        responses=[
            {'status': 404},
            {'headers': file_metadata},
        ],
    )
    aiohttpretty.register_uri('PUT', url, status=200,
                              headers={'ETag': '"{}"'.format(content_md5)})

    metadata, created = yield from provider.upload(file_stream, path)

    assert metadata.kind == 'file'
    assert metadata.extra['encryption'] == 'AES256'
    assert created
    assert aiohttpretty.has_call(method='PUT', uri=url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)

async def test_upload_file(self, mock_time, mock_provider, file_wb_path, meta_file_raw,
                           meta_file_parsed, meta_file_upload_raw, file_stream_file):
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)

    signed_url_upload = mock_provider._build_and_sign_url('PUT', file_obj_name, **{})
    resp_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_upload_raw)))
    aiohttpretty.register_uri(
        'PUT',
        signed_url_upload,
        headers=resp_headers,
        status=HTTPStatus.OK
    )

    signed_url_metadata = mock_provider._build_and_sign_url('HEAD', file_obj_name, **{})
    resp_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_raw)))
    aiohttpretty.register_uri(
        'HEAD',
        signed_url_metadata,
        headers=resp_headers,
        status=HTTPStatus.OK
    )

    metadata_json = json.loads(meta_file_parsed)
    metadata_expected = GoogleCloudFileMetadata(metadata_json)

    metadata, _ = await mock_provider.upload(file_stream_file, file_wb_path)

    assert metadata == metadata_expected
    assert aiohttpretty.has_call(method='PUT', uri=signed_url_upload)
    assert aiohttpretty.has_call(method='HEAD', uri=signed_url_metadata)

def test_project_upload(self, project_provider, list_project_articles, base_article_metadata,
                        article_metadata, upload_metadata, file_content, file_stream):
    article_id = str(list_project_articles[0]['id'])

    list_articles_url = project_provider.build_url('projects', project_provider.project_id,
                                                   'articles')
    article_metadata_url = project_provider.build_url('articles', article_id)
    article_upload_url = project_provider.build_url('articles', article_id, 'files')
    create_article_url = project_provider.build_url('articles')
    add_article_url = project_provider.build_url('projects', project_provider.project_id,
                                                 'articles')

    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)
    aiohttpretty.register_json_uri('PUT', article_upload_url, body=upload_metadata)
    aiohttpretty.register_json_uri('POST', create_article_url, body=base_article_metadata)
    aiohttpretty.register_json_uri('PUT', add_article_url)

    file_name = 'barricade.gif'
    path = yield from project_provider.validate_path('/' + file_name)

    result, created = yield from project_provider.upload(file_stream, path)
    expected = metadata.FigshareFileMetadata(
        upload_metadata,
        parent=base_article_metadata,
        child=True,
    ).serialized()

    assert aiohttpretty.has_call(
        method='POST',
        uri=create_article_url,
        data=json.dumps({
            'title': 'barricade.gif',
            'defined_type': 'dataset',
        })
    )
    assert aiohttpretty.has_call(method='PUT', uri=article_upload_url)
    assert aiohttpretty.has_call(
        method='PUT',
        uri=add_article_url,
        data=json.dumps({'article_id': int(article_id)})
    )
    assert result == expected

def test_upload_create_nested(self, provider, file_stream):
    path = '/ed/sullivan/show.mp3'
    upload_id = '7'
    parts = path.split('/')
    urls, bodies = [], []
    for idx, part in enumerate(parts[:-1]):
        query = provider._build_query(idx or provider.folder['id'], title=parts[idx + 1])
        if part == 'sullivan':
            body = {'items': []}
        else:
            body = fixtures.generate_list(idx + 1)
        url = provider.build_url('files', q=query, alt='json')
        aiohttpretty.register_json_uri('GET', url, body=body)
        urls.append(url)
        bodies.append(body)

    item = fixtures.list_file['items'][0]
    start_upload_url = provider._build_upload_url('files', uploadType='resumable')
    finish_upload_url = provider._build_upload_url('files', uploadType='resumable',
                                                   upload_id=upload_id)
    aiohttpretty.register_uri(
        'POST', start_upload_url,
        headers={'LOCATION': 'http://waterbutler.io?upload_id={}'.format(upload_id)})
    aiohttpretty.register_json_uri('PUT', finish_upload_url, body=item)

    result, created = yield from provider.upload(file_stream, path)

    assert aiohttpretty.has_call(method='POST', uri=start_upload_url)
    assert aiohttpretty.has_call(method='PUT', uri=finish_upload_url)
    assert created is True

    expected = GoogleDriveFileMetadata(item, '/ed/sullivan').serialized()
    assert result == expected

async def test_upload_create(self, provider, file_stream, native_file_metadata,
                             empty_native_dataset_metadata, native_dataset_metadata):
    path = '/thefile.txt'
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)

    latest_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                    key=provider.token)
    latest_published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)
    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
    aiohttpretty.register_uri('GET', latest_url, responses=[
        {
            'status': 200,
            'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
        {
            'status': 200,
            'body': json.dumps(native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
    ])

    path = await provider.validate_path(path)
    metadata, created = await provider.upload(file_stream, path)

    entry = native_file_metadata['datafile']
    expected = DataverseFileMetadata(entry, 'latest')

    assert created is True
    assert metadata == expected
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=latest_url)
    assert aiohttpretty.has_call(method='GET', uri=latest_published_url)

async def test_upload_updates(self, provider, file_stream, native_file_metadata,
                              native_dataset_metadata):
    path = '/20'
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)

    published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                       key=provider.token)
    aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata)

    delete_url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', '/20')  # Old file id
    aiohttpretty.register_json_uri('DELETE', delete_url, status=204)

    latest_published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)
    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})

    path = await provider.validate_path(path)
    metadata, created = await provider.upload(file_stream, path)

    entry = native_file_metadata['datafile']
    expected = DataverseFileMetadata(entry, 'latest')

    assert metadata == expected
    assert created is False
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=published_url)

def test_folder_delete(self, provider, contents_and_self, mock_time):
    path = WaterButlerPath('/some-folder/')

    params = {'prefix': 'some-folder/'}
    query_url = provider.bucket.generate_url(100, 'GET')
    aiohttpretty.register_uri(
        'GET',
        query_url,
        params=params,
        body=contents_and_self,
        status=200,
    )

    query_params = {'delete': ''}
    (payload, headers) = bulk_delete_body(
        ['thisfolder/', 'thisfolder/item1', 'thisfolder/item2']
    )
    delete_url = provider.bucket.generate_url(
        100,
        'POST',
        query_parameters=query_params,
        headers=headers,
    )
    aiohttpretty.register_uri('POST', delete_url, status=204)

    yield from provider.delete(path)

    assert aiohttpretty.has_call(method='GET', uri=query_url, params=params)
    assert aiohttpretty.has_call(method='POST', uri=delete_url)

async def test_article_file_contents(self, article_provider, folder_article_metadata,
                                     folder_file_metadata):
    root_parts = article_provider.root_path_parts
    article_id = str(folder_article_metadata['id'])
    article_name = folder_article_metadata['title']
    file_id = str(folder_file_metadata['id'])
    file_name = folder_file_metadata['name']

    folder_article_metadata_url = article_provider.build_url(False, *root_parts)
    file_metadata_url = article_provider.build_url(False, *root_parts, 'files', file_id)

    aiohttpretty.register_json_uri('GET', folder_article_metadata_url,
                                   body=folder_article_metadata)
    aiohttpretty.register_json_uri('GET', file_metadata_url, body=folder_file_metadata)

    path = await article_provider.validate_path('/{}'.format(file_id))
    result = await article_provider.metadata(path)

    assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
    assert aiohttpretty.has_call(method='GET', uri=file_metadata_url)

    expected = metadata.FigshareFileMetadata(folder_article_metadata, folder_file_metadata)
    assert result == expected
    assert str(result.id) == file_id
    assert result.name == file_name
    assert result.path == '/{}/{}'.format(article_id, file_id)
    assert result.materialized_path == '/{}/{}'.format(article_name, file_name)
    assert result.article_name == article_name
    assert result.size == folder_file_metadata['size']
    assert result.is_public == (PRIVATE_IDENTIFIER not in folder_article_metadata['url'])

def test_provider_metadata_empty(monkeypatch, provider):
    aiohttpretty.register_json_uri('GET', 'https://waterbutler.io/metadata',
                                   status_code=200, body=[])

    res = yield from provider.metadata(path='/unrelatedpath')

    assert res == []
    aiohttpretty.has_call(method='GET', uri='https://waterbutler.io',
                          params={'path': 'unrelatedpath'})

def test_provider_metadata_empty(monkeypatch, provider, mock_folder_path):
    url = 'https://waterbutler.io/{}/children/'.format(mock_folder_path.identifier)
    aiohttpretty.register_json_uri('GET', url, status_code=200, body=[])

    res = yield from provider.metadata(mock_folder_path)

    assert res == []
    assert aiohttpretty.has_call(method='GET', uri=url)

def test_delete_file(self, provider, file_metadata):
    path = DropboxPath(provider.folder, '/The past')
    url = provider.build_url('fileops', 'delete')
    data = {'root': 'auto', 'path': path.full_path}
    file_url = provider.build_url('metadata', 'auto', path.full_path)
    aiohttpretty.register_json_uri('GET', file_url, body=file_metadata)
    aiohttpretty.register_uri('POST', url, status=200)

    yield from provider.delete(str(path))

    assert aiohttpretty.has_call(method='GET', uri=file_url)
    assert aiohttpretty.has_call(method='POST', uri=url, data=data)

def test_large_folder_delete(self, provider, mock_time):
    path = WaterButlerPath('/some-folder/')

    query_url = provider.bucket.generate_url(100, 'GET')

    keys_one = [str(x) for x in range(2500, 3500)]
    response_one = list_objects_response(keys_one, truncated=True)
    params_one = {'prefix': 'some-folder/'}

    keys_two = [str(x) for x in range(3500, 3601)]
    response_two = list_objects_response(keys_two)
    params_two = {'prefix': 'some-folder/', 'marker': '3499'}

    aiohttpretty.register_uri(
        'GET',
        query_url,
        params=params_one,
        body=response_one,
        status=200,
    )
    aiohttpretty.register_uri(
        'GET',
        query_url,
        params=params_two,
        body=response_two,
        status=200,
    )

    query_params = {'delete': None}

    (payload_one, headers_one) = bulk_delete_body(keys_one)
    delete_url_one = provider.bucket.generate_url(
        100,
        'POST',
        query_parameters=query_params,
        headers=headers_one,
    )
    aiohttpretty.register_uri('POST', delete_url_one, status=204)

    (payload_two, headers_two) = bulk_delete_body(keys_two)
    delete_url_two = provider.bucket.generate_url(
        100,
        'POST',
        query_parameters=query_params,
        headers=headers_two,
    )
    aiohttpretty.register_uri('POST', delete_url_two, status=204)

    yield from provider.delete(path)

    assert aiohttpretty.has_call(method='GET', uri=query_url, params=params_one)
    assert aiohttpretty.has_call(method='GET', uri=query_url, params=params_two)
    assert aiohttpretty.has_call(method='POST', uri=delete_url_one)
    assert aiohttpretty.has_call(method='POST', uri=delete_url_two)

def test_project_article_contents_not_in_project(self, project_provider, list_project_articles,
                                                 article_metadata):
    list_articles_url = project_provider.build_url('projects', project_provider.project_id,
                                                   'articles')
    article_metadata_url = project_provider.build_url('articles',
                                                      str(list_project_articles[0]['id']))

    aiohttpretty.register_json_uri('GET', list_articles_url, body=[])
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)

    article_id = list_project_articles[0]['id']
    path = yield from project_provider.validate_path('/{}/'.format(article_id))

    with pytest.raises(exceptions.ProviderError) as exc:
        yield from project_provider.metadata(path)

    assert exc.value.code == 404
    assert aiohttpretty.has_call(method='GET', uri=list_articles_url)
    assert not aiohttpretty.has_call(method='GET', uri=article_metadata_url)

def test_delete_folder(self, provider, folder_object_metadata):
    item = folder_object_metadata
    path = BoxPath('/{}/{}/'.format(item['id'], item['name']))
    url = provider.build_url('folders', path._id)
    delete_url = url + '?recursive=True'
    aiohttpretty.register_json_uri('GET', url, body=item)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)

    yield from provider.delete(str(path))

    assert aiohttpretty.has_call(method='GET', uri=url)
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)

def test_delete(self, provider):
    path = '/birdie.jpg'
    item = fixtures.list_file['items'][0]
    query = provider._build_query(provider.folder['id'], title=path.lstrip('/'))
    list_file_url = provider.build_url('files', q=query, alt='json')
    delete_url = provider.build_url('files', item['id'])
    aiohttpretty.register_json_uri('GET', list_file_url, body=fixtures.list_file)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)

    result = yield from provider.delete(path)

    assert aiohttpretty.has_call(method='GET', uri=list_file_url)
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
    assert result is None

def test_upload_update(self, provider, file_content, file_stream, file_metadata):
    path = WaterButlerPath('/foobah')
    content_md5 = hashlib.md5(file_content).hexdigest()
    url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
    metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
    aiohttpretty.register_uri('HEAD', metadata_url, headers=file_metadata)
    aiohttpretty.register_uri('PUT', url, status=201,
                              headers={'ETag': '"{}"'.format(content_md5)})

    metadata, created = yield from provider.upload(file_stream, path)

    assert metadata['kind'] == 'file'
    assert not created
    assert aiohttpretty.has_call(method='PUT', uri=url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)

def test_get_revisions_free_account(self, provider, file_metadata):
    item = file_metadata['entries'][0]
    file_id = item['id']
    path = BoxPath('/' + file_id)
    file_url = provider.build_url('files', item['id'])
    revisions_url = provider.build_url('files', file_id, 'versions')

    aiohttpretty.register_json_uri('GET', file_url, body=item)
    aiohttpretty.register_json_uri('GET', revisions_url, body={}, status=403)

    result = yield from provider.revisions(str(path))
    expected = [BoxRevision(item).serialized()]

    assert result == expected
    assert aiohttpretty.has_call(method='GET', uri=file_url)
    assert aiohttpretty.has_call(method='GET', uri=revisions_url)

def test_get_revisions_free_account(self, provider, file_metadata):
    item = file_metadata['entries'][0]
    path = WaterButlerPath('/name.txt', _ids=(provider, item['id']))
    file_url = provider.build_url('files', path.identifier)
    revisions_url = provider.build_url('files', path.identifier, 'versions')

    aiohttpretty.register_json_uri('GET', file_url, body=item)
    aiohttpretty.register_json_uri('GET', revisions_url, body={}, status=403)

    result = yield from provider.revisions(path)
    expected = [BoxRevision(item)]

    assert result == expected
    assert aiohttpretty.has_call(method='GET', uri=file_url)
    assert aiohttpretty.has_call(method='GET', uri=revisions_url)

async def test_version_metadata(self, provider, version_metadata, mock_time):
    path = WaterButlerPath('/my-image.jpg')
    url = provider.bucket.generate_url(100, 'GET', query_parameters={'versions': ''})
    params = build_folder_params(path)
    aiohttpretty.register_uri('GET', url, params=params, status=200, body=version_metadata)

    data = await provider.revisions(path)

    assert isinstance(data, list)
    assert len(data) == 3

    for item in data:
        assert hasattr(item, 'extra')
        assert hasattr(item, 'version')
        assert hasattr(item, 'version_identifier')

    assert aiohttpretty.has_call(method='GET', uri=url, params=params)

def test_download(monkeypatch, provider_and_mock, osf_response, mock_path):
    provider, inner_provider = provider_and_mock
    url = 'https://waterbutler.io/{}/download/?mode&version'.format(mock_path.identifier)
    aiohttpretty.register_json_uri('GET', url, body=osf_response)

    yield from provider.download(mock_path)

    assert provider.make_provider.called
    assert inner_provider.download.called
    assert aiohttpretty.has_call(method='GET', uri=url, check_params=False)
    provider.make_provider.assert_called_once_with(osf_response['settings'])
    inner_provider.download.assert_called_once_with(
        path=WaterButlerPath('/test/path'), displayName='unrelatedpath')

async def test_empty_folder_delete(self, provider, folder_empty_metadata, mock_time):
    path = WaterButlerPath('/empty-folder/')

    params = {'prefix': 'empty-folder/'}
    query_url = provider.bucket.generate_url(100, 'GET')
    aiohttpretty.register_uri(
        'GET',
        query_url,
        params=params,
        body=folder_empty_metadata,
        status=200,
    )

    with pytest.raises(exceptions.NotFoundError):
        await provider.delete(path)

    assert aiohttpretty.has_call(method='GET', uri=query_url, params=params)

def test_upload_create(self, provider, folder_object_metadata, folder_list_metadata,
                       file_metadata, file_stream, settings):
    path = WaterButlerPath('/newfile', _ids=(provider.folder, None))
    upload_url = provider._build_upload_url('files', 'content')
    folder_object_url = provider.build_url('folders', path.parent.identifier)
    folder_list_url = provider.build_url('folders', path.parent.identifier, 'items')

    aiohttpretty.register_json_uri('POST', upload_url, status=201, body=file_metadata)

    metadata, created = yield from provider.upload(file_stream, path)

    path.parts[-1]._id = file_metadata['entries'][0]['id']
    expected = BoxFileMetadata(file_metadata['entries'][0], path).serialized()

    assert metadata == expected
    assert created is True
    assert aiohttpretty.has_call(method='POST', uri=upload_url)

async def test_download_file(self, mock_time, mock_provider, file_wb_path, file_raw):
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)
    signed_url = mock_provider._build_and_sign_url('GET', file_obj_name, **{})
    aiohttpretty.register_uri('GET', signed_url, body=file_raw, status=HTTPStatus.OK)

    resp_stream_reader = await mock_provider.download(file_wb_path)
    file_content = await resp_stream_reader.read()

    assert aiohttpretty.has_call(method='GET', uri=signed_url)
    assert isinstance(resp_stream_reader, ResponseStreamReader)
    assert file_content == file_raw

async def test_upload_create(self, provider, root_provider_fixtures, file_stream):
    path = WaterButlerPath('/newfile', _ids=(provider.folder, None))
    upload_url = provider._build_upload_url('files', 'content')
    upload_metadata = root_provider_fixtures['upload_metadata']
    aiohttpretty.register_json_uri('POST', upload_url, status=201, body=upload_metadata)

    metadata, created = await provider.upload(file_stream, path)
    expected = BoxFileMetadata(upload_metadata['entries'][0], path).serialized()

    assert metadata.serialized() == expected
    assert created is True
    assert path.identifier_path == metadata.path
    assert aiohttpretty.has_call(method='POST', uri=upload_url)

async def test_metadata_nested(self, provider, root_provider_fixtures):
    item = root_provider_fixtures['file_metadata']['entries'][0]
    path = WaterButlerPath('/name.txt', _ids=(provider, item['id']))

    file_url = provider.build_url('files', path.identifier)
    aiohttpretty.register_json_uri('GET', file_url, body=item)

    result = await provider.metadata(path)

    expected = BoxFileMetadata(item, path)
    assert result == expected
    assert aiohttpretty.has_call(method='GET', uri=file_url)
    assert result.extra == {
        'etag': '3',
        'hashes': {
            'sha1': '134b65991ed521fcfe4724b7d814ab8ded5185dc',
        },
    }

async def test_provider_metadata_folder(self, provider, folder_path, folder_children_metadata,
                                        mock_time):
    url, params = build_signed_url_without_auth(provider, 'GET', folder_path.identifier,
                                                'children')
    aiohttpretty.register_json_uri('GET', url, params=params, status=200,
                                   body=folder_children_metadata)

    res = await provider.metadata(folder_path)

    assert isinstance(res, list)
    for item in res:
        assert isinstance(item, metadata.BaseMetadata)
        assert item.name is not None
        assert item.path is not None
        assert item.provider == 'osfstorage'

    assert aiohttpretty.has_call(method='GET', uri=url, params=params)

async def test_complete_chunked_upload_session(self, provider, root_provider_fixtures):
    commit_url = 'https://upload.box.com/api/2.0/files/upload_sessions/fake_session_id/commit'
    aiohttpretty.register_json_uri(
        'POST',
        commit_url,
        status=201,
        body=root_provider_fixtures['upload_commit_metadata'])

    session_metadata = root_provider_fixtures['create_session_metadata']
    entry = await provider._complete_chunked_upload_session(
        session_metadata, root_provider_fixtures['formated_parts'], 'fake_sha')

    assert root_provider_fixtures['upload_commit_metadata']['entries'][0] == entry
    assert aiohttpretty.has_call(method='POST', uri=commit_url)

async def test_download_file_with_display_name(self, mock_time, mock_provider, file_wb_path,
                                               display_name_arg, expected_name):
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)
    query = {
        'response-content-disposition': ('attachment; filename="{}"; '
                                         'filename*=UTF-8\'\'{}').format(expected_name,
                                                                         expected_name)
    }
    signed_url = mock_provider._build_and_sign_url('GET', file_obj_name, **query)

    return_url = await mock_provider.download(
        file_wb_path, accept_url=True, display_name=display_name_arg)

    assert not aiohttpretty.has_call(method='GET', uri=signed_url)
    assert isinstance(return_url, str)
    assert signed_url == return_url

def test_upload(self, provider, file_metadata, file_stream, settings):
    path = yield from provider.validate_path('/phile')
    metadata_url = provider.build_url('metadata', 'auto', path.full_path)
    url = provider._build_content_url('files_put', 'auto', path.full_path)
    aiohttpretty.register_uri('GET', metadata_url, status=404)
    aiohttpretty.register_json_uri('PUT', url, status=200, body=file_metadata)

    metadata, created = yield from provider.upload(file_stream, path)
    expected = DropboxFileMetadata(file_metadata, provider.folder)

    assert created is True
    assert metadata == expected
    assert aiohttpretty.has_call(method='PUT', uri=url)

async def test_metadata_folder_nested(self, provider):
    path = GoogleDrivePath('/hugo/kim/pins/', _ids=[str(x) for x in range(4)])

    body = fixtures.generate_list(3)
    item = body['items'][0]

    query = provider._build_query(path.identifier)
    url = provider.build_url('files', q=query, alt='json', maxResults=1000)
    aiohttpretty.register_json_uri('GET', url, body=body)

    result = await provider.metadata(path)

    expected = GoogleDriveFileMetadata(item, path.child(item['title']))
    assert result == [expected]
    assert aiohttpretty.has_call(method='GET', uri=url)

async def test_download_range(self, provider):
    path = WaterButlerPath('/triangles.txt', prepend=provider.folder)
    url = provider._build_content_url('files', 'download')
    aiohttpretty.register_uri('POST', url, body=b'be', auto_length=True, status=206)

    result = await provider.download(path, range=(0, 1))
    assert result.partial

    content = await result.response.read()
    assert content == b'be'
    assert aiohttpretty.has_call(
        method='POST',
        uri=url,
        headers={
            'Authorization': 'Bearer wrote harry potter',
            'Range': 'bytes=0-1',
            'Dropbox-API-Arg': '{"path": "/Photos/triangles.txt"}',
            'Content-Type': ''
        }
    )

def test_folder_metadata(self, provider):
    path = GoogleDrivePath('/hugo/kim/pins/', _ids=[str(x) for x in range(4)])

    body = fixtures.generate_list(3, **fixtures.folder_metadata)
    item = body['items'][0]

    query = provider._build_query(path.identifier)
    url = provider.build_url('files', q=query, alt='json')
    aiohttpretty.register_json_uri('GET', url, body=body)

    result = yield from provider.metadata(path)

    expected = GoogleDriveFolderMetadata(
        item, path.child(item['title'], folder=True)).serialized()
    assert result == [expected]
    assert aiohttpretty.has_call(method='GET', uri=url)

def test_upload_update(self, provider, folder_object_metadata, folder_list_metadata,
                       file_metadata, file_stream, settings):
    item = folder_list_metadata['entries'][0]
    path = WaterButlerPath('/newfile', _ids=(provider.folder, item['id']))
    upload_url = provider._build_upload_url('files', item['id'], 'content')
    aiohttpretty.register_json_uri('POST', upload_url, status=201, body=file_metadata)

    metadata, created = yield from provider.upload(file_stream, path)
    expected = BoxFileMetadata(file_metadata['entries'][0], path).serialized()

    assert metadata.serialized() == expected
    assert created is False
    assert aiohttpretty.has_call(method='POST', uri=upload_url)

def test_article_delete(self, article_provider, article_metadata, file_metadata):
    file_id = str(file_metadata['id'])
    article_id = article_provider.article_id

    article_metadata_url = article_provider.build_url('articles', article_id)
    article_delete_url = article_provider.build_url('articles', article_id, 'files', file_id)

    aiohttpretty.register_uri('DELETE', article_delete_url)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)

    path = yield from article_provider.validate_path('/{}'.format(file_id))
    result = yield from article_provider.delete(path)

    assert result is None
    assert aiohttpretty.has_call(method='DELETE', uri=article_delete_url)

async def test_provider_metadata_empty(self, provider_one, folder_path, mock_time):
    url, params = build_signed_url_without_auth(
        provider_one, 'GET', folder_path.identifier, 'children',
        user_id=provider_one.auth['id'])
    aiohttpretty.register_json_uri('GET', url, params=params, status_code=200, body=[])

    res = await provider_one.metadata(folder_path)

    assert res == []
    assert aiohttpretty.has_call(method='GET', uri=url, params=params)

async def test_chunked_upload_upload_part(self, provider, file_stream, provider_fixtures):
    assert file_stream.size == 38
    provider.CHUNK_SIZE = 4

    session_id = provider_fixtures['session_metadata']['session_id']
    upload_args = {
        'close': False,
        'cursor': {
            'session_id': session_id,
            'offset': 20,
        }
    }
    upload_part_url = provider._build_content_url('files', 'upload_session', 'append_v2')
    aiohttpretty.register_json_uri('POST', upload_part_url, status=200)

    await provider._upload_part(file_stream, provider.CHUNK_SIZE, upload_args)

    assert aiohttpretty.has_call(method='POST', uri=upload_part_url)

    provider.CHUNK_SIZE = CHUNK_SIZE

async def test_provider_metadata(monkeypatch, provider, mock_folder_path, mock_time):
    items = [{
        'name': 'foo',
        'path': '/foo',
        'kind': 'file',
        'version': 10,
        'downloads': 1,
        'md5': '1234',
        'sha256': '2345',
    }, {
        'name': 'bar',
        'path': '/bar',
        'kind': 'file',
        'version': 10,
        'downloads': 1,
        'md5': '1234',
        'sha256': '2345',
    }, {
        'name': 'baz',
        'path': '/baz',
        'kind': 'folder'
    }]

    url, _, params = provider.build_signed_url(
        'GET', provider.build_url(mock_folder_path.identifier, 'children'))
    aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=items)

    res = await provider.metadata(mock_folder_path)

    assert isinstance(res, list)
    for item in res:
        assert isinstance(item, metadata.BaseMetadata)
        assert item.name is not None
        assert item.path is not None
        assert item.provider == 'osfstorage'

    assert aiohttpretty.has_call(method='GET', uri=url, params=params)

async def test_upload(self, provider, file_stream, file_metadata, file_metadata_object):
    path = WaterButlerPath('/phile', prepend=provider.folder)

    url = provider._webdav_url_ + path.full_path
    aiohttpretty.register_uri('PROPFIND', url, body=file_metadata, auto_length=True, status=207)
    aiohttpretty.register_uri('PUT', url, body=b'squares', auto_length=True, status=201)

    metadata, created = await provider.upload(file_stream, path)

    assert created is True
    assert metadata.name == file_metadata_object.name
    assert metadata.size == file_metadata_object.size
    assert aiohttpretty.has_call(method='PUT', uri=url)

async def test_download_file_ruby_response_range(self, provider):
    """See: https://gitlab.com/gitlab-org/gitlab-ce/issues/31790"""
    path = '/folder1/folder2/file'
    gl_path = GitLabPath(path, _ids=([(None, 'my-branch')] * 4))

    url = ('http://base.url/api/v4/projects/123/repository/files/'
           'folder1%2Ffolder2%2Ffile?ref=my-branch')
    aiohttpretty.register_uri('GET', url, body=fixtures.weird_ruby_response())

    result = await provider.download(gl_path, range=(0, 1))
    assert result.partial
    assert await result.read() == b'ro'
    assert aiohttpretty.has_call(method='GET', uri=url, headers={
        'Range': 'bytes=0-1',
        'PRIVATE-TOKEN': 'naps'
    })

async def test_delete_root(self, provider, provider_fixtures):
    url = provider.build_url('files', 'list_folder')
    path = await provider.validate_path('/')
    data = {'path': path.full_path}
    aiohttpretty.register_json_uri(
        'POST', url,
        data=data,
        body=provider_fixtures['folder_children'],
        status=HTTPStatus.OK
    )

    path2 = await provider.validate_path('/photos/flower.jpg')
    url = provider.build_url('files', 'delete_v2')
    data = {'path': provider.folder.rstrip('/') + '/' + path2.path.rstrip('/')}
    aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK)

    await provider.delete(path, 1)

    assert aiohttpretty.has_call(method='POST', uri=url)

async def test_download_without_auth(self, provider_and_mock, download_response, download_path,
                                     mock_time):
    provider, inner_provider = provider_and_mock
    provider.auth = {}

    url, params = build_signed_url_without_auth(provider, 'GET', download_path.identifier,
                                                'download', version=None, mode=None)
    aiohttpretty.register_json_uri('GET', url, params=params, body=download_response)

    await provider.download(download_path)

    assert provider.make_provider.called
    assert inner_provider.download.called
    assert aiohttpretty.has_call(method='GET', uri=url, params=params)

    provider.make_provider.assert_called_once_with(download_response['settings'])
    expected_path = WaterButlerPath('/' + download_response['data']['path'])
    expected_display_name = download_response['data']['name']
    inner_provider.download.assert_called_once_with(path=expected_path,
                                                    displayName=expected_display_name)

async def test_upload_keep(self, provider, file_stream, file_metadata, file_metadata_object):
    path = WaterButlerPath('/phile', prepend=provider.folder)
    renamed_path = WaterButlerPath('/phile (1)', prepend=provider.folder)
    path._parts[-1]._id = 'fake_id'
    provider.handle_name_conflict = utils.MockCoroutine(return_value=(renamed_path, True))

    url = provider._webdav_url_ + renamed_path.full_path
    aiohttpretty.register_uri('PROPFIND', url, body=file_metadata, auto_length=True, status=207)
    aiohttpretty.register_uri('PUT', provider._webdav_url_ + '/my_folder/phile (1)',
                              body=b'squares', auto_length=True, status=201)

    metadata, created = await provider.upload(file_stream, path, 'keep')

    assert created is True
    assert metadata.name == file_metadata_object.name
    assert metadata.size == file_metadata_object.size
    assert metadata.size_as_int == int(file_metadata_object.size)
    assert aiohttpretty.has_call(method='PUT', uri=url)

async def test_complete_session(self, provider, file_stream, provider_fixtures):
    assert file_stream.size == 38
    provider.CHUNK_SIZE = 4

    path = WaterButlerPath('/foobah')
    session_id = provider_fixtures['session_metadata']['session_id']
    complete_part_url = provider._build_content_url('files', 'upload_session', 'finish')
    aiohttpretty.register_json_uri(
        'POST',
        complete_part_url,
        status=200,
        body=provider_fixtures.get('file_metadata', None)
    )

    metadata = await provider._complete_session(file_stream, session_id, path)

    assert metadata == provider_fixtures['file_metadata']
    assert aiohttpretty.has_call(method='POST', uri=complete_part_url)

    provider.CHUNK_SIZE = CHUNK_SIZE

def test_version_metadata(self, provider, version_metadata):
    path = WaterButlerPath('/my-image.jpg')
    url = provider.bucket.generate_url(100, 'GET', query_parameters={'versions': ''})
    aiohttpretty.register_uri('GET', url, status=200, body=version_metadata)

    data = yield from provider.revisions(path)

    assert isinstance(data, list)
    assert len(data) == 3

    for item in data:
        assert 'extra' in item
        assert 'version' in item
        assert 'versionIdentifier' in item

    assert aiohttpretty.has_call(method='GET', uri=url)

async def test_upload_update(self, provider, root_provider_fixtures, file_stream):
    upload_metadata = root_provider_fixtures['upload_metadata']
    item_to_overwrite = root_provider_fixtures['folder_list_metadata']['entries'][0]
    path = WaterButlerPath('/newfile', _ids=(provider.folder, item_to_overwrite['id']))
    upload_url = provider._build_upload_url('files', item_to_overwrite['id'], 'content')
    aiohttpretty.register_json_uri('POST', upload_url, status=201, body=upload_metadata)

    metadata, created = await provider.upload(file_stream, path)
    expected = BoxFileMetadata(upload_metadata['entries'][0], path).serialized()

    assert metadata.serialized() == expected
    assert created is False
    assert aiohttpretty.has_call(method='POST', uri=upload_url)

async def test_upload_conflict_keep(self, provider, root_provider_fixtures, file_stream):
    upload_metadata = root_provider_fixtures['upload_metadata']
    item = upload_metadata['entries'][0]
    path = WaterButlerPath('/newfile', _ids=(provider.folder, item['id']))

    upload_url = provider._build_upload_url('files', 'content')
    aiohttpretty.register_json_uri('POST', upload_url, status=201, body=upload_metadata)

    metadata_url = provider.build_url('files', path.identifier)
    aiohttpretty.register_json_uri('GET', metadata_url, body=upload_metadata)

    list_url = provider.build_url(
        'folders',
        item['path_collection']['entries'][1]['id'],
        'items',
        fields='id,name,type',
        limit=1000)
    aiohttpretty.register_json_uri(
        'GET', list_url, body=root_provider_fixtures['folder_list_metadata'])

    metadata, created = await provider.upload(file_stream, path, conflict='keep')
    expected = BoxFileMetadata(item, path).serialized()

    # since the metadata for the renamed conflict file isn't actually saved, this one is odd to
    # test.
    assert metadata.serialized() == expected
    assert created is True
    assert path.identifier_path == metadata.path
    assert aiohttpretty.has_call(method='POST', uri=upload_url)

async def test_download_range(self, provider):
    path = '/folder1/file.py'
    gl_path = GitLabPath(path, _ids=([('a1b2c3d4', 'master')] * 3))

    url = ('http://base.url/api/v4/projects/123/repository/files'
           '/folder1%2Ffile.py?ref=a1b2c3d4')
    aiohttpretty.register_json_uri('GET', url, body={'content': 'aGVsbG8='})

    result = await provider.download(gl_path, branch='master', range=(0, 1))
    assert result.partial
    assert await result.read() == b'he'  # body content after base64 decoding and slice
    assert aiohttpretty.has_call(method='GET', uri=url, headers={
        'Range': 'bytes=0-1',
        'PRIVATE-TOKEN': 'naps'
    })

async def test_intra_move_folder_replace(self, provider, intra_fixtures, root_provider_fixtures):
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']

    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id'], item['id']))

    file_url = provider.build_url('folders', src_path.identifier)
    delete_url = provider.build_url('folders', dest_path.identifier, recursive=True)
    list_url = provider.build_url(
        'folders',
        item['id'],
        'items',
        fields='id,name,size,modified_at,etag,total_count',
        offset=0,
        limit=1000)

    aiohttpretty.register_json_uri('PUT', file_url, body=item)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)

    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for child_item in list_metadata['entries']:
        child_path = dest_path.child(child_item['name'],
                                     folder=(child_item['type'] == 'folder'))
        serialized_child = provider._serialize_item(child_item, child_path)
        expected_folder._children.append(serialized_child)
    expected = (expected_folder, False)

    result = await provider.intra_move(provider, src_path, dest_path)

    assert result == expected
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)