async def test_upload_update(self, provider, file_stream):
    """Uploading over an existing file uses the resumable flow and reports created=False."""
    upload_id = '7'
    item = fixtures.list_file['items'][0]
    path = WaterButlerPath('/birdie.jpg', _ids=(provider.folder['id'], item['id']))

    start_upload_url = provider._build_upload_url('files', path.identifier,
                                                  uploadType='resumable')
    finish_upload_url = provider._build_upload_url('files', path.identifier,
                                                   uploadType='resumable',
                                                   upload_id=upload_id)

    # The session-start PUT returns the upload session location; the follow-up
    # PUT to that session returns the file's metadata.
    aiohttpretty.register_json_uri('PUT', finish_upload_url, body=item)
    aiohttpretty.register_uri(
        'PUT', start_upload_url,
        headers={'LOCATION': 'http://waterbutler.io?upload_id={}'.format(upload_id)})

    result, created = await provider.upload(file_stream, path)

    assert aiohttpretty.has_call(method='PUT', uri=start_upload_url)
    assert aiohttpretty.has_call(method='PUT', uri=finish_upload_url)
    assert created is False
    assert result == GoogleDriveFileMetadata(item, path)
async def test_download(self, provider, native_dataset_metadata):
    """Downloading a file by id streams back the mocked body."""
    path = '/21'
    url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token)
    draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                   key=provider.token)
    published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
                                       key=provider.token)

    aiohttpretty.register_uri('GET', url, body=b'better', auto_length=True)
    aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata)
    aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata)

    path = await provider.validate_path(path)
    result = await provider.download(path)
    content = await result.read()

    assert content == b'better'
def test_download_by_path_revision(self, provider, repo_tree_metadata_root):
    """Downloading at an explicit revision resolves the commit's tree before fetching the blob."""
    ref = hashlib.sha1().hexdigest()
    file_sha = repo_tree_metadata_root['tree'][0]['sha']
    path = yield from provider.validate_path('/file.txt', branch='other_branch')

    blob_url = provider.build_repo_url('git', 'blobs', file_sha)
    tree_url = provider.build_repo_url('git', 'trees', ref, recursive=1)
    commit_url = provider.build_repo_url('commits', path=path.path.lstrip('/'),
                                         sha='Just a test')

    aiohttpretty.register_uri('GET', blob_url, body=b'delicious')
    aiohttpretty.register_json_uri('GET', tree_url, body=repo_tree_metadata_root)
    aiohttpretty.register_json_uri('GET', commit_url,
                                   body=[{'commit': {'tree': {'sha': ref}}}])

    result = yield from provider.download(path, revision='Just a test')
    content = yield from result.read()

    assert content == b'delicious'
async def test_download_drive_revision(self, provider):
    """Downloading with a revision id fetches the revision metadata, then the file body."""
    revision = 'oldest'
    body = b'we love you conrad'
    item = fixtures.list_file['items'][0]
    path = WaterButlerPath('/birdie.jpg', _ids=(provider.folder['id'], item['id']))

    download_file_url = item['downloadUrl']
    metadata_url = provider.build_url('files', path.identifier)
    revision_url = provider.build_url('files', item['id'], 'revisions', revision, alt='json')

    aiohttpretty.register_json_uri('GET', revision_url, body=item)
    aiohttpretty.register_json_uri('GET', metadata_url, body=item)
    aiohttpretty.register_uri('GET', download_file_url, body=body, auto_length=True)

    result = await provider.download(path, revision=revision)
    content = await result.read()

    assert content == body
async def test_folder_delete(self, provider, folder_and_contents, mock_time):
    """Deleting a folder lists its contents then bulk-deletes them in one POST."""
    path = WaterButlerPath('/some-folder/')
    params = {'prefix': 'some-folder/'}

    query_url = provider.bucket.generate_url(100, 'GET')
    aiohttpretty.register_uri(
        'GET', query_url,
        params=params,
        body=folder_and_contents,
        status=200,
    )

    # NOTE(review): keys here come from the fixture body, not from `path` —
    # presumably `folder_and_contents` lists 'thisfolder/...'; confirm against fixture.
    (payload, headers) = bulk_delete_body(
        ['thisfolder/', 'thisfolder/item1', 'thisfolder/item2']
    )
    delete_url = provider.bucket.generate_url(
        100, 'POST',
        query_parameters={'delete': ''},
        headers=headers,
    )
    aiohttpretty.register_uri('POST', delete_url, status=204)

    await provider.delete(path)

    assert aiohttpretty.has_call(method='GET', uri=query_url, params=params)
    assert aiohttpretty.has_call(method='POST', uri=delete_url)
def test_metadata_file_missing(self, provider):
    """A 404 on the HEAD request surfaces as a MetadataError."""
    path = WaterButlerPath('/notfound.txt')
    url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
    aiohttpretty.register_uri('HEAD', url, status=404)

    with pytest.raises(exceptions.MetadataError):
        yield from provider.metadata(path)
async def test_upload_file(self, mock_time, mock_provider, file_wb_path, meta_file_raw,
                           meta_file_parsed, meta_file_upload_raw, file_stream_file):
    """Uploading a file PUTs to the signed URL, then HEADs for metadata."""
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)

    signed_url_upload = mock_provider._build_and_sign_url('PUT', file_obj_name)
    upload_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_upload_raw)))
    aiohttpretty.register_uri('PUT', signed_url_upload,
                              headers=upload_headers, status=HTTPStatus.OK)

    signed_url_metadata = mock_provider._build_and_sign_url('HEAD', file_obj_name)
    metadata_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_raw)))
    aiohttpretty.register_uri('HEAD', signed_url_metadata,
                              headers=metadata_headers, status=HTTPStatus.OK)

    metadata_expected = GoogleCloudFileMetadata(json.loads(meta_file_parsed))

    metadata, _ = await mock_provider.upload(file_stream_file, file_wb_path)

    assert metadata == metadata_expected
    assert aiohttpretty.has_call(method='PUT', uri=signed_url_upload)
    assert aiohttpretty.has_call(method='HEAD', uri=signed_url_metadata)
async def test_download(self, provider, native_dataset_metadata):
    """Downloading a file by id streams back the mocked body."""
    path = '/21'
    url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token)
    draft_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token)
    published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)

    aiohttpretty.register_uri('GET', url, body=b'better', auto_length=True)
    aiohttpretty.register_json_uri('GET', draft_url, status=200,
                                   body=native_dataset_metadata)
    aiohttpretty.register_json_uri('GET', published_url, status=200,
                                   body=native_dataset_metadata)

    path = await provider.validate_path(path)
    result = await provider.download(path)
    content = await result.read()

    assert content == b'better'
async def test_intra_move_folder_replace(self, provider, intra_fixtures, root_provider_fixtures):
    """Moving a folder over an existing one deletes the target, moves, and relists children."""
    item = intra_fixtures['intra_folder_metadata']
    list_metadata = root_provider_fixtures['folder_list_metadata']

    src_path = WaterButlerPath('/name/', _ids=(provider, item['id']))
    dest_path = WaterButlerPath('/charmander/name/', _ids=(provider, item['id'], item['id']))

    file_url = provider.build_url('folders', src_path.identifier)
    delete_url = provider.build_url('folders', dest_path.identifier, recursive=True)
    list_url = provider.build_url('folders', item['id'], 'items',
                                  fields='id,name,size,modified_at,etag,total_count',
                                  offset=0, limit=1000)

    aiohttpretty.register_json_uri('PUT', file_url, body=item)
    aiohttpretty.register_uri('DELETE', delete_url, status=204)
    aiohttpretty.register_json_uri('GET', list_url, body=list_metadata)

    # Build the expected folder metadata, including serialized children.
    expected_folder = BoxFolderMetadata(item, dest_path)
    expected_folder._children = []
    for child_item in list_metadata['entries']:
        child_path = dest_path.child(child_item['name'],
                                     folder=(child_item['type'] == 'folder'))
        expected_folder._children.append(provider._serialize_item(child_item, child_path))

    result = await provider.intra_move(provider, src_path, dest_path)

    assert result == (expected_folder, False)
    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
async def test_upload_keep(self, provider, file_stream, file_metadata, file_metadata_object):
    """Uploading with conflict='keep' writes to the renamed path returned by the conflict handler."""
    path = WaterButlerPath('/phile', prepend=provider.folder)
    renamed_path = WaterButlerPath('/phile (1)', prepend=provider.folder)
    path._parts[-1]._id = 'fake_id'

    # Force the conflict handler to report a rename.
    provider.handle_name_conflict = utils.MockCoroutine(return_value=(renamed_path, True))

    url = provider._webdav_url_ + renamed_path.full_path
    aiohttpretty.register_uri('PROPFIND', url, body=file_metadata,
                              auto_length=True, status=207)
    aiohttpretty.register_uri('PUT', provider._webdav_url_ + '/my_folder/phile (1)',
                              body=b'squares', auto_length=True, status=201)

    metadata, created = await provider.upload(file_stream, path, 'keep')

    assert created is True
    assert metadata.name == file_metadata_object.name
    assert metadata.size == file_metadata_object.size
    assert aiohttpretty.has_call(method='PUT', uri=url)
def test_upload_create(self, provider, file_stream, native_file_metadata,
                       empty_native_dataset_metadata, native_dataset_metadata):
    """Uploading a new file POSTs it, then sees it appear in the draft dataset listing."""
    path = '/thefile.txt'
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)

    latest_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                    key=provider.token)
    latest_published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)

    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
    # First listing is empty (pre-upload); second contains the new file.
    aiohttpretty.register_uri('GET', latest_url, responses=[
        {
            'status': 200,
            'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
        {
            'status': 200,
            'body': json.dumps(native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
    ])

    path = yield from provider.validate_path(path)
    metadata, created = yield from provider.upload(file_stream, path)

    expected = DataverseFileMetadata(native_file_metadata['datafile'], 'latest')

    assert created is True
    assert metadata == expected
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=latest_url)
    assert aiohttpretty.has_call(method='GET', uri=latest_published_url)
async def test_create_folder_naming_conflict(self, provider, folder_contents_metadata):
    """A 405 from MKCOL is reported as a FolderNamingConflict."""
    path = WaterButlerPath('/pumpkin/', prepend=provider.folder)
    folder_url = provider._webdav_url_ + path.full_path
    aiohttpretty.register_uri('MKCOL', folder_url, status=405)

    with pytest.raises(exceptions.FolderNamingConflict):
        await provider.create_folder(path)
async def test_upload_updates(self, provider, file_stream, native_file_metadata,
                              native_dataset_metadata):
    """Uploading over an existing file deletes the old id and reports created=False."""
    path = '/20'
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)

    published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                       key=provider.token)
    aiohttpretty.register_json_uri('GET', published_url, status=200,
                                   body=native_dataset_metadata)

    # Old file id gets deleted as part of the update.
    delete_url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', '/20')
    aiohttpretty.register_json_uri('DELETE', delete_url, status=204)

    latest_published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)
    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})

    path = await provider.validate_path(path)
    metadata, created = await provider.upload(file_stream, path)

    expected = DataverseFileMetadata(native_file_metadata['datafile'], 'latest')

    assert metadata == expected
    assert created is False
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=published_url)
async def test_project_article_download(self, project_provider, list_project_articles,
                                        article_metadata, file_metadata):
    """Downloading a project article file streams the mocked body."""
    body = b'castle on a cloud'
    file_id = file_metadata['id']
    article_id = str(list_project_articles[0]['id'])
    download_url = file_metadata['download_url']

    article_metadata_url = project_provider.build_url('articles', article_id)
    list_articles_url = project_provider.build_url(
        'projects', project_provider.project_id, 'articles')

    aiohttpretty.register_uri('GET', download_url, body=body, auto_length=True)
    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)

    path = await project_provider.validate_path('/{}/{}'.format(article_id, file_id))
    result = await project_provider.download(path)
    content = await result.read()

    assert content == body
async def test_upload_file_checksum_mismatch(self, mock_time, mock_provider, file_wb_path,
                                             meta_file_raw, meta_file_upload_raw,
                                             file_stream_file):
    """An upload whose server etag disagrees with the stream hash raises a checksum error."""
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)

    signed_url_upload = mock_provider._build_and_sign_url('PUT', file_obj_name)
    # There is no need to use `MultiDict` since the hashes are not used
    upload_headers_dict = dict(json.loads(meta_file_upload_raw))
    upload_headers_dict.update({'etag': '"9e780e1c4ee28c44642160b349b3aab0"'})
    aiohttpretty.register_uri(
        'PUT', signed_url_upload,
        headers=utils.get_multi_dict_from_python_dict(upload_headers_dict),
        status=HTTPStatus.OK)

    signed_url_metadata = mock_provider._build_and_sign_url('HEAD', file_obj_name)
    # There is no need to use `MultiDict` since the hashes are not used
    aiohttpretty.register_uri(
        'HEAD', signed_url_metadata,
        headers=utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_raw))),
        status=HTTPStatus.OK)

    with pytest.raises(exceptions.UploadChecksumMismatchError) as exc:
        await mock_provider.upload(file_stream_file, file_wb_path)

    assert exc.value.code == HTTPStatus.INTERNAL_SERVER_ERROR
    assert aiohttpretty.has_call(method='HEAD', uri=signed_url_metadata)
    assert aiohttpretty.has_call(method='PUT', uri=signed_url_upload)
async def test_download_standard_file_range(self, provider, download_fixtures):
    """A ranged download sends a Range header and returns a partial response."""
    file_id = download_fixtures['file_id']
    path = OneDrivePath('/toes.txt', _ids=[download_fixtures['root_id'], file_id])

    metadata_url = provider._build_drive_url('items', file_id)
    aiohttpretty.register_json_uri('GET', metadata_url,
                                   body=download_fixtures['file_metadata'])

    download_url = download_fixtures['file_download_url']
    aiohttpretty.register_uri('GET', download_url, status=206,
                              body=download_fixtures['file_content'][0:2])

    response = await provider.download(path, range=(0, 1))
    assert response.partial

    content = await response.read()
    assert content == b'te'

    assert aiohttpretty.has_call(method='GET', uri=download_url, headers={
        'Range': 'bytes=0-1',
        'Authorization': 'bearer wrote harry potter',
        'accept-encoding': ''
    })
def test_delete(monkeypatch, provider, mock_path):
    """Deleting a file issues a DELETE against the item URL."""
    path = WaterButlerPath('/unrelatedpath', _ids=('Doesntmatter', 'another'))
    # Fixed: aiohttpretty's keyword is `status`, not `status_code` (see every
    # other registration in this file); the original kwarg was silently ignored.
    aiohttpretty.register_uri('DELETE', 'https://waterbutler.io/another/', status=200)

    yield from provider.delete(path)

    assert aiohttpretty.has_call(method='DELETE',
                                 uri='https://waterbutler.io/another/',
                                 check_params=False)
async def test_delete_comfirm_delete(self, provider, folder_and_contents, mock_time):
    """Deleting the root requires confirm_delete=1; without it a DeleteError is raised."""
    path = WaterButlerPath('/')

    query_url = provider.bucket.generate_url(100, 'GET')
    aiohttpretty.register_uri(
        'GET', query_url,
        params={'prefix': ''},
        body=folder_and_contents,
        status=200,
    )

    (payload, headers) = bulk_delete_body(
        ['thisfolder/', 'thisfolder/item1', 'thisfolder/item2']
    )
    delete_url = provider.bucket.generate_url(
        100, 'POST',
        query_parameters={'delete': ''},
        headers=headers,
    )
    aiohttpretty.register_uri('POST', delete_url, status=204)

    # Unconfirmed root delete must refuse.
    with pytest.raises(exceptions.DeleteError):
        await provider.delete(path)

    # Confirmed root delete goes through the bulk-delete endpoint.
    await provider.delete(path, confirm_delete=1)
    assert aiohttpretty.has_call(method='POST', uri=delete_url)
async def test_download_not_found(self, provider, mock_time):
    """A 404 on the signed download URL surfaces as a DownloadError."""
    path = WaterButlerPath('/muhtriangle')
    url = provider.bucket.new_key(path.path).generate_url(
        100, response_headers={'response-content-disposition': 'attachment'})
    aiohttpretty.register_uri('GET', url, status=404)

    with pytest.raises(exceptions.DownloadError):
        await provider.download(path)
def test_download_by_path_ref_branch(self, provider, repo_tree_metadata_root):
    """Downloading from a branch resolves the branch head sha before fetching the blob."""
    ref = hashlib.sha1().hexdigest()
    file_sha = repo_tree_metadata_root['tree'][0]['sha']
    path = yield from provider.validate_path('/file.txt', branch='other_branch')

    blob_url = provider.build_repo_url('git', 'blobs', file_sha)
    tree_url = provider.build_repo_url('git', 'trees', ref, recursive=1)
    latest_sha_url = provider.build_repo_url('git', 'refs', 'heads', path.identifier[0])

    aiohttpretty.register_uri('GET', blob_url, body=b'delicious')
    aiohttpretty.register_json_uri('GET', tree_url, body=repo_tree_metadata_root)
    aiohttpretty.register_json_uri('GET', latest_sha_url, body={'object': {'sha': ref}})

    result = yield from provider.download(path)
    content = yield from result.read()

    assert content == b'delicious'
async def test_validate_v1_path_file(self, provider, file_header_metadata, mock_time):
    """v1 path validation accepts a file path and rejects it with a trailing slash."""
    file_path = 'foobah'
    params = {'prefix': '/' + file_path + '/', 'delimiter': '/'}

    good_metadata_url = provider.bucket.new_key('/' + file_path).generate_url(100, 'HEAD')
    bad_metadata_url = provider.bucket.generate_url(100)
    aiohttpretty.register_uri('HEAD', good_metadata_url, headers=file_header_metadata)
    aiohttpretty.register_uri('GET', bad_metadata_url, params=params, status=404)

    assert WaterButlerPath('/') == await provider.validate_v1_path('/')

    try:
        wb_path_v1 = await provider.validate_v1_path('/' + file_path)
    except Exception as exc:
        pytest.fail(str(exc))

    # Folder-form of a file path must 404.
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path('/' + file_path + '/')
    assert exc.value.code == client.NOT_FOUND

    wb_path_v0 = await provider.validate_path('/' + file_path)
    assert wb_path_v1 == wb_path_v0
def test_upload(self, provider, file_content, file_stream, file_metadata):
    """Uploading a new file PUTs the body and verifies the returned ETag."""
    path = WaterButlerPath('/foobah')
    content_md5 = hashlib.md5(file_content).hexdigest()
    url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
    metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')

    # First HEAD (pre-upload existence check) 404s; second returns metadata.
    aiohttpretty.register_uri(
        'HEAD',
        metadata_url,
        responses=[
            {'status': 404},
            {'headers': file_metadata},
        ],
    )
    # Fixed: removed a stray trailing comma that turned this statement into a
    # one-element tuple expression (harmless but misleading).
    aiohttpretty.register_uri('PUT', url, status=200,
                              headers={'ETag': '"{}"'.format(content_md5)})

    metadata, created = yield from provider.upload(file_stream, path)

    assert metadata['kind'] == 'file'
    assert created
    assert aiohttpretty.has_call(method='PUT', uri=url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
async def test_upload_checksum_mismatch(self, provider, file_stream,
                                        file_header_metadata, mock_time):
    """A bad ETag on upload raises UploadChecksumMismatchError."""
    path = WaterButlerPath('/foobah')
    url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
    metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')

    # First HEAD (pre-upload existence check) 404s; second returns metadata.
    aiohttpretty.register_uri(
        'HEAD',
        metadata_url,
        responses=[
            {'status': 404},
            {'headers': file_header_metadata},
        ],
    )
    aiohttpretty.register_uri('PUT', url, status=200, headers={'ETag': '"bad hash"'})

    with pytest.raises(exceptions.UploadChecksumMismatchError):
        await provider.upload(file_stream, path)

    assert aiohttpretty.has_call(method='PUT', uri=url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
async def test_upload_create(self, provider, file_stream, native_file_metadata,
                             empty_native_dataset_metadata, native_dataset_metadata):
    """Uploading a new file POSTs it, then sees it appear in the draft dataset listing."""
    path = '/thefile.txt'
    url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
    aiohttpretty.register_uri('POST', url, status=201)

    latest_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
                                    key=provider.token)
    latest_published_url = provider.build_url(
        dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token)

    aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
    # First listing is empty (pre-upload); second contains the new file.
    aiohttpretty.register_uri('GET', latest_url, responses=[
        {
            'status': 200,
            'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
        {
            'status': 200,
            'body': json.dumps(native_dataset_metadata).encode('utf-8'),
            'headers': {'Content-Type': 'application/json'},
        },
    ])

    path = await provider.validate_path(path)
    metadata, created = await provider.upload(file_stream, path)

    expected = DataverseFileMetadata(native_file_metadata['datafile'], 'latest')

    assert created is True
    assert metadata == expected
    assert aiohttpretty.has_call(method='POST', uri=url)
    assert aiohttpretty.has_call(method='GET', uri=latest_url)
    assert aiohttpretty.has_call(method='GET', uri=latest_published_url)
async def test_intra_copy(self, provider, file_header_metadata, mock_time):
    """Intra-provider copy PUTs with an x-amz-copy-source header, then HEADs the target."""
    source_path = WaterButlerPath('/source')
    dest_path = WaterButlerPath('/dest')

    metadata_url = provider.bucket.new_key(dest_path.path).generate_url(100, 'HEAD')
    aiohttpretty.register_uri('HEAD', metadata_url, headers=file_header_metadata)

    # Copy-source header value is the URL-quoted "/bucket/key" of the source.
    header_path = '/' + os.path.join(provider.settings['bucket'], source_path.path)
    headers = {'x-amz-copy-source': parse.quote(header_path)}

    url = provider.bucket.new_key(dest_path.path).generate_url(100, 'PUT', headers=headers)
    aiohttpretty.register_uri('PUT', url, status=200)

    metadata, exists = await provider.intra_copy(provider, source_path, dest_path)

    assert provider._check_region.called
    assert metadata.kind == 'file'
    assert not exists
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
    assert aiohttpretty.has_call(method='PUT', uri=url, headers=headers)
async def test_download_docs(self, provider):
    """Google Docs downloads go through the docx export link."""
    body = b'we love you conrad'
    item = fixtures.docs_file_metadata
    path = WaterButlerPath('/birdie.jpg', _ids=(provider.folder['id'], item['id']))

    metadata_url = provider.build_url('files', path.identifier)
    revisions_url = provider.build_url('files', item['id'], 'revisions')
    download_file_url = item['exportLinks'][
        'application/vnd.openxmlformats-officedocument.wordprocessingml.document']

    aiohttpretty.register_json_uri('GET', metadata_url, body=item)
    aiohttpretty.register_uri('GET', download_file_url, body=body, auto_length=True)
    aiohttpretty.register_json_uri('GET', revisions_url, body={'items': [{'id': 'foo'}]})

    result = await provider.download(path)
    content = await result.read()

    assert content == body
async def test_single_version_metadata(self, provider, single_version_metadata, mock_time):
    """revisions() on a single-version object returns a one-element revision list."""
    path = WaterButlerPath('/single-version.file')
    url = provider.bucket.generate_url(100, 'GET', query_parameters={'versions': ''})
    params = build_folder_params(path)

    aiohttpretty.register_uri('GET', url, params=params, status=200,
                              body=single_version_metadata)

    data = await provider.revisions(path)

    assert isinstance(data, list)
    assert len(data) == 1
    for revision in data:
        assert hasattr(revision, 'extra')
        assert hasattr(revision, 'version')
        assert hasattr(revision, 'version_identifier')

    assert aiohttpretty.has_call(method='GET', uri=url, params=params)
async def test_upload_create_nested(self, provider, file_stream):
    """Creating a file nested under existing folders uses POST to start the session."""
    upload_id = '7'
    item = fixtures.list_file['items'][0]
    path = WaterButlerPath('/ed/sullivan/show.mp3', _ids=[str(x) for x in range(3)])

    start_upload_url = provider._build_upload_url('files', uploadType='resumable')
    finish_upload_url = provider._build_upload_url('files', uploadType='resumable',
                                                   upload_id=upload_id)

    aiohttpretty.register_uri(
        'POST', start_upload_url,
        headers={'LOCATION': 'http://waterbutler.io?upload_id={}'.format(upload_id)})
    aiohttpretty.register_json_uri('PUT', finish_upload_url, body=item)

    result, created = await provider.upload(file_stream, path)

    assert aiohttpretty.has_call(method='POST', uri=start_upload_url)
    assert aiohttpretty.has_call(method='PUT', uri=finish_upload_url)
    assert created is True
    assert result == GoogleDriveFileMetadata(item, path)
def test_download_not_found(self, provider):
    """A 404 from the content endpoint surfaces as a DownloadError."""
    path = DropboxPath(provider.folder, '/vectors.txt')
    url = provider._build_content_url('files', 'auto', path.full_path)
    aiohttpretty.register_uri('GET', url, status=404)

    with pytest.raises(exceptions.DownloadError):
        yield from provider.download(str(path))
async def test_validate_v1_path_file(self, provider, root_provider_fixtures):
    """v1 path validation accepts a file id and rejects the folder form of it."""
    file_id = '5000948880'
    good_url = provider.build_url('files', file_id, fields='id,name,path_collection')
    bad_url = provider.build_url('folders', file_id, fields='id,name,path_collection')

    aiohttpretty.register_json_uri(
        'get', good_url,
        body=root_provider_fixtures['file_metadata']['entries'][0],
        status=200)
    aiohttpretty.register_uri('get', bad_url, status=404)

    try:
        wb_path_v1 = await provider.validate_v1_path('/' + file_id)
    except Exception as exc:
        pytest.fail(str(exc))

    # Folder-form of a file id must 404.
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path('/' + file_id + '/')
    assert exc.value.code == HTTPStatus.NOT_FOUND

    wb_path_v0 = await provider.validate_path('/' + file_id)
    assert wb_path_v1 == wb_path_v0
async def test_validate_v1_path_folder(self, provider, folder_metadata, mock_time):
    """v1 path validation accepts a folder path and rejects it without the trailing slash."""
    folder_path = 'Photos'
    params = {'prefix': '/' + folder_path + '/', 'delimiter': '/'}

    good_metadata_url = provider.bucket.generate_url(100)
    bad_metadata_url = provider.bucket.new_key('/' + folder_path).generate_url(100, 'HEAD')

    aiohttpretty.register_uri(
        'GET', good_metadata_url,
        params=params,
        body=folder_metadata,
        headers={'Content-Type': 'application/xml'}
    )
    aiohttpretty.register_uri('HEAD', bad_metadata_url, status=404)

    try:
        wb_path_v1 = await provider.validate_v1_path('/' + folder_path + '/')
    except Exception as exc:
        pytest.fail(str(exc))

    # File-form of a folder path must 404.
    with pytest.raises(exceptions.NotFoundError) as exc:
        await provider.validate_v1_path('/' + folder_path)
    assert exc.value.code == client.NOT_FOUND

    wb_path_v0 = await provider.validate_path('/' + folder_path + '/')
    assert wb_path_v1 == wb_path_v0
async def test_project_article_delete(self, project_provider, list_project_articles,
                                      article_metadata, file_metadata):
    """Deleting an article file issues a DELETE against the file endpoint."""
    file_id = str(file_metadata['id'])
    article_id = str(list_project_articles[0]['id'])

    article_metadata_url = project_provider.build_url('articles', article_id)
    article_delete_url = project_provider.build_url('articles', article_id, 'files', file_id)
    list_articles_url = project_provider.build_url(
        'projects', project_provider.project_id, 'articles')

    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=article_metadata)
    aiohttpretty.register_uri('DELETE', article_delete_url)

    path = await project_provider.validate_path('/{}/{}'.format(article_id, file_id))
    result = await project_provider.delete(path)

    assert result is None
    assert aiohttpretty.has_call(method='DELETE', uri=article_delete_url)
async def test_upload_encrypted(self, provider, file_content, file_stream,
                                file_header_metadata, mock_time):
    """With encrypt_uploads on, the signed PUT URL carries encrypt_key and metadata reports AES256."""
    # Set trigger for encrypt_key=True in s3.provider.upload
    provider.encrypt_uploads = True
    path = WaterButlerPath('/foobah')
    content_md5 = hashlib.md5(file_content).hexdigest()

    url = provider.bucket.new_key(path.path).generate_url(100, 'PUT', encrypt_key=True)
    metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')

    # First HEAD (pre-upload existence check) 404s; second returns metadata.
    aiohttpretty.register_uri(
        'HEAD',
        metadata_url,
        responses=[
            {'status': 404},
            {'headers': file_header_metadata},
        ],
    )
    aiohttpretty.register_uri('PUT', url, status=200,
                              headers={'ETag': '"{}"'.format(content_md5)})

    metadata, created = await provider.upload(file_stream, path)

    assert metadata.kind == 'file'
    assert metadata.extra['encryption'] == 'AES256'
    assert created
    assert aiohttpretty.has_call(method='PUT', uri=url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
async def test_metadata_missing(self, provider):
    """A 404 from the metadata endpoint surfaces as a MetadataError."""
    path = WaterButlerPath('/pfile', prepend=provider.folder)
    url = provider.build_url('metadata', 'auto', path.full_path)
    aiohttpretty.register_uri('GET', url, status=404)

    with pytest.raises(exceptions.MetadataError):
        await provider.metadata(path)
async def test_download_range(self, provider, root_provider_fixtures):
    """A ranged download sends a Range header and returns a partial response."""
    item = root_provider_fixtures['file_metadata']['entries'][0]
    path = WaterButlerPath('/triangles.txt', _ids=(provider.folder, item['id']))

    metadata_url = provider.build_url('files', item['id'])
    content_url = provider.build_url('files', item['id'], 'content')

    aiohttpretty.register_json_uri('GET', metadata_url, body=item)
    aiohttpretty.register_uri('GET', content_url, body=b'be',
                              auto_length=True, status=206)

    result = await provider.download(path, range=(0, 1))
    assert result.partial

    content = await result.read()
    assert content == b'be'

    assert aiohttpretty.has_call(method='GET', uri=content_url, headers={
        'Authorization': 'Bearer wrote harry potter',
        'Accept-Encoding': '',
        'Range': 'bytes=0-1'
    })
async def test_register_uri(self):
    """register_uri stores the given options keyed on (method, url) in the registry."""
    url = 'http://example.com/'
    desired_response = b'example data'

    aiohttpretty.register_uri('GET', url, body=desired_response)

    options = aiohttpretty.registry[('GET', 'http://example.com/')]
    assert options == {'body': b'example data'}
def test_upload_encrypted(self, provider, file_content, file_stream, file_metadata,
                          mock_time):
    """With encrypt_uploads on, the signed PUT URL carries encrypt_key and metadata reports AES256."""
    # Set trigger for encrypt_key=True in s3.provider.upload
    provider.encrypt_uploads = True
    path = WaterButlerPath('/foobah')
    content_md5 = hashlib.md5(file_content).hexdigest()

    url = provider.bucket.new_key(path.path).generate_url(100, 'PUT', encrypt_key=True)
    metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')

    # First HEAD (pre-upload existence check) 404s; second returns metadata.
    aiohttpretty.register_uri(
        'HEAD',
        metadata_url,
        responses=[
            {'status': 404},
            {'headers': file_metadata},
        ],
    )
    aiohttpretty.register_uri('PUT', url, status=200,
                              headers={'ETag': '"{}"'.format(content_md5)})

    metadata, created = yield from provider.upload(file_stream, path)

    assert metadata.kind == 'file'
    assert metadata.extra['encryption'] == 'AES256'
    assert created
    assert aiohttpretty.has_call(method='PUT', uri=url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
def mock_temp_key(endpoint, temp_url_key):
    """Register a HEAD mock that advertises the account's temp-URL key header."""
    aiohttpretty.register_uri(
        'HEAD',
        endpoint,
        status=204,
        headers={'X-Account-Meta-Temp-URL-Key': temp_url_key},
    )
def test_download_by_path(self, provider):
    """Downloading by path streams the mocked contents endpoint body."""
    path = GitHubPath('/my.file')
    url = provider.build_repo_url('contents', path.path)
    aiohttpretty.register_uri('GET', url, body=b'delicious')

    result = yield from provider.download(str(path))
    content = yield from result.response.read()

    assert content == b'delicious'
def test_download_by_file_sha(self, provider):
    """Downloading by blob sha fetches the git blob directly."""
    ref = hashlib.sha1().hexdigest()
    url = provider.build_repo_url('git', 'blobs', ref)
    aiohttpretty.register_uri('GET', url, body=b'delicious')

    result = yield from provider.download('', fileSha=ref)
    content = yield from result.response.read()

    assert content == b'delicious'
def test_download_not_found(self, provider):
    """A 404 on the signed download URL surfaces as a DownloadError."""
    path = WaterButlerPath('/muhtriangle')
    url = provider.bucket.new_key(path.path).generate_url(
        100, response_headers={'response-content-disposition': 'attachment'})
    aiohttpretty.register_uri('GET', url, status=404)

    with pytest.raises(exceptions.DownloadError):
        yield from provider.download(path)
def test_delete(self, connected_provider):
    """Deleting a file issues a DELETE against the item URL."""
    path = CloudFilesPath('/delete.file')
    url = connected_provider.build_url(path.path)
    aiohttpretty.register_uri('DELETE', url, status=204)

    yield from connected_provider.delete(str(path))

    assert aiohttpretty.has_call(method='DELETE', uri=url)
def test_metadata_missing(self, provider):
    """A 404 from the files endpoint surfaces as a MetadataError."""
    path = BoxPath('/' + provider.folder + '/pfile')
    url = provider.build_url('files', path._id)
    aiohttpretty.register_uri('GET', url, status=404)

    with pytest.raises(exceptions.MetadataError):
        yield from provider.metadata(str(path))
async def test_delete_file(self, connected_provider):
    """Deleting a file issues a DELETE against the item URL."""
    path = WaterButlerPath('/delete.file')
    url = connected_provider.build_url(path.path)
    aiohttpretty.register_uri('DELETE', url, status=204)

    await connected_provider.delete(path)

    assert aiohttpretty.has_call(method='DELETE', uri=url)
def test_metadata_missing(self, provider):
    """A 404 from the metadata endpoint surfaces as a MetadataError."""
    path = WaterButlerPath('/pfile', prepend=provider.folder)
    url = provider.build_url('metadata', 'auto', path.full_path)
    aiohttpretty.register_uri('GET', url, status=404)

    with pytest.raises(exceptions.MetadataError):
        yield from provider.metadata(path)
async def test_upload_file_checksum_mismatch(self, mock_time, mock_provider, file_wb_path,
                                             meta_file_raw, meta_file_upload_raw,
                                             file_stream_file):
    """An upload whose server etag disagrees with the stream hash raises a checksum error."""
    file_obj_name = utils.get_obj_name(file_wb_path, is_folder=False)

    signed_url_upload = mock_provider._build_and_sign_url('PUT', file_obj_name)
    # There is no need to use `MultiDict` since the hashes are not used
    upload_headers_dict = dict(json.loads(meta_file_upload_raw))
    upload_headers_dict.update({'etag': '"9e780e1c4ee28c44642160b349b3aab0"'})
    aiohttpretty.register_uri(
        'PUT',
        signed_url_upload,
        headers=utils.get_multi_dict_from_python_dict(upload_headers_dict),
        status=HTTPStatus.OK
    )

    signed_url_metadata = mock_provider._build_and_sign_url('HEAD', file_obj_name)
    # There is no need to use `MultiDict` since the hashes are not used
    aiohttpretty.register_uri(
        'HEAD',
        signed_url_metadata,
        headers=utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_raw))),
        status=HTTPStatus.OK
    )

    with pytest.raises(exceptions.UploadChecksumMismatchError) as exc:
        await mock_provider.upload(file_stream_file, file_wb_path)

    assert exc.value.code == HTTPStatus.INTERNAL_SERVER_ERROR
    assert aiohttpretty.has_call(method='HEAD', uri=signed_url_metadata)
    assert aiohttpretty.has_call(method='PUT', uri=signed_url_upload)
def test_validate_v1_path_folder(self, provider, folder_metadata, mock_time):
    """v1 validation accepts 'Photos/' as a folder but 404s bare 'Photos'."""
    folder_name = 'Photos'
    list_params = {'prefix': '/' + folder_name + '/', 'delimiter': '/'}
    folder_listing_url = provider.bucket.generate_url(100)
    # HEAD on the key without a trailing slash must come back 404.
    missing_key_url = provider.bucket.new_key('/' + folder_name).generate_url(100, 'HEAD')
    aiohttpretty.register_uri(
        'GET',
        folder_listing_url,
        params=list_params,
        body=folder_metadata,
        headers={'Content-Type': 'application/xml'}
    )
    aiohttpretty.register_uri('HEAD', missing_key_url, status=404)

    try:
        wb_path_v1 = yield from provider.validate_v1_path('/' + folder_name + '/')
    except Exception as exc:
        pytest.fail(str(exc))

    with pytest.raises(exceptions.NotFoundError) as exc:
        yield from provider.validate_v1_path('/' + folder_name)
    assert exc.value.code == client.NOT_FOUND

    # v0 validation of the same folder path agrees with v1.
    wb_path_v0 = yield from provider.validate_path('/' + folder_name + '/')
    assert wb_path_v1 == wb_path_v0
async def test_upload_file(self, mock_time, mock_provider, file_wb_path, meta_file_raw,
                           meta_file_parsed, meta_file_upload_raw, file_stream_file):
    """A successful upload returns metadata equal to the parsed fixture."""
    obj_name = utils.get_obj_name(file_wb_path, is_folder=False)

    upload_url = mock_provider._build_and_sign_url('PUT', obj_name, **{})
    upload_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_upload_raw)))
    aiohttpretty.register_uri(
        'PUT',
        upload_url,
        headers=upload_headers,
        status=HTTPStatus.OK
    )

    metadata_url = mock_provider._build_and_sign_url('HEAD', obj_name, **{})
    head_headers = utils.get_multi_dict_from_python_dict(dict(json.loads(meta_file_raw)))
    aiohttpretty.register_uri(
        'HEAD',
        metadata_url,
        headers=head_headers,
        status=HTTPStatus.OK
    )

    expected = GoogleCloudFileMetadata(json.loads(meta_file_parsed))

    metadata, _ = await mock_provider.upload(file_stream_file, file_wb_path)

    assert metadata == expected
    assert aiohttpretty.has_call(method='PUT', uri=upload_url)
    assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
async def test_project_article_download(self, project_provider, file_article_metadata,
                                        list_project_articles, file_metadata):
    """Downloading a file inside a project article streams the file body."""
    article_id = str(list_project_articles[0]['id'])
    file_id = str(file_article_metadata['files'][0]['id'])
    expected_body = b'castle on a cloud'

    root_parts = project_provider.root_path_parts
    list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
    file_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
                                                  article_id, 'files', file_id)
    article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
                                                      article_id)
    download_url = file_metadata['download_url']

    # Page 1 carries the article list; empty page 2 terminates pagination.
    aiohttpretty.register_json_uri('GET', list_articles_url, body=list_project_articles,
                                   params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
    aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
                                   params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
    aiohttpretty.register_json_uri('GET', file_metadata_url, body=file_metadata)
    aiohttpretty.register_json_uri('GET', article_metadata_url, body=file_article_metadata)
    aiohttpretty.register_uri('GET', download_url, params={'token': project_provider.token},
                              body=expected_body, auto_length=True)

    path = await project_provider.validate_path('/{}/{}'.format(article_id, file_id))
    result = await project_provider.download(path)
    content = await result.read()

    assert content == expected_body
def test_upload_create_nested(self, provider, file_stream):
    """Uploading into a nested path with a missing folder creates the file."""
    path = '/ed/sullivan/show.mp3'
    upload_id = '7'
    segments = path.split('/')

    registered_urls, registered_bodies = [], []
    # Register one folder-listing response per path segment above the file.
    for depth, segment in enumerate(segments[:-1]):
        query = provider._build_query(depth or provider.folder['id'], title=segments[depth + 1])
        # 'sullivan' is the folder that does not exist yet, so it lists empty.
        listing = {'items': []} if segment == 'sullivan' else fixtures.generate_list(depth + 1)
        list_url = provider.build_url('files', q=query, alt='json')
        aiohttpretty.register_json_uri('GET', list_url, body=listing)
        registered_urls.append(list_url)
        registered_bodies.append(listing)

    item = fixtures.list_file['items'][0]
    start_upload_url = provider._build_upload_url('files', uploadType='resumable')
    finish_upload_url = provider._build_upload_url('files', uploadType='resumable',
                                                   upload_id=upload_id)
    aiohttpretty.register_uri(
        'POST', start_upload_url,
        headers={'LOCATION': 'http://waterbutler.io?upload_id={}'.format(upload_id)})
    aiohttpretty.register_json_uri('PUT', finish_upload_url, body=item)

    result, created = yield from provider.upload(file_stream, path)

    assert aiohttpretty.has_call(method='POST', uri=start_upload_url)
    assert aiohttpretty.has_call(method='PUT', uri=finish_upload_url)
    assert created is True
    assert result == GoogleDriveFileMetadata(item, '/ed/sullivan').serialized()
def test_folder_delete(self, provider, contents_and_self, mock_time):
    """Deleting a folder lists its contents then bulk-deletes every key."""
    folder = WaterButlerPath('/some-folder/')

    listing_params = {'prefix': 'some-folder/'}
    listing_url = provider.bucket.generate_url(100, 'GET')
    aiohttpretty.register_uri(
        'GET',
        listing_url,
        params=listing_params,
        body=contents_and_self,
        status=200,
    )

    # Keys come from the contents_and_self fixture body.
    payload, headers = bulk_delete_body(
        ['thisfolder/', 'thisfolder/item1', 'thisfolder/item2']
    )
    bulk_delete_url = provider.bucket.generate_url(
        100,
        'POST',
        query_parameters={'delete': ''},
        headers=headers,
    )
    aiohttpretty.register_uri('POST', bulk_delete_url, status=204)

    yield from provider.delete(folder)

    assert aiohttpretty.has_call(method='GET', uri=listing_url, params=listing_params)
    assert aiohttpretty.has_call(method='POST', uri=bulk_delete_url)
def test_delete(self, provider):
    """Deleting a file sends a single signed DELETE for its key."""
    target = WaterButlerPath('/some-file')
    delete_url = provider.bucket.new_key(target.path).generate_url(100, 'DELETE')
    aiohttpretty.register_uri('DELETE', delete_url, status=200)

    yield from provider.delete(target)

    assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
async def test_download(self, provider):
    """Download returns the content endpoint's response body verbatim."""
    file_path = WaterButlerPath('/triangles.txt', prepend=provider.folder)
    content_url = provider._build_content_url('files', 'download')
    aiohttpretty.register_uri('POST', content_url, body=b'better', auto_length=True)

    result = await provider.download(file_path)
    data = await result.response.read()

    assert data == b'better'
def test_download_not_found(self, provider, file_metadata):
    """Downloading a file whose metadata lookup 404s raises ProviderError."""
    entry = file_metadata['entries'][0]
    box_path = BoxPath('/' + entry['id'] + '/vectors.txt')
    metadata_url = provider.build_url('files', entry['id'])
    aiohttpretty.register_uri('GET', metadata_url, status=404)

    with pytest.raises(exceptions.ProviderError):
        yield from provider.download(str(box_path))
def test_download(self, connected_provider):
    """Download fetches the signed URL and returns the raw body."""
    file_path = CloudFilesPath('/lets-go-crazy')
    expected = b'dearly-beloved'
    signed_url = connected_provider.sign_url(file_path)
    aiohttpretty.register_uri('GET', signed_url, body=expected)

    result = yield from connected_provider.download(str(file_path))
    content = yield from result.response.read()

    assert content == expected
async def test_metadata_file_bad_content_type(self, connected_provider, file_metadata):
    """Metadata for a file whose HEAD reports a directory content type
    raises MetadataError.

    Fix: copy the fixture before overriding 'Content-Type'. The original
    assigned into ``file_metadata`` directly, mutating the shared fixture
    object and leaking the override into any other test that reuses it.
    """
    item = dict(file_metadata)
    item['Content-Type'] = 'application/directory'
    path = WaterButlerPath('/does_not.exist')
    url = connected_provider.build_url(path.path)
    aiohttpretty.register_uri('HEAD', url, headers=item)

    with pytest.raises(exceptions.MetadataError):
        await connected_provider.metadata(path)
def test_download(self, provider):
    """Download streams the content endpoint's body back to the caller."""
    file_path = WaterButlerPath('/triangles.txt', prepend=provider.folder)
    content_url = provider._build_content_url('files', 'auto', file_path.full_path)
    aiohttpretty.register_uri('GET', content_url, body=b'better', auto_length=True)

    result = yield from provider.download(file_path)
    data = yield from result.response.read()

    assert data == b'better'