async def test_chunked_upload(self, provider, file_stream, file_sha_b64, root_provider_fixtures):
    """Verify ``_chunked_upload`` drives the session/parts/commit helpers in order."""
    assert file_stream.size == 38

    session_metadata = root_provider_fixtures['create_session_metadata']
    parts_manifest = [
        root_provider_fixtures['upload_part_one'],
        root_provider_fixtures['upload_part_two'],
    ]
    file_metadata = root_provider_fixtures['upload_commit_metadata']

    provider._create_chunked_upload_session = MockCoroutine(return_value=session_metadata)
    provider._upload_parts = MockCoroutine(return_value=parts_manifest)
    provider._complete_chunked_upload_session = MockCoroutine(return_value=file_metadata)

    upload_path = WaterButlerPath('/foobah/', _ids=('0', '1'))
    await provider._chunked_upload(upload_path, file_stream)

    provider._create_chunked_upload_session.assert_called_with(upload_path, file_stream)
    provider._upload_parts.assert_called_with(file_stream, session_metadata)
    provider._complete_chunked_upload_session.assert_called_with(
        session_metadata, parts_manifest, file_sha_b64)
async def test_validate_v1_path_file(self, provider, repo_metadata, path_metadata_folder, folder_full_contents_list):
    """``validate_v1_path`` on a nested file path, with every network helper mocked out."""
    file_path = '/folder2-lvl1/folder1-lvl2/folder1-lvl3/file0001.20bytes.txt'

    # Mock ``_fetch_default_branch()`` instead of using ``aiohttpretty``
    repo_metadata = json.loads(repo_metadata)
    provider._fetch_default_branch = MockCoroutine(
        return_value=repo_metadata['mainbranch']['name'])

    # Mock ``_fetch_path_metadata()`` instead of using ``aiohttpretty``
    parent_folder_metadata = json.loads(path_metadata_folder)['non_root_lvl3']
    provider._fetch_path_metadata = MockCoroutine(return_value=parent_folder_metadata)

    # Mock ``_fetch_dir_listing()`` instead of using ``aiohttpretty``
    parent_dir_listing = json.loads(folder_full_contents_list)['file_parent']
    provider._fetch_dir_listing = MockCoroutine(return_value=parent_dir_listing)

    try:
        wb_path_v1 = await provider.validate_v1_path(file_path)
    except Exception as exc:
        return pytest.fail(str(exc))

    wb_path_v0 = await provider.validate_path(file_path)
    assert wb_path_v1 == wb_path_v0
    assert wb_path_v1.branch_name == repo_metadata['mainbranch']['name']
    assert wb_path_v1.commit_sha == parent_folder_metadata['commit']['hash'][:12]

    # A trailing slash turns the file path into a folder path, which must 404.
    bad_path = '{}/'.format(file_path)
    with pytest.raises(exceptions.NotFoundError):
        await provider.validate_v1_path(bad_path)
async def test_validate_v1_path_folder(self, provider, repo_metadata, path_metadata_folder, folder_full_contents_list):
    """``validate_v1_path`` on a nested folder path, with every network helper mocked out."""
    folder_path = '/folder2-lvl1/folder1-lvl2/folder1-lvl3/'

    # Mock out all three fetch helpers rather than registering aiohttpretty URIs.
    repo_metadata = json.loads(repo_metadata)
    provider._fetch_default_branch = MockCoroutine(
        return_value=repo_metadata['mainbranch']['name'])

    parent_folder_metadata = json.loads(path_metadata_folder)['non_root_lvl2']
    provider._fetch_path_metadata = MockCoroutine(return_value=parent_folder_metadata)

    parent_dir_listing = json.loads(folder_full_contents_list)['folder_parent']
    provider._fetch_dir_listing = MockCoroutine(return_value=parent_dir_listing)

    try:
        wb_path_v1 = await provider.validate_v1_path(folder_path)
    except Exception as exc:
        return pytest.fail(str(exc))

    wb_path_v0 = await provider.validate_path(folder_path)
    assert wb_path_v1 == wb_path_v0
    assert wb_path_v1.branch_name == repo_metadata['mainbranch']['name']
    assert wb_path_v1.commit_sha == parent_folder_metadata['commit']['hash'][:12]

    # Dropping the trailing slash turns the folder path into a file path, which must 404.
    bad_path = folder_path.rstrip('/')
    with pytest.raises(exceptions.NotFoundError):
        await provider.validate_v1_path(bad_path)
async def test_postvalidate_put_folder(self, handler):
    """PUT to a folder with a ``name`` query argument must resolve the child target path."""
    handler.path = WaterButlerPath('/Folder1/')
    handler.kind = 'folder'
    handler.get_query_argument = mock.Mock(return_value='child!')
    handler.provider.exists = MockCoroutine(return_value=False)
    handler.provider.can_duplicate_names = MockCoroutine(return_value=False)

    await handler.postvalidate_put()

    assert handler.target_path == WaterButlerPath('/Folder1/child!/')
    handler.get_query_argument.assert_called_once_with('name', default=None)
    handler.provider.exists.assert_called_once_with(
        WaterButlerPath('/Folder1/child!', prepend=None))
async def test_chunked_upload_upload_parts(self, provider, file_stream, provider_fixtures):
    """``_upload_parts`` must slice the stream into CHUNK_SIZE pieces and upload each.

    Fix: the original used ``assert mock.called_once_with(...)``, which is not a real
    ``Mock`` assertion method -- attribute access on a mock just creates a truthy child
    mock, so those assertions always passed regardless of how ``_upload_part`` was
    called.  Use ``assert_has_calls()`` with an explicit expected-call list instead,
    matching the corrected sibling test in this file.
    """
    assert file_stream.size == 38
    provider.CHUNK_SIZE = 4
    session_id = provider_fixtures['session_metadata']['session_id']
    provider._upload_part = MockCoroutine()

    await provider._upload_parts(file_stream, session_id)

    # 38 bytes at 4 bytes/chunk -> nine full chunks plus one 2-byte remainder.
    assert provider._upload_part.call_count == 10

    expected_calls = []
    upload_args = {
        'close': False,
        'cursor': {
            'session_id': session_id,
            'offset': 0,
        }
    }
    # NOTE(review): the expected calls deliberately share one ``upload_args`` dict and
    # mutate it in place, mirroring the sibling ``assert_has_calls`` test -- presumably
    # the provider reuses its cursor dict the same way; confirm against the provider.
    for i in range(0, 9):
        upload_args['cursor']['offset'] = i * 4
        expected_calls.append(unittest.mock.call(file_stream, provider.CHUNK_SIZE, upload_args))
    upload_args['cursor']['offset'] = 36
    expected_calls.append(unittest.mock.call(file_stream, 2, upload_args))
    provider._upload_part.assert_has_calls(expected_calls, any_order=False)

    provider.CHUNK_SIZE = CHUNK_SIZE  # restore the class-level default
async def test_get_file_download_file(self, http_request):
    """A plain GET on a file delegates to ``download_file``."""
    handler = mock_handler(http_request)
    handler.download_file = MockCoroutine()

    await handler.get_file()

    handler.download_file.assert_awaited_once()
async def test_upload_limit_chunked_upload(self, provider, file_stream):
    """Streams above ``NONCHUNKED_UPLOAD_LIMIT`` must take the chunked-upload path."""
    assert file_stream.size == 38
    provider.NONCHUNKED_UPLOAD_LIMIT = 15  # force the 38-byte stream over the limit

    provider.metadata = MockCoroutine()
    provider._contiguous_upload = MockCoroutine()
    provider._chunked_upload = MockCoroutine(return_value={'id': '345'})

    upload_path = WaterButlerPath('/foobah/', _ids=('0', '1'))
    await provider.upload(file_stream, upload_path)

    provider._chunked_upload.assert_called_with(upload_path, file_stream)
    assert not provider._contiguous_upload.called

    provider.NONCHUNKED_UPLOAD_LIMIT = NONCHUNKED_UPLOAD_LIMIT  # restore class default
async def test_chunked_upload_upload_parts(self, provider, file_stream, provider_fixtures):
    """``_upload_parts`` slices a 38-byte stream into nine 4-byte chunks plus a 2-byte tail."""
    assert file_stream.size == 38
    provider.CHUNK_SIZE = 4
    session_id = provider_fixtures['session_metadata']['session_id']
    provider._upload_part = MockCoroutine()

    await provider._upload_parts(file_stream, session_id)

    assert provider._upload_part.call_count == 10

    expected_calls = []
    upload_args = {
        'close': False,
        'cursor': {'session_id': session_id, 'offset': 0, }
    }
    # NOTE(review): every expected call aliases the same ``upload_args`` dict, which is
    # mutated in place -- presumably mirroring how the provider reuses its cursor dict;
    # verify against the provider implementation.
    for chunk_index in range(0, 9):
        upload_args['cursor']['offset'] = chunk_index * 4
        expected_calls.append(unittest.mock.call(file_stream, provider.CHUNK_SIZE, upload_args))
    upload_args['cursor']['offset'] = 36
    expected_calls.append(unittest.mock.call(file_stream, 2, upload_args))
    provider._upload_part.assert_has_calls(expected_calls, any_order=False)

    provider.CHUNK_SIZE = CHUNK_SIZE  # restore the class-level default
async def test_put_file(self, handler):
    """PUT on a file target dispatches to ``upload_file`` with no arguments."""
    handler.target_path = WaterButlerPath('/file')
    handler.upload_file = MockCoroutine()

    await handler.put()

    handler.upload_file.assert_called_once_with()
async def test_get_path_metadata_with_commit_sha(self, provider, branch_metadata, path_metadata_file):
    """When the path already carries a commit SHA, the branch-to-SHA lookup is skipped."""
    path = BitbucketPath('/file0001.20bytes.txt',
                         _ids=[(COMMIT_SHA, 'develop'), (COMMIT_SHA, 'develop')])
    assert path.commit_sha == COMMIT_SHA

    provider._fetch_branch_commit_sha = MockCoroutine(return_value=COMMIT_SHA)

    file_metadata = json.loads(path_metadata_file)['root']
    query_params = {
        'format': 'meta',
        'fields': 'commit.hash,commit.date,path,size,links.history.href'
    }
    path_meta_url = '{}/?{}'.format(
        provider._build_v2_repo_url('src', COMMIT_SHA, *path.path_tuple()),
        urlencode(query_params))
    aiohttpretty.register_json_uri('GET', path_meta_url, body=file_metadata)

    result = await provider._fetch_path_metadata(path)

    assert not provider._fetch_branch_commit_sha.called
async def test_head(self, handler):
    """HEAD requests answer with file metadata in the headers only."""
    handler.path = WaterButlerPath('/file')
    handler.header_file_metadata = MockCoroutine()

    await handler.head()

    handler.header_file_metadata.assert_called_with()
async def test_get_file_metadata(self, handler):
    """GET with a ``meta`` query parameter routes to ``file_metadata``."""
    handler.file_metadata = MockCoroutine()
    handler.request.query_arguments['meta'] = ''

    await handler.get_file()

    handler.file_metadata.assert_awaited_once()
def mock_handler(http_request):
    """Build a fully-stubbed ``ProviderHandler`` for a single test.

    Since tornado 5.0, a handler cannot be shared between tests as a fixture when the
    testing tornado web server is started with ``autoreload=True``, which is enabled
    automatically if debug mode is on.  Although setting either ``autoreload=False``
    or ``debug=False`` fixes the issue, it is still better to build a fresh mock
    handler per test instead of sharing one through a fixture.

    :param http_request: the mocked HTTP request that is required to start the
                         tornado web app
    :return: a mocked handler
    """
    handler = ProviderHandler(make_app(True), http_request)
    handler.path_kwargs = {'provider': 'test', 'path': '/file', 'resource': 'guid1'}

    # Source side of the request.
    handler.path = '/test_path'
    handler.provider = MockProvider()
    handler.requested_version = None
    handler.resource = 'test_source_resource'
    handler.metadata = MockFileMetadata()

    # Destination side (exercised by move/copy tests).
    handler.dest_path = '/test_dest_path'
    handler.dest_provider = MockProvider()
    handler.dest_resource = 'test_dest_resource'
    handler.dest_meta = MockFileMetadata()

    # Request/response plumbing.
    handler.arguments = {}
    handler.write = Mock()
    handler.write_stream = MockCoroutine()
    handler.redirect = Mock()
    handler.uploader = asyncio.Future()
    handler.wsock = Mock()
    handler.writer = Mock()

    return handler
async def test_download_file_stream_redirect(self, http_request):
    """A string return from ``provider.download`` is treated as a redirect target."""
    handler = mock_handler(http_request)
    handler.provider.download = MockCoroutine(return_value='stream')

    await handler.download_file()

    handler.redirect.assert_called_once_with('stream')
async def test_upload_limit_contiguous_upload(self, provider, file_stream):
    """Streams under ``CONTIGUOUS_UPLOAD_SIZE_LIMIT`` take the single-request path."""
    assert file_stream.size == 38
    provider.CONTIGUOUS_UPLOAD_SIZE_LIMIT = 40  # keep the 38-byte stream below the limit

    provider.metadata = MockCoroutine()
    provider._contiguous_upload = MockCoroutine()
    provider._chunked_upload = MockCoroutine()

    upload_path = WaterButlerPath('/foobah')
    await provider.upload(file_stream, upload_path)

    provider._contiguous_upload.assert_called_with(file_stream, upload_path, conflict='replace')
    assert not provider._chunked_upload.called

    provider.CONTIGUOUS_UPLOAD_SIZE_LIMIT = CONTIGUOUS_UPLOAD_SIZE_LIMIT  # restore default
async def test_put_folder(self, handler):
    """PUT on a folder target dispatches to ``create_folder`` with no arguments."""
    handler.target_path = WaterButlerPath('/folder/')
    handler.create_folder = MockCoroutine()

    await handler.put()

    handler.create_folder.assert_called_once_with()
async def test_get_file_revisions_raw(self, handler, mock_revision_metadata):
    """Revision listings are JSON-API serialized before being written out."""
    handler.provider.revisions = MockCoroutine(return_value=mock_revision_metadata)

    await handler.get_file_revisions()

    serialized = [revision.json_api_serialized() for revision in mock_revision_metadata]
    handler.write.assert_called_once_with({'data': serialized})
async def test_delete(self, handler):
    """DELETE without a confirmation argument passes ``confirm_delete=0`` through."""
    handler.path = WaterButlerPath('/folder/')
    handler.provider.delete = MockCoroutine()

    await handler.delete()

    handler.provider.delete.assert_called_once_with(
        WaterButlerPath('/folder/', prepend=None), confirm_delete=0)
async def test_file_metadata(self, handler, mock_file_metadata):
    """File metadata is serialized against the handler's resource before writing."""
    handler.provider.metadata = MockCoroutine(return_value=mock_file_metadata)

    await handler.file_metadata()

    handler.write.assert_called_once_with({
        'data': mock_file_metadata.json_api_serialized(handler.resource)
    })
async def test_get_file_versions(self, query_param, handler):
    """Either ``versions`` or ``revisions`` in the query routes GET to
    ``get_file_revisions`` -- they are equivalent, with ``versions`` preferred
    for clarity."""
    handler.get_file_revisions = MockCoroutine()
    handler.request.query_arguments[query_param] = ''

    await handler.get_file()

    handler.get_file_revisions.assert_awaited_once()
async def test_get_folder_download_as_zip(self, handler):
    """Including ``zip`` in the query params triggers ``download_folder_as_zip``."""
    handler.download_folder_as_zip = MockCoroutine()
    handler.request.query_arguments['zip'] = ''

    await handler.get_folder()

    handler.download_folder_as_zip.assert_awaited_once()
async def test_delete_confirm_delete(self, handler):
    """A ``confirm_delete=1`` query parameter is forwarded to the provider as an int."""
    handler.path = WaterButlerPath('/folder/')
    handler.provider.delete = MockCoroutine()
    handler.request.query_arguments['confirm_delete'] = '1'

    await handler.delete()

    handler.provider.delete.assert_called_with(
        WaterButlerPath('/folder/', prepend=None), confirm_delete=1)
async def test_file_metadata_version(self, handler, mock_file_metadata):
    """A requested version is forwarded to the provider as the ``revision`` kwarg."""
    handler.provider.metadata = MockCoroutine(return_value=mock_file_metadata)
    handler.requested_version = 'version id'

    await handler.file_metadata()

    handler.provider.metadata.assert_called_once_with(handler.path, revision='version id')
    handler.write.assert_called_once_with({
        'data': mock_file_metadata.json_api_serialized(handler.resource)
    })
async def test_chunked_upload(self, provider, file_stream, provider_fixtures):
    """``_chunked_upload`` opens a session, uploads the parts, then completes the session."""
    assert file_stream.size == 38
    provider.CHUNK_SIZE = 4
    session_id = provider_fixtures['session_metadata']['session_id']

    provider._create_upload_session = MockCoroutine(return_value=session_id)
    provider._upload_parts = MockCoroutine()
    provider._complete_session = MockCoroutine()

    upload_path = WaterButlerPath('/foobah')
    await provider._chunked_upload(file_stream, upload_path)

    provider._create_upload_session.assert_called()
    provider._upload_parts.assert_called_with(file_stream, session_id)
    provider._complete_session.assert_called_with(
        file_stream, session_id, upload_path, conflict='replace')

    provider.CHUNK_SIZE = CHUNK_SIZE  # restore the class-level default
async def test_get_folder(self, handler, mock_folder_children):
    """GET on a folder returns the children's metadata, not the folder's own metadata.

    This should be true of all providers.
    """
    handler.provider.metadata = MockCoroutine(return_value=mock_folder_children)
    expected = [child.json_api_serialized(handler.resource) for child in mock_folder_children]

    await handler.get_folder()

    handler.write.assert_called_once_with({'data': expected})
async def test_download_file_safari_mime_type(self, extension, mimetype, handler, mock_stream):
    """If the file extension is in mime_types, the content type is overridden to
    fix issues with Safari shoving in new file extensions."""
    handler.path = WaterButlerPath('/test_path.{}'.format(extension))
    handler.provider.download = MockCoroutine(return_value=mock_stream)

    await handler.download_file()

    handler.write_stream.assert_called_once_with(mock_stream)
    assert handler._headers['Content-Type'] == bytes(mimetype, 'latin-1')
async def test_download_folder_as_zip(self, handler, mock_stream):
    """Zipped folder downloads set the zip content type and an attachment disposition."""
    handler.provider.zip = MockCoroutine(return_value=mock_stream)
    handler.path = WaterButlerPath('/test_file')

    await handler.download_folder_as_zip()

    assert handler._headers['Content-Type'] == bytes('application/zip', 'latin-1')
    disposition = bytes('attachment;filename="{}"'.format(handler.path.name + '.zip'), 'latin-1')
    assert handler._headers['Content-Disposition'] == disposition
    handler.write_stream.assert_called_once_with(mock_stream)
async def test_header_file_metadata(self, http_request, mock_file_metadata):
    """Metadata headers carry size, mtime, content type, and the serialized metadata."""
    handler = mock_handler(http_request)
    handler.provider.metadata = MockCoroutine(return_value=mock_file_metadata)

    await handler.header_file_metadata()

    assert handler._headers['Content-Length'] == '1337'
    assert handler._headers['Last-Modified'] == 'Wed, 25 Sep 1991 18:20:30 GMT'
    assert handler._headers['Content-Type'] == 'application/octet-stream'
    expected = json.dumps(mock_file_metadata.json_api_serialized(handler.resource))
    assert handler._headers['X-Waterbutler-Metadata'] == expected
async def test_download_file_range_request_header(self, http_request, mock_partial_stream):
    """A ``Range`` header produces a 206 response carrying the stream's content range."""
    handler = mock_handler(http_request)
    handler.request.headers['Range'] = 'bytes=10-100'
    handler.provider.download = MockCoroutine(return_value=mock_partial_stream)
    handler.path = WaterButlerPath('/test_file')

    await handler.download_file()

    assert handler._headers['Content-Range'] == mock_partial_stream.content_range
    assert handler.get_status() == 206
    handler.write_stream.assert_called_once_with(mock_partial_stream)
def mock_inter(monkeypatch, request):
    """Patch the move/copy machinery so transfers never reach real providers or celery.

    Returns the patched ``make_provider`` mock and the shared ``adelay`` mock so tests
    can assert on how providers and tasks were invoked.
    """
    src_provider = MockProvider()
    dest_provider = MockProvider()
    mock_make_provider = mock.Mock(side_effect=[src_provider, dest_provider])
    monkeypatch.setattr(waterbutler.server.api.v1.provider.movecopy,
                        'make_provider', mock_make_provider)

    mock_celery = MockCoroutine(return_value=(MockFileMetadata(), request.param))
    mock_adelay = MockCoroutine(return_value='4ef2d1dd-c5da-41a7-ae4a-9d0ba7a68927')
    # Both the copy and move tasks share one adelay mock; wait_on_celery resolves
    # immediately with the mocked metadata.
    monkeypatch.setattr(waterbutler.server.api.v1.provider.movecopy.tasks.copy,
                        'adelay', mock_adelay)
    monkeypatch.setattr(waterbutler.server.api.v1.provider.movecopy.tasks.move,
                        'adelay', mock_adelay)
    monkeypatch.setattr(waterbutler.server.api.v1.provider.movecopy.tasks,
                        'wait_on_celery', mock_celery)

    return mock_make_provider, mock_adelay