def test_delete_project_after_creating_run_with_clks(
        requests, result_type_number_parties):
    """Deleting a project must still work after a run has been posted.

    NOTE(review): a later definition of this same name exists in this
    module and shadows this one at import time, so pytest never collects
    this version — confirm which is intended and remove the other.
    """
    result_type, parties = result_type_number_parties
    project, _ = create_project_upload_fake_data(
        requests,
        [100] * parties,
        overlap=0.5,
        result_type=result_type)
    # Kick off a run, then immediately request deletion of the project.
    post_run(requests, project, 0.9)
    delete_project(requests, project)
def test_run_description(requests, result_type):
    """A fetched run description exposes id, notes and threshold fields.

    NOTE(review): a later ``test_run_description`` in this module shadows
    this one, so pytest never collects it — confirm and delete whichever
    version is obsolete.
    """
    project, _, _ = create_project_upload_fake_data(
        requests, [100, 100], overlap=0.5, result_type=result_type)
    run_id = post_run(requests, project, 0.98)
    description = get_run(requests, project, run_id)
    # The API response must carry all three descriptive keys.
    for field in ('run_id', 'notes', 'threshold'):
        assert field in description
def test_project_json_data_upload_with_too_small_encoded_size(requests):
    """Running a project whose encodings are only 4 bytes must fail.

    NOTE(review): a later definition of this same name exists in this
    module and shadows this one at import time — confirm which version is
    intended and remove the other.
    """
    project, _, _ = create_project_upload_fake_data(
        requests,
        [500, 500],
        overlap=0.95,
        result_type='mapping',
        encoding_size=4)
    # Either posting the run or polling its result is expected to trip an
    # AssertionError for an undersized encoding.
    with pytest.raises(AssertionError):
        run_id = post_run(requests, project, 0.9)
        get_run_result(requests, project, run_id, wait=True)
def test_project_json_data_upload_with_various_encoded_sizes(
        requests, encoding_size):
    """Mapping runs succeed across the range of supported encoding sizes.

    NOTE(review): a later definition of this same name exists in this
    module and shadows this one at import time — confirm which version is
    intended and remove the other.
    """
    project, _, _ = create_project_upload_fake_data(
        requests,
        [500, 500],
        overlap=0.95,
        result_type='mapping',
        encoding_size=encoding_size)
    run_id = post_run(requests, project, 0.9)
    outcome = get_run_result(requests, project, run_id, wait=True)
    assert 'mapping' in outcome
    # With 95% overlap of 500 records we expect the solver to recover
    # nearly all of the true matches.
    assert len(outcome['mapping']) >= 475
def test_project_json_data_upload_with_too_small_encoded_size(
        requests, result_type_number_parties):
    """Running a project whose encodings are only 4 bytes must fail."""
    result_type, parties = result_type_number_parties
    project, _ = create_project_upload_fake_data(
        requests,
        [500] * parties,
        overlap=0.8,
        result_type=result_type,
        encoding_size=4,
    )
    # Either posting the run or polling its result is expected to trip an
    # AssertionError for an undersized encoding.
    with pytest.raises(AssertionError):
        run_id = post_run(requests, project, 0.9)
        get_run_result(requests, project, run_id, wait=True)
def test_run_description(requests, result_type_number_parties):
    """A fetched run echoes back the threshold it was created with."""
    expected_threshold = .98
    result_type, parties = result_type_number_parties
    project, _ = create_project_upload_fake_data(
        requests,
        [100] * parties,
        overlap=0.5,
        result_type=result_type)
    run_id = post_run(requests, project, expected_threshold)
    description = get_run(requests, project, run_id)
    assert 'run_id' in description
    assert 'notes' in description
    # The server must report exactly the threshold we posted.
    assert description['threshold'] == expected_threshold
def test_project_json_data_upload_with_too_large_encoded_size(requests):
    """Uploading 4096-byte encodings must put the project in error state.

    NOTE(review): a later definition of this same name exists in this
    module and shadows this one at import time — confirm which version is
    intended and remove the other.
    """
    project, _, _ = create_project_upload_fake_data(
        requests,
        [50, 50],
        overlap=0.95,
        result_type='mapping',
        encoding_size=4096)
    # Give the server a moment to validate the uploaded data.
    time.sleep(5)
    response = requests.get(
        url + '/projects/{}'.format(project['project_id']),
        headers={'Authorization': project['result_token']})
    description = response.json()
    assert 'error' in description
    assert description['error']
def test_project_json_data_upload_with_too_large_encoded_size(
        requests, result_type_number_parties):
    """Uploading 4096-byte encodings must put the project in error state."""
    result_type, parties = result_type_number_parties
    project, _ = create_project_upload_fake_data(
        requests,
        [500] * parties,
        overlap=0.8,
        result_type=result_type,
        encoding_size=4096,
    )
    # Give the server a moment to validate the uploaded data.
    time.sleep(5)
    response = requests.get(
        url + '/projects/{}'.format(project['project_id']),
        headers={'Authorization': project['result_token']})
    description = response.json()
    assert 'error' in description
    assert description['error']
def test_project_json_data_upload_with_various_encoded_sizes(
        requests, encoding_size, result_type_number_parties):
    """Runs succeed across the range of supported encoding sizes."""
    result_type, parties = result_type_number_parties
    project, _ = create_project_upload_fake_data(
        requests,
        [500] * parties,
        overlap=0.8,
        result_type=result_type,
        encoding_size=encoding_size,
    )
    run_id = post_run(requests, project, 0.9)
    outcome = get_run_result(requests, project, run_id, wait=True)
    if result_type == 'mapping':
        assert 'mapping' in outcome
        assert len(outcome['mapping']) >= 400
    elif result_type == 'groups':
        assert 'groups' in outcome
        # This is a pretty bad bound, but we're not testing the
        # accuracy.
        assert len(outcome['groups']) >= 400
def create_project_response(requests, size, overlap, result_type,
                            encoding_size=128):
    """Create a two-party project and upload fake data for it.

    Returns the project-creation response augmented with the parameters
    used, shaped like:

        {
            "project_id": "ID",
            "upload-mode": "BINARY" | "JSON",
            "size": [size 1, size 2],
            "encoding-size": int number of bytes in each encoding e.g. 128,
            "overlap": float between 0 and 1,
            "result_token": "TOKEN",
            "upload_tokens": [TOKENS, ...],
            "dp_1": <JSON RESPONSE TO DATA UPLOAD>
            "dp_2": <JSON RESPONSE TO DATA UPLOAD>
        }
    """
    project, upload_1, upload_2 = create_project_upload_fake_data(
        requests, size, overlap=overlap, result_type=result_type,
        encoding_size=encoding_size)
    # Record the creation parameters alongside the server's responses so
    # downstream tests can introspect how the project was built.
    project['size'] = size
    project['encoding-size'] = encoding_size
    project['upload-mode'] = 'JSON'
    project['overlap'] = overlap
    project['dp_1'] = upload_1
    project['dp_2'] = upload_2
    return project
def test_delete_project_after_creating_run_with_clks(requests, result_type):
    """Deleting a project must still work after a run has been posted.

    NOTE(review): an earlier function of this same name (parameterised on
    the number of parties) is shadowed by this definition — confirm which
    version is intended and remove the other.
    """
    project, _, _ = create_project_upload_fake_data(
        requests, [100, 100], overlap=0.5, result_type=result_type)
    # Kick off a run, then immediately request deletion of the project.
    post_run(requests, project, 0.9)
    delete_project(requests, project)