def create_project_response(requests, size, overlap, result_type,
                            encoding_size=128, uses_blocking=False):
    """
    Create a project with the given size, overlap and result_type.

    Tests that use one of these projects will get a dict like the following:

    {
        "project_id": "ID",
        "upload-mode": "BINARY" | "JSON",
        "size": [size 1, size 2],
        "encoding-size": int number of bytes in each encoding e.g. 128,
        "overlap": float between 0 and 1,
        "uses_blocking": bool,
        "result_token": "TOKEN",
        "upload_tokens": [TOKENS, ...],
        "dp_responses": [<JSON RESPONSE TO DATA UPLOAD>, ...]
    }
    """
    project, dp_responses = create_project_upload_fake_data(
        requests, size, overlap=overlap, result_type=result_type,
        encoding_size=encoding_size)
    project.update({
        'size': size,
        'encoding-size': encoding_size,
        'upload-mode': 'JSON',
        'uses_blocking': uses_blocking,
        'overlap': overlap,
        'dp_responses': dp_responses
    })
    return project


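# Illustrative sketch (not part of the original suite): how a test might
# consume the dict returned by create_project_response. The keys asserted
# here are taken from the docstring above; the sizes and result_type mirror
# values already used elsewhere in this module. Named without a `test_`
# prefix so pytest does not collect it.
def _example_project_response_usage(requests):
    project = create_project_response(
        requests, [100, 100], overlap=0.5, result_type='groups')
    assert project['upload-mode'] == 'JSON'
    assert project['encoding-size'] == 128
    assert len(project['dp_responses']) == 2

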
def _create_data_linkage_run(requests, result_type_number_parties):
    result_type, number_parties = result_type_number_parties
    project, _ = create_project_upload_fake_data(
        requests, [100] * number_parties, overlap=0.5,
        result_type=result_type)
    run_id = post_run(requests, project, 1.0)
    return project, run_id


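# Illustrative sketch (not part of the original suite): end-to-end use of
# _create_data_linkage_run followed by result retrieval, mirroring the
# pattern of the tests below. Assumes the same `requests` and
# `result_type_number_parties` fixtures those tests use; named without a
# `test_` prefix so pytest does not collect it.
def _example_data_linkage_run(requests, result_type_number_parties):
    project, run_id = _create_data_linkage_run(
        requests, result_type_number_parties)
    result = get_run_result(requests, project, run_id, wait=True)
    assert result is not None

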
def test_project_json_data_upload_with_too_small_encoded_size(
        requests, result_type_number_parties):
    result_type, number_parties = result_type_number_parties
    new_project_data, _ = create_project_upload_fake_data(
        requests, [500] * number_parties, overlap=0.8,
        result_type=result_type,
        encoding_size=4
    )
    with pytest.raises(AssertionError):
        run_id = post_run(requests, new_project_data, 0.9)
        get_run_result(requests, new_project_data, run_id, wait=True)


def test_project_json_data_upload_with_invalid_encoded_size(
        requests, result_type_number_parties):
    result_type, number_parties = result_type_number_parties
    new_project_data, _ = create_project_upload_fake_data(
        requests, [500] * number_parties, overlap=0.8,
        result_type=result_type,
        encoding_size=20  # not a multiple of 8
    )
    with pytest.raises(AssertionError):
        run_id = post_run(requests, new_project_data, 0.9)
        get_run_result(requests, new_project_data, run_id,
                       wait=True, timeout=240)


def test_project_json_data_upload_with_various_encoded_sizes(
        requests, encoding_size, result_type_number_parties):
    result_type, number_parties = result_type_number_parties
    new_project_data, _ = create_project_upload_fake_data(
        requests, [500] * number_parties, overlap=0.8,
        result_type=result_type,
        encoding_size=encoding_size
    )
    run_id = post_run(requests, new_project_data, 0.9)
    result = get_run_result(requests, new_project_data, run_id, wait=True)
    if result_type == 'groups':
        assert 'groups' in result
        # This is a pretty bad bound, but we're not testing the accuracy.
        assert len(result['groups']) >= 400


def test_project_json_data_upload_with_too_large_encoded_size(
        requests, result_type_number_parties):
    result_type, number_parties = result_type_number_parties
    new_project_data, _ = create_project_upload_fake_data(
        requests, [500] * number_parties, overlap=0.8,
        result_type=result_type,
        encoding_size=4096)

    # Poll the project description until it reports an error, or give up
    # after max_rep attempts. Use .get() so a response that omits the
    # 'error' key doesn't raise KeyError mid-loop.
    project_description = {'error': False}
    rep = 0
    max_rep = 10
    while not project_description.get('error', False) and rep < max_rep:
        rep += 1
        time.sleep(2)
        project_description = requests.get(
            url + '/projects/{}'.format(new_project_data['project_id']),
            headers={
                'Authorization': new_project_data['result_token']
            }).json()

    assert 'error' in project_description
    assert project_description['error']


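# Possible refactor sketch (an assumption, not the project's API): the
# polling loop above could be extracted into a reusable helper so other
# tests can wait for a project-level error the same way. `url` is the
# module-level service URL already used above.
def _wait_for_project_error(requests, project, max_rep=10, delay=2):
    """Poll the project description until it reports an error or give up."""
    description = {'error': False}
    rep = 0
    while not description.get('error', False) and rep < max_rep:
        rep += 1
        time.sleep(delay)
        description = requests.get(
            url + '/projects/{}'.format(project['project_id']),
            headers={'Authorization': project['result_token']}).json()
    return description
# e.g. description = _wait_for_project_error(requests, new_project_data)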