def test_delete_project_handles_500(requests_mock):
    """A 500 from the service must surface as ServiceError, and the
    API key must still have been sent as the Authorization header."""
    requests_mock.delete('http://testing-es-url/api/v1/projects/pid',
                         text='', status_code=500)
    client = RestClient('http://testing-es-url')

    with pytest.raises(ServiceError):
        client.project_delete('pid', 'mykey')

    assert requests_mock.last_request.headers['Authorization'] == 'mykey'
def test_delete_project_handles_503(requests_mock):
    """A 503 must raise ServiceError even with retries configured.

    Very short retry timings keep the test fast while still driving
    the client through its retry path.
    """
    fast_retries = ClientWaitingConfiguration(
        wait_exponential_max_ms=10,
        wait_exponential_multiplier_ms=1,
        stop_max_delay_ms=10)
    client = RestClient('http://testing-es-url', fast_retries)
    requests_mock.delete('http://testing-es-url/api/v1/projects/pid',
                         text='', status_code=503)

    with pytest.raises(ServiceError):
        client.project_delete('pid', 'mykey')

    assert requests_mock.last_request.headers['Authorization'] == 'mykey'
class TestCliInteractionWithService(CLITestHelper):
    """End-to-end tests driving the CLI against a live entity service.

    The service URL is read from the ``TEST_ENTITY_SERVICE`` environment
    variable. Every project created through ``_create_project`` is
    tracked in ``_created_projects`` and deleted in ``tearDown`` so test
    runs do not leak state on the server.
    """

    # Options that every service-talking command is expected to accept.
    server_options = [
        '--server', '--retry-multiplier', '--retry-exponential-max',
        '--retry-max-time'
    ]
    # Fast retry settings (milliseconds) used to exercise the retry
    # code paths without slowing the tests down.
    retry_options_values = [
        '--retry-multiplier', 50, '--retry-exponential-max', 1000,
        '--retry-max-time', 30000
    ]

    def setUp(self):
        """Hash the sample PII into two CLK files and reset project tracking."""
        super(TestCliInteractionWithService, self).setUp()
        self.url = os.environ['TEST_ENTITY_SERVICE']
        self.rest_client = RestClient(self.url)

        self.clk_file = create_temp_file()
        self.clk_file_2 = create_temp_file()

        # hash some PII for uploading
        # TODO don't need to rehash data for every test
        runner = CliRunner()
        cli_result = runner.invoke(clkhash.cli.cli, [
            'hash', self.pii_file.name, 'secret', SIMPLE_SCHEMA_PATH,
            self.clk_file.name
        ])
        assert cli_result.exit_code == 0

        cli_result = runner.invoke(clkhash.cli.cli, [
            'hash', self.pii_file_2.name, 'secret', SIMPLE_SCHEMA_PATH,
            self.clk_file_2.name
        ])
        assert cli_result.exit_code == 0

        self.clk_file.close()
        self.clk_file_2.close()
        self._created_projects = []

    def tearDown(self):
        """Remove the temp CLK files and delete projects made by the test."""
        super(TestCliInteractionWithService, self).tearDown()
        # Remove each file independently so a failure on the first does
        # not leave the second behind, and only swallow OS-level removal
        # errors (the previous bare ``except`` hid every exception type,
        # including KeyboardInterrupt).
        for filename in (self.clk_file.name, self.clk_file_2.name):
            try:
                os.remove(filename)
            except OSError:
                pass
        self.delete_created_projects()

    def delete_created_projects(self):
        """Best-effort deletion of every project created during the test."""
        for project in self._created_projects:
            try:
                self.rest_client.project_delete(project['project_id'],
                                                project['result_token'])
            except KeyError:
                # Creation failed part-way; the dict is missing keys.
                pass
            except ServiceError:
                # probably already deleted
                pass

    def _create_project(self, project_args=None):
        """Create a project via the CLI and remember it for cleanup.

        ``project_args`` maps option names (without the ``--`` prefix)
        to their values. Returns the parsed JSON output of
        ``create-project``.
        """
        command = [
            'create-project', '--server', self.url, '--schema',
            SIMPLE_SCHEMA_PATH
        ]
        if project_args is not None:
            for key in project_args:
                command.append('--{}'.format(key))
                command.append(project_args[key])
        project = self.run_command_load_json_output(command)
        self._created_projects.append(project)
        return project

    def _create_project_and_run(self, project_args=None, run_args=None):
        """Create a project then a run; return ``(project, run)`` dicts.

        A ``threshold`` in ``run_args`` overrides the 0.99 default.
        """
        project = self._create_project(project_args)

        threshold = run_args[
            'threshold'] if run_args is not None and 'threshold' in run_args else 0.99

        command = [
            'create',
            '--server', self.url,
            '--threshold', str(threshold),
            '--project', project['project_id'],
            '--apikey', project['result_token'],
        ]
        if run_args is not None:
            for key in run_args:
                command.append('--{}'.format(key))
                command.append(run_args[key])
        run = self.run_command_load_json_output(command)
        return project, run

    def _test_helps(self, command, list_expected_commands,
                    include_server_options=False):
        """Assert ``command --help`` output mentions each expected option."""
        runner = CliRunner()
        result = runner.invoke(clkhash.cli.cli, [command, '--help'])
        if include_server_options:
            list_to_test = self.server_options + list_expected_commands
        else:
            list_to_test = list_expected_commands
        for option in list_to_test:
            self.assertIn(option, result.output)

    def test_status_help(self):
        self._test_helps('status', ['--output', '--verbose', '--help'],
                         include_server_options=True)

    def test_status(self):
        self.run_command_load_json_output(['status', '--server', self.url])

    def test_status_retry_options(self):
        self.run_command_load_json_output(['status', '--server', self.url] +
                                          self.retry_options_values)

    def test_status_invalid_server_raises(self):
        with pytest.raises(AssertionError) as exec_info:
            self.run_command_capture_output(
                ['status', '--server', 'https://example.com'])
        self.assertIn('invalid choice', exec_info.value.args[0])

    def _test_create_project(self, out):
        """Shared assertions on a (2-party) ``create-project`` response."""
        self.assertIn('project_id', out)
        self.assertIn('result_token', out)
        self.assertIn('update_tokens', out)

        self.assertGreaterEqual(len(out['project_id']), 16)
        self.assertGreaterEqual(len(out['result_token']), 16)
        self.assertGreaterEqual(len(out['update_tokens']), 2)

    def test_create_project(self):
        out = self._create_project()
        self._test_create_project(out)

    def test_create_project_retry_options(self):
        out = self._create_project({
            'retry-multiplier': 50,
            'retry-exponential-max': 1000,
            'retry-max-time': 30000
        })
        self._test_create_project(out)

    def test_create_project_help(self):
        self._test_helps('create-project', [
            '--type', '--schema', '--name', '--parties', '--output',
            '--verbose', '--help'
        ],
                         include_server_options=True)

    def test_create_project_2_party(self):
        out = self._create_project(project_args={'parties': '2'})
        self._test_create_project(out)

    def test_create_project_multi_party(self):
        out = self._create_project(project_args={
            'parties': '3',
            'type': 'groups'
        })
        self.assertIn('project_id', out)
        self.assertIn('result_token', out)
        self.assertIn('update_tokens', out)

        self.assertGreaterEqual(len(out['project_id']), 16)
        self.assertGreaterEqual(len(out['result_token']), 16)
        # Three parties means three upload tokens.
        self.assertGreaterEqual(len(out['update_tokens']), 3)

    def test_create_project_invalid_parties_type(self):
        # More than two parties is only valid for the 'groups' type.
        with pytest.raises(AssertionError) as exec_info:
            self._create_project(project_args={'parties': '3'})
        assert "requires result type 'groups'" in exec_info.value.args[0]

    def test_create_project_bad_type(self):
        with pytest.raises(AssertionError) as exec_info:
            self._create_project(project_args={'type': 'invalid'})
        assert 'invalid choice' in exec_info.value.args[0]

    def test_create_project_and_run(self):
        project, run = self._create_project_and_run()
        self.assertIn('project_id', project)
        self.assertIn('run_id', run)

    def _test_delete_run(self, extra_arguments):
        """Delete a run via the CLI and verify the service forgot it."""
        project, run = self._create_project_and_run()
        command = [
            'delete', '--server', self.url, '--project',
            project['project_id'], '--run', run['run_id'], '--apikey',
            project['result_token']
        ] + extra_arguments
        runner = CliRunner()
        cli_result = runner.invoke(clkhash.cli.cli, command)
        assert cli_result.exit_code == 0, cli_result.output
        # TODO get runs and check it is gone?
        with pytest.raises(ServiceError):
            self.rest_client.run_get_status(project['project_id'],
                                            project['result_token'],
                                            run['run_id'])

    def test_delete_run(self):
        self._test_delete_run([])

    def test_delete_run_retry_options(self):
        self._test_delete_run(self.retry_options_values)

    def test_delete_run_help(self):
        self._test_helps(
            'delete',
            ['--project', '--run', '--apikey', '--verbose', '--help'],
            include_server_options=True)

    def _test_delete_project(self, extra_arguments):
        """Delete a project via the CLI and verify the service forgot it."""
        project, run = self._create_project_and_run()
        runner = CliRunner()
        command = [
            'delete-project', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['result_token']
        ] + extra_arguments
        cli_result = runner.invoke(clkhash.cli.cli, command)
        assert cli_result.exit_code == 0, cli_result.output
        with pytest.raises(ServiceError):
            self.rest_client.project_get_description(project['project_id'],
                                                     project['result_token'])

    def test_delete_project(self):
        self._test_delete_project([])

    def test_delete_project_retry_options(self):
        self._test_delete_project(self.retry_options_values)

    def test_delete_project_help(self):
        self._test_helps('delete-project',
                         ['--project', '--apikey', '--verbose', '--help'],
                         include_server_options=True)

    def test_create_help(self):
        self._test_helps('create', [
            '--name', '--project', '--apikey', '--output', '--threshold',
            '--verbose', '--help'
        ],
                         include_server_options=True)

    def test_create_with_optional_name(self):
        out = self._create_project({'name': 'testprojectname'})
        self._test_create_project(out)

    def test_create_with_bad_schema(self):
        # Make sure we don't succeed with bad schema.
        schema_path = os.path.join(os.path.dirname(__file__), 'testdata',
                                   'bad-schema-v1.json')
        with pytest.raises(AssertionError):
            self.run_command_load_json_output([
                'create-project', '--server', self.url, '--schema',
                schema_path
            ])

    def test_upload_help(self):
        self._test_helps(
            'upload',
            ['--project', '--apikey', '--output', '--verbose', '--help'],
            include_server_options=True)

    def _test_single_upload(self, extra_arguments):
        project = self._create_project()

        # Upload
        self.run_command_load_json_output([
            'upload', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['update_tokens'][0],
            self.clk_file.name
        ] + extra_arguments)

    def test_single_upload(self):
        self._test_single_upload([])

    def test_single_upload_retry_options(self):
        self._test_single_upload(self.retry_options_values)

    def test_2_party_upload_and_results(self):
        project, run = self._create_project_and_run()

        def get_coord_results():
            # Get results from coordinator
            return self.run_command_capture_output([
                'results', '--server', self.url, '--project',
                project['project_id'], '--run', run['run_id'], '--apikey',
                project['result_token']
            ])

        # Upload Alice
        alice_upload = self.run_command_load_json_output([
            'upload', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['update_tokens'][0],
            self.clk_file.name
        ])
        self.assertIn('receipt_token', alice_upload)

        # No results should be available until all parties have uploaded.
        out_early = get_coord_results()
        self.assertEqual("", out_early)

        # Upload Bob (subset of clks uploaded)
        bob_upload = self.run_command_load_json_output([
            'upload', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['update_tokens'][1],
            self.clk_file_2.name
        ])
        self.assertIn('receipt_token', bob_upload)

        # Use the rest client to wait until the run is complete
        self.rest_client.wait_for_run(project['project_id'], run['run_id'],
                                      project['result_token'],
                                      timeout=ES_TIMEOUT)

        results_raw = get_coord_results()
        res = json.loads(results_raw)
        self.assertIn('mask', res)

        # Should be close to half ones. This is really just testing the service
        # not the command line tool.
        number_in_common = res['mask'].count(1)
        self.assertGreaterEqual(number_in_common / self.SAMPLES, 0.4)
        self.assertLessEqual(number_in_common / self.SAMPLES, 0.6)

        # Get results from first DP
        alice_res = self.run_command_load_json_output([
            'results', '--server', self.url, '--project',
            project['project_id'], '--run', run['run_id'], '--apikey',
            alice_upload['receipt_token']
        ])
        self.assertIn('permutation', alice_res)
        self.assertIn('rows', alice_res)

        # Get results from second DP
        bob_res = self.run_command_load_json_output([
            'results', '--server', self.url, '--project',
            project['project_id'], '--run', run['run_id'], '--apikey',
            bob_upload['receipt_token'], '--watch'
        ])
        self.assertIn('permutation', bob_res)
        self.assertIn('rows', bob_res)

    def test_multi_party_upload_and_results(self):
        project, run = self._create_project_and_run({
            'type': 'groups',
            'parties': '3'
        })

        def get_coord_results():
            # Get results from coordinator
            return self.run_command_capture_output([
                'results', '--server', self.url, '--project',
                project['project_id'], '--run', run['run_id'], '--apikey',
                project['result_token']
            ])

        # Upload Alice
        alice_upload = self.run_command_load_json_output([
            'upload', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['update_tokens'][0],
            self.clk_file.name
        ])
        self.assertIn('receipt_token', alice_upload)

        # No results should be available until all parties have uploaded.
        out_early = get_coord_results()
        self.assertEqual("", out_early)

        # Upload Bob (subset of clks uploaded)
        bob_upload = self.run_command_load_json_output([
            'upload', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['update_tokens'][1],
            self.clk_file_2.name
        ])
        self.assertIn('receipt_token', bob_upload)

        out_early = get_coord_results()
        self.assertEqual("", out_early)

        # Upload Charlie (we're lazy and just reuse Bob)
        charlie_upload = self.run_command_load_json_output([
            'upload', '--server', self.url, '--project',
            project['project_id'], '--apikey', project['update_tokens'][2],
            self.clk_file_2.name
        ])
        self.assertIn('receipt_token', charlie_upload)

        self.rest_client.wait_for_run(project['project_id'], run['run_id'],
                                      project['result_token'],
                                      timeout=ES_TIMEOUT)

        results = get_coord_results()
        res = json.loads(results)
        self.assertIn('groups', res)

        # Recall that Bob and Charlie have the same samples. These form
        # half of Alice's samples.
        groups = res['groups']
        assert self.SAMPLES * .45 <= len(groups) <= self.SAMPLES * .55

        number_of_groups_of_three = sum(len(group) == 3 for group in groups)
        assert number_of_groups_of_three >= .9 * len(groups)