def test_multi_process_runner_error_propagates_from_subprocesses(self):
  """A ValueError raised in a subprocess propagates to the caller.

  NOTE(review): a later method in this class reuses this exact name, so
  Python's class-body semantics make that later definition shadow this
  one — this version never runs. Confirm which variant is intended and
  delete or rename the other.
  """
  job_count_dict = {'worker': 1, 'ps': 1}
  # assertRaisesRegexp is a deprecated alias (since Python 3.2); use
  # assertRaisesRegex.
  with self.assertRaisesRegex(ValueError, 'This is an error.'):
    MultiProcessRunner().run(
        proc_func_that_errors,
        multi_process_runner.job_count_to_cluster_spec(job_count_dict),
        timeout=20)
def test_multi_process_runner_error_propagates_from_subprocesses(self):
  """An error in a subprocess surfaces as a wrapped RuntimeError.

  The runner re-raises subprocess failures as RuntimeError with the
  original exception type and message embedded in the text.
  """
  job_count_dict = {'worker': 1, 'ps': 1}
  # assertRaisesRegexp is a deprecated alias (since Python 3.2); use
  # assertRaisesRegex.
  with self.assertRaisesRegex(
      RuntimeError, 'Exception raised by subprocess: RuntimeError: '
      'This is an error.'):
    multi_process_runner.run(
        proc_func_that_errors,
        multi_process_runner.job_count_to_cluster_spec(job_count_dict),
        timeout=20)
def test_multi_process_runner_args_passed_correctly(self):
  """Positional args and kwargs reach the subprocess and round-trip back."""
  cluster_spec = multi_process_runner.job_count_to_cluster_spec(
      {'worker': 1})
  results = MultiProcessRunner().run(
      proc_func_that_return_args_and_kwargs,
      cluster_spec,
      args=('a', 'b'),
      kwargs={'c_k': 'c_v'})
  # The single worker echoes back its args followed by its kwarg items.
  worker_result = results[0]
  self.assertEqual(worker_result[0], 'a')
  self.assertEqual(worker_result[1], 'b')
  self.assertEqual(worker_result[2], ('c_k', 'c_v'))
def test_stdout_captured(self):
  """Calling run() before initialization raises RuntimeError.

  NOTE(review): a later method in this class reuses this exact name, so
  that later definition shadows this one at class-creation time — this
  version never runs. The name here also does not match the behavior
  being asserted (an uninitialized-runner error, not stdout capture);
  confirm which variant is intended and delete or rename the other.
  """
  def simple_func():
    return 'foobar'

  job_count_dict = {'worker': 1}
  # assertRaisesRegexp is a deprecated alias (since Python 3.2); use
  # assertRaisesRegex.
  with self.assertRaisesRegex(
      RuntimeError, '`multi_process_runner` is not initialized.'):
    MultiProcessRunner().run(
        simple_func,
        multi_process_runner.job_count_to_cluster_spec(job_count_dict))
def test_signal_doesnt_fire_after_process_exits(self):
  """No further signal messages are enqueued once the subprocess exits.

  Runs a no-op subprocess that exits after 10 seconds, waits past that
  deadline, then checks the runner's internal queue is empty.
  """
  job_count_dict = {'worker': 1}
  mpr = MultiProcessRunner()
  mpr.run(
      proc_func_that_does_nothing,
      multi_process_runner.job_count_to_cluster_spec(job_count_dict),
      time_to_exit=10)
  time.sleep(15)
  # assertRaisesRegexp with an empty pattern matches any message; it is
  # also a deprecated alias. assertRaises states the intent directly.
  with self.assertRaises(Queue.Empty):
    # If the signal was fired, another message would be added to internal
    # queue, so verifying it's empty.
    mpr._get_internal_queue().get(block=False)
def test_multi_process_runner_queue_emptied_between_runs(self):
  """Return data from one run does not leak into the next run."""
  cluster_spec = multi_process_runner.job_count_to_cluster_spec(
      {'worker': 2})
  # First run: both workers contribute one entry each.
  first_results = MultiProcessRunner().run(
      proc_func_that_adds_simple_return_data, cluster_spec)
  self.assertTrue(first_results)
  self.assertEqual(first_results[0], 'dummy_data')
  self.assertEqual(first_results[1], 'dummy_data')
  # Second run adds nothing; stale entries from the first run must not
  # appear.
  second_results = MultiProcessRunner().run(
      proc_func_that_does_nothing, cluster_spec)
  self.assertFalse(second_results)
def test_process_that_exits(self):
  """Data added after the forced exit time is not returned."""
  def func_to_exit_in_10_sec():
    # 'foo' lands before the 10-second exit; 'bar' would land after and
    # must be dropped.
    time.sleep(5)
    multi_process_runner.add_return_data('foo')
    time.sleep(20)
    multi_process_runner.add_return_data('bar')

  cluster_spec = multi_process_runner.job_count_to_cluster_spec(
      {'worker': 1})
  results = multi_process_runner.run(
      func_to_exit_in_10_sec, cluster_spec, time_to_exit=10)
  self.assertLen(results, 1)
def test_stdout_captured(self):
  """Stdout from every subprocess is captured when return_std_stream=True."""
  def simple_print_func():
    print('This is something printed.')
    return 'This is returned data.'

  cluster_spec = multi_process_runner.job_count_to_cluster_spec(
      {'worker': 2})
  returned_data, std_stream_data = MultiProcessRunner().run(
      simple_print_func, cluster_spec, return_std_stream=True)
  # Both workers print once and return once.
  printed_count = sum(
      1 for line in std_stream_data if line == 'This is something printed.')
  returned_count = sum(
      1 for item in returned_data if item == 'This is returned data.')
  self.assertEqual(printed_count, 2)
  self.assertEqual(returned_count, 2)
def test_multi_process_runner(self):
  """Exactly the requested number of subprocesses runs per task type."""
  job_count_dict = {'worker': 2, 'ps': 3, 'evaluator': 2}
  returned_data = MultiProcessRunner().run(
      proc_func_that_adds_task_type_in_return_data,
      multi_process_runner.job_count_to_cluster_spec(job_count_dict),
      proc_flags={'test_flag': 3},
      args=(self,))
  # Each subprocess reports its task type; decrement the expected count
  # and verify every type reaches exactly zero.
  for task_type in returned_data:
    job_count_dict[task_type] -= 1
  for task_type in ('worker', 'ps', 'evaluator'):
    self.assertEqual(job_count_dict[task_type], 0)