def test_empty_parallel_pipeline_continuation(self):
    """Continuations chained after empty in_parallel groups still fire."""
    global join_results_called
    join_results_called = False
    workflow = _print_results.send("12") \
        .continue_with(in_parallel([])) \
        .continue_with(in_parallel([])) \
        .continue_with(_join_results)
    outcome = self.test_harness.run_pipeline(workflow)
    # _join_results must have been reached despite the empty groups
    self.assertTrue(join_results_called)
    self.assertEqual(outcome, None)
def test_empty_parallel_pipeline(self):
    """A pipeline built only from empty groups still reaches its continuation."""
    global join_results_called
    join_results_called = False
    workflow = in_parallel([in_parallel([])]) \
        .continue_with(in_parallel([])) \
        .continue_with(_join_results)
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertTrue(join_results_called)
    self.assertEqual(outcome, None)
def test_nested_parallel_workflow(self):
    """A group nested inside another group yields a nested result list."""
    inner_group = in_parallel([
        _say_hello.send("John", "Smith"),
        _say_hello.send("Jane", "Doe").continue_with(
            _say_goodbye, goodbye_message="see you later!"),
    ])
    outcome = self.test_harness.run_pipeline(in_parallel([inner_group]))
    expected = [[
        'Hello John Smith',
        'Hello Jane Doe. So now I will say see you later!',
    ]]
    self.assertEqual(outcome, expected)
def test_continuation_into_nested_parallel_workflow(self):
    """A single task's result fans out into a nested parallel group."""
    greeting = _say_hello.send("John", "Smith")
    workflow = greeting.continue_with(in_parallel([
        in_parallel([
            _say_goodbye.send(goodbye_message="see you later!"),
            _say_goodbye.send(goodbye_message="see you later!"),
        ])
    ]))
    outcome = self.test_harness.run_pipeline(workflow)
    # both branches receive the same upstream greeting
    self.assertEqual(outcome, [[
        'Hello John Smith. So now I will say see you later!',
        'Hello John Smith. So now I will say see you later!',
    ]])
def test_parallel_workflow_with_max_parallelism(self):
    """A throttled nested group runs correctly inside a wider group."""
    jane = _say_hello.send("Jane", "Doe")
    throttled = in_parallel([
        _say_hello.send("Bob", "Smith").continue_with(
            _say_goodbye, goodbye_message="see you later!"),
        _say_hello.send("Tom", "Smith"),
    ], max_parallelism=1)
    workflow = in_parallel([jane, throttled]).continue_with(_print_results)
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(
        outcome,
        "['Hello Jane Doe', ['Hello Bob Smith. So now I will say see you later!', 'Hello Tom Smith']]"
    )
def parallel_workflow(first_name, last_name):
    """Build a two-task parallel group whose results are then counted.

    NOTE(review): first_name/last_name are accepted but not used here —
    the member tasks use hard-coded names; confirm this is intentional.
    """
    greetings = in_parallel([
        _say_hello.send("John", "Smith"),
        _say_hello.send("Jane", "Doe").continue_with(
            _say_goodbye, goodbye_message="see you later!"),
    ])
    return greetings.continue_with(_count_results)
def test_passing_multiple_args_into_parallel_pipeline(self):
    """Each group member receives the upstream task's output as arguments."""
    name_source = _generate_name.send()
    workflow = name_source.continue_with(in_parallel([
        _add_greeting.send('Hello'),
        _add_greeting.send('Goodbye'),
    ]))
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(outcome, ['Hello John Smith', 'Goodbye John Smith'])
def test_passing_arg_into_parallel_pipeline(self):
    """The upstream result is forwarded to every member of the group."""
    source = _print_results.send('John')
    workflow = source.continue_with(in_parallel([
        _say_hello.send(last_name='Smith'),
        _say_hello.send(last_name='Doe'),
    ]))
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(outcome, ['Hello John Smith', 'Hello John Doe'])
def test_parallel_workflow_with_max_parallelism_as_continuation(self):
    """A throttled group can itself be the continuation of a single task."""
    greeting = _say_hello.send("Jane", "Doe")
    workflow = greeting.continue_with(
        in_parallel([_print_results.send(), _print_results.send()],
                    max_parallelism=1))
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(outcome, ['Hello Jane Doe', 'Hello Jane Doe'])
def test_parallel_workflow_with_error_and_continuations(self):
    """A failure in one chain skips its continuations but not sibling chains."""
    global join_results_called
    join_results_called = False
    failing_chain = _fail.send().continue_with(in_parallel([
        _say_hello.send("John", "Smith").continue_with(_join_results)
    ]))  # fails at its first step, so _join_results never runs
    workflow = in_parallel([
        failing_chain,
        _say_goodbye.send("John", "Bye"),  # this chain will proceed
    ])
    # overall the pipeline surfaces the error as an exception
    self.assertRaises(TaskErrorException, self.test_harness.run_pipeline,
                      workflow)
    self.assertEqual(join_results_called, False)
    self.assertEqual(say_goodbye_called, True)
def test_nested_parallel_workflow_continuations(self):
    """Every nesting level's continuation runs, from innermost outwards."""
    innermost = in_parallel([
        _say_hello.send("John", "Smith"),
        _say_hello.send("Jane", "Doe").continue_with(
            _say_goodbye, goodbye_message="see you later!"),
    ]).continue_with(_join_results)
    middle = in_parallel([innermost]).continue_with(_join_results2)
    workflow = in_parallel([middle]).continue_with(_join_results3)
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(
        outcome,
        'Hello John Smith; Hello Jane Doe. So now I will say see you later!'
    )
    # all three join continuations must have executed
    self.assertEqual(join_results_called, True)
    self.assertEqual(join_results2_called, True)
    self.assertEqual(join_results3_called, True)
def test_parallel_workflow_with_state(self):
    """State carried by the member tasks is visible to the aggregator."""
    members = [
        _say_hello_with_state.send("John", "Smith"),
        _say_hello_with_state.send("Jane", "Doe").continue_with(
            _say_goodbye, goodbye_message="see you later!"),
    ]
    workflow = in_parallel(members).continue_with(_join_results_with_state)
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(
        outcome,
        'Hello John Smith; Hello Jane Doe. So now I will say see you later! Average name length is 8.0'
    )
def test_parallel_workflow_with_error(self):
    """A failing member aborts the group before its continuation runs."""
    global join_results_called
    join_results_called = False
    workflow = in_parallel([
        _say_hello.send("John", "Smith"),
        _fail.send(),
    ]).continue_with(_join_results)
    self.assertRaises(TaskErrorException, self.test_harness.run_pipeline,
                      workflow)
    # the continuation must not have been invoked
    self.assertEqual(join_results_called, False)
def test_continuation_into_parallel_workflow_with_two_continations(self):
    """Two chained continuations after a group each see the joined result."""
    # NOTE(review): "continations" in the method name is a typo; kept so the
    # test runner's reported name is unchanged.
    greeting = _say_hello.send("John", "Smith")
    workflow = greeting.continue_with(in_parallel([
        _say_goodbye.send(goodbye_message="see you later!"),
        _say_goodbye.send(goodbye_message="see you later!"),
    ])).continue_with(_join_results).continue_with(_print_results)
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(
        outcome,
        'Hello John Smith. So now I will say see you later!; Hello John Smith. So now I will say see you later!'
    )
def test_pipeline_with_group_that_waits(self):
    """A .wait() after a group pauses the pipeline before its continuation."""
    pipeline = in_parallel([
        tasks.send(_say_hello, 'Jane', 'Doe'),
        tasks.send(_say_hello, 'Joe', 'Blogs')
    ]).wait().continue_with(_say_goodbye, goodbye_message="see you later!")
    self.test_harness.run_pipeline(pipeline)
    # entries[i][2] is presumably each entry's task id, checked against the
    # _started tracking set — TODO confirm against get_tasks()'s tuple layout
    entries = pipeline.get_tasks()
    self.assertIn(entries[0][2], _started)  # first task in group
    self.assertIn(entries[1][2], _started)  # second task in group
    self.assertNotIn(entries[2][2], _started)  # task after group
    # the pipeline itself must be recorded as paused at the wait barrier
    self.assertIn(pipeline.id, _status)
    self.assertEqual(_status[pipeline.id], TaskStatus.PAUSED)
def test_parallel_workflow_with_aggregated_dictionary_state(self):
    """Dictionary state from each member is aggregated for the continuation."""
    jane = _say_hello_with_initial_state.send("Jane", "Doe", {
        'A': 2,
        'B': 2
    }).continue_with(_say_goodbye, goodbye_message="see you later!")
    john = _say_hello_with_initial_state.send("John", "Smith", {'A': 1})
    workflow = in_parallel([jane, john]).continue_with(
        _join_results_with_dictionary_state)
    outcome = self.test_harness.run_pipeline(workflow)
    self.assertEqual(
        outcome,
        "Hello Jane Doe. So now I will say see you later!; Hello John Smith OrderedDict([('A', 2), ('B', 2)])"
    )
def test_pipeline_with_group_that_waits_and_then_is_cancelled(self):
    """Cancelling a paused pipeline marks it CANCELLED and returns an error result."""
    pipeline = in_parallel([
        tasks.send(_say_hello, 'Jane', 'Doe'),
        tasks.send(_say_hello, 'Joe', 'Blogs')
    ]).wait().continue_with(_say_goodbye, goodbye_message="see you later!")
    self.test_harness.run_pipeline(pipeline)
    # entries[i][2] is presumably each entry's task id — TODO confirm layout
    entries = pipeline.get_tasks()
    self.assertIn(entries[0][2], _started)  # first task in group
    self.assertIn(entries[1][2], _started)  # second task in group
    self.assertNotIn(entries[2][2], _started)  # task after group
    self.assertIn(pipeline.id, _status)
    self.assertEqual(_status[pipeline.id], TaskStatus.PAUSED)
    # cancel while paused at the wait barrier
    self.test_harness.run_action(pipeline, TaskAction.CANCEL_PIPELINE)
    self.assertEqual(_status[pipeline.id], TaskStatus.CANCELLED)
    self.assertNotIn(entries[2][2], _started)  # task after group
    # pipeline result is an exception
    action_result = self.test_harness.run_action(pipeline,
                                                 TaskAction.GET_RESULT)
    task_exception = self._unpack(action_result.result, TaskException)
    self.assertEqual(task_exception.id, pipeline.id)
def state_passing_workflow_with_exception_in_parallel():
    """Build a stateful workflow whose parallel stage raises, ending in finally_do cleanup."""
    workflow = _create_workflow_state.send(initial_state=456)
    workflow = workflow.continue_with(_add_two_numbers, 3, 5)
    workflow = workflow.continue_with(_update_workflow_state)
    workflow = workflow.continue_with(in_parallel([_raise_exception.send()]))
    return workflow.finally_do(_destroy_workflow_state)
def test_empty_parallel_workflow(self):
    """Running an empty group yields an empty tuple."""
    outcome = self.test_harness.run_pipeline(in_parallel([]))
    self.assertEqual(outcome, ())
def a_parallel_pipeline(state):
    """Return the given state plus a two-task group feeding an aggregator."""
    group = in_parallel(
        [a_task_that_sets_state.send() for _ in range(2)])
    return state, group.continue_with(an_aggregation_task)