def test_engine_tasks_on_success_finish(self):
    """Drive a workflow through SUCCESS results until it finishes."""
    # Start workflow.
    execution = self.engine.start_workflow_execution(WB_NAME,
                                                     "test_subsequent",
                                                     CONTEXT)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.assertEqual(len(tasks), 1)
    execution = db_api.execution_get(WB_NAME, execution['id'])
    task = self._assert_single_item(tasks, name='test_subsequent')
    # Make 'test_subsequent' task successful.
    self.engine.convey_task_result(WB_NAME, execution['id'], task['id'],
                                   states.SUCCESS, None)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    # Three follow-up tasks are created on success.
    self.assertEqual(len(tasks), 4)
    self._assert_single_item(tasks, name='test_subsequent',
                             state=states.SUCCESS)
    self._assert_single_item(tasks, name='attach-volumes',
                             state=states.IDLE)
    tasks2 = self._assert_multiple_items(tasks, 2, name='create-vms',
                                         state=states.RUNNING)
    # Make 2 'create-vms' tasks successful.
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks2[0]['id'], states.SUCCESS, None)
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks2[1]['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self._assert_multiple_items(tasks, 2, name='create-vms',
                                state=states.SUCCESS)
    task = self._assert_single_item(tasks, name='attach-volumes',
                                    state=states.RUNNING)
    # Make 'attach-volumes' task successful.
    self.engine.convey_task_result(WB_NAME, execution['id'], task['id'],
                                   states.SUCCESS, None)
    execution = db_api.execution_get(WB_NAME, execution['id'])
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self._assert_multiple_items(tasks, 4, state=states.SUCCESS)
def run_delayed_task():
    """
    Runs the delayed task. Performs all the steps required to setup
    a task to run which are not already done. This is mostly code
    copied over from convey_task_result.
    """
    # NOTE(review): `task`, `outbound_context` and `cls` are closed over
    # from the enclosing scope -- this appears to be a nested helper;
    # confirm against the enclosing method.
    db_api.start_tx()
    try:
        workbook_name = task['workbook_name']
        execution_id = task['execution_id']
        execution = db_api.execution_get(workbook_name, execution_id)
        # Change state from DELAYED to IDLE to unblock processing.
        WORKFLOW_TRACE.info("Task '%s' [%s -> %s]"
                            % (task['name'], task['state'], states.IDLE))
        db_task = db_api.task_update(workbook_name, execution_id,
                                     task['id'], {"state": states.IDLE})
        task_to_start = [db_task]
        data_flow.prepare_tasks(task_to_start, outbound_context)
        db_api.commit_tx()
    finally:
        # Always release the transaction, even on failure.
        db_api.end_tx()
    # Only start the task if the execution is still active.
    if not states.is_stopped_or_finished(execution["state"]):
        cls._run_tasks(task_to_start)
def test_with_one_task(self):
    """One-task workflow: executor call args, result conveyance, output."""
    execution = self.engine.start_workflow_execution(WB_NAME, "build_name",
                                                     CONTEXT)
    task = db_api.tasks_get(workbook_name=WB_NAME,
                            execution_id=execution['id'])[0]
    # The executor must have been notified exactly once with these params.
    executor.ExecutorClient.handle_task\
        .assert_called_once_with(auth_context.ctx(),
                                 params={'output': 'Stormin Stanley'},
                                 task_id=task['id'],
                                 action_name='std.echo')
    self.engine.convey_task_result(task['id'], states.SUCCESS,
                                   {'output': 'Stormin Stanley'})
    task = db_api.tasks_get(workbook_name=WB_NAME,
                            execution_id=execution['id'])[0]
    execution = db_api.execution_get(execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(task['state'], states.SUCCESS)
    self.assertEqual(
        task['output'],
        {'task': {'build_name': {'string': 'Stormin Stanley'}}})
def handle_task(self, cntx, **kwargs):
    """Handle the execution of the workbook task.

    :param cntx: a request context dict
    :type cntx: dict
    :param kwargs: a dict of method arguments
    :type kwargs: dict
    """
    try:
        task = kwargs.get('task', None)
        if not task:
            raise Exception('No task is provided to the executor.')
        LOG.info("Received a task: %s" % task)
        db_task = db_api.task_get(task['workbook_name'],
                                  task['execution_id'],
                                  task['id'])
        db_exec = db_api.execution_get(task['workbook_name'],
                                       task['execution_id'])
        # Records may have been deleted concurrently.
        if not db_exec or not db_task:
            return
        # Only run tasks that are still IDLE in a RUNNING execution.
        if db_exec['state'] != states.RUNNING or \
                db_task['state'] != states.IDLE:
            return
        # Move the task to RUNNING *before* performing the action.
        # If the action completes (and its result is conveyed) first,
        # updating afterwards would clobber the terminal task state.
        db_api.task_update(task['workbook_name'],
                           task['execution_id'],
                           task['id'],
                           {'state': states.RUNNING})
        self._do_task_action(db_task)
    except Exception as e:
        LOG.exception(e)
        self._handle_task_error(task, e)
def test_require_flow(self):
    """A dependent task runs after its requirement succeeds."""
    execution = self.engine.start_workflow_execution(WB_NAME, "greet",
                                                     CONTEXT)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.engine.convey_task_result(tasks[0]['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.assertIsNotNone(tasks)
    self.assertEqual(2, len(tasks))
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    self.assertEqual(tasks[1]['state'], states.RUNNING)
    self.assertEqual(states.RUNNING,
                     self.engine.get_workflow_execution_state(
                         WB_NAME, execution['id']))
    self.engine.convey_task_result(tasks[1]['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    execution = db_api.execution_get(execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    self.assertEqual(tasks[1]['state'], states.SUCCESS)
    self.assertEqual(states.SUCCESS,
                     self.engine.get_workflow_execution_state(
                         WB_NAME, execution['id']))
def test_add_token_to_context(self):
    """Keystone token/user data are injected under 'openstack' in context."""
    task_name = "create-vms"
    cfg.CONF.pecan.auth_enable = True
    try:
        workbook = create_workbook("test_rest.yaml")
        db_api.workbook_update(workbook['name'], {'trust_id': '123'})
        execution = self.engine.start_workflow_execution(workbook['name'],
                                                         task_name, {})
        tasks = db_api.tasks_get(workbook_name=workbook['name'],
                                 execution_id=execution['id'])
        task = self._assert_single_item(tasks, name=task_name)
        openstack_context = task['in_context']['openstack']
        self.assertIn("auth_token", openstack_context)
        self.assertEqual(TOKEN, openstack_context['auth_token'])
        self.assertEqual(USER_ID, openstack_context["user_id"])
        self.engine.convey_task_result(task['id'], states.SUCCESS, {})
        execution = db_api.execution_get(execution['id'])
        self.assertEqual(states.SUCCESS, execution['state'])
    finally:
        # Restore the global auth flag so other tests are unaffected.
        cfg.CONF.pecan.auth_enable = False
def test_add_token_to_context(self):
    """Token and user data land directly in the task's in_context."""
    task_name = "create-vms"
    cfg.CONF.pecan.auth_enable = True
    try:
        workbook = create_workbook("test_rest.yaml")
        db_api.workbook_update(workbook['name'], {'trust_id': '123'})
        execution = self.engine.start_workflow_execution(
            workbook['name'], task_name, {})
        tasks = db_api.tasks_get(workbook['name'], execution['id'])
        task = self._assert_single_item(tasks, name=task_name)
        context = task['in_context']
        self.assertIn("auth_token", context)
        self.assertEqual(TOKEN, context['auth_token'])
        self.assertEqual(USER_ID, context["user_id"])
        self.engine.convey_task_result(workbook['name'], execution['id'],
                                       task['id'], states.SUCCESS, {})
        execution = db_api.execution_get(workbook['name'],
                                         execution['id'])
        self.assertEqual(states.SUCCESS, execution['state'])
    finally:
        # Restore the global auth flag so other tests are unaffected.
        cfg.CONF.pecan.auth_enable = False
def run_delayed_task(context):
    """Runs the delayed task. Performs all the steps required to setup
    a task to run which are not already done. This is mostly code
    copied over from convey_task_result.

    :param context Mistral authentication context inherited from a
        caller thread.
    """
    # NOTE(review): `task`, `outbound_context`, `workbook` and `self` are
    # closed over from the enclosing scope; confirm this helper is nested
    # inside the scheduling method.
    auth_context.set_ctx(context)
    db_api.start_tx()
    try:
        execution_id = task['execution_id']
        execution = db_api.execution_get(execution_id)
        # Change state from DELAYED to RUNNING.
        WORKFLOW_TRACE.info("Task '%s' [%s -> %s]"
                            % (task['name'], task['state'],
                               states.RUNNING))
        executables = data_flow.prepare_tasks([task],
                                              outbound_context,
                                              workbook)
        db_api.commit_tx()
    finally:
        # Always release the transaction, even on failure.
        db_api.end_tx()
    # Skip starting tasks if the execution was stopped/finished meanwhile.
    if states.is_stopped_or_finished(execution['state']):
        return
    for task_id, action_name, action_params in executables:
        self._run_task(task_id, action_name, action_params)
def test_engine_one_task(self):
    """Single-task workflow reaches SUCCESS after the task succeeds."""
    # Start workflow.
    execution = ENGINE.start_workflow_execution(WB_NAME, "create-vms",
                                                CONTEXT)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.assertEqual(1, len(tasks))
    self._assert_single_item(tasks, name='create-vms',
                             state=states.RUNNING)
    # Make 'create-vms' task successful.
    ENGINE.convey_task_result(WB_NAME, execution['id'], tasks[0]['id'],
                              states.SUCCESS, None)
    execution = db_api.execution_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.assertEqual(1, len(tasks))
    self._assert_single_item(tasks, name='create-vms',
                             state=states.SUCCESS)
def get_workflow_execution_state(cls, workbook_name, execution_id):
    """Return the current state of the given workflow execution."""
    record = db_api.execution_get(workbook_name, execution_id)
    if record:
        return record["state"]
    raise exc.EngineException("Workflow execution not found "
                              "[workbook_name=%s, execution_id=%s]"
                              % (workbook_name, execution_id))
def test_engine_sync_task(self):
    """Synchronous task finishes without an explicit convey call."""
    execution = ENGINE.start_workflow_execution(WB_NAME, "create-vm-nova",
                                                CONTEXT)
    task = db_api.tasks_get(WB_NAME, execution['id'])[0]
    execution = db_api.execution_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(task['state'], states.SUCCESS)
def test_engine_sync_task(self):
    """Synchronous task finishes without an explicit convey call."""
    execution = self.engine.start_workflow_execution(WB_NAME,
                                                     "create-vm-nova",
                                                     CONTEXT)
    task = db_api.tasks_get(WB_NAME, execution['id'])[0]
    execution = db_api.execution_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(task['state'], states.SUCCESS)
def get(self, workbook_name, id):
    """Return one execution resource; respond 404 when it is missing."""
    LOG.debug("Fetch execution [workbook_name=%s, id=%s]" %
              (workbook_name, id))
    record = db_api.execution_get(workbook_name, id)
    if record:
        return Execution.from_dict(record)
    abort(404)
def test_with_one_sync_task(self):
    """Synchronous one-task workflow completes immediately."""
    execution = self.engine.start_workflow_execution(WB_NAME, "build_name",
                                                     CONTEXT)
    task = db_api.tasks_get(workbook_name=WB_NAME,
                            execution_id=execution['id'])[0]
    execution = db_api.execution_get(execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(task['state'], states.SUCCESS)
def test_engine_task_std_action_with_namespaces(self):
    """A namespaced std action task runs and the execution succeeds."""
    execution = self.engine.start_workflow_execution(WB_NAME,
                                                     "std_http_task", {})
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    execution = db_api.execution_get(execution['id'])
    self.assertEqual(1, len(tasks))
    self.assertEqual(states.SUCCESS, tasks[0]['state'])
    self.assertEqual(states.SUCCESS, execution['state'])
def test_engine_with_no_namespaces(self):
    """A workbook without namespaces still executes to SUCCESS."""
    execution = self.engine.start_workflow_execution(WB_NAME, "task1", {})
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    execution = db_api.execution_get(execution['id'])
    self.assertIsNotNone(tasks)
    self.assertEqual(1, len(tasks))
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    self.assertEqual(execution['state'], states.SUCCESS)
def convey_task_result(cls, workbook_name, execution_id, task_id,
                       state, result):
    """Process a task result and advance the workflow.

    Updates the finished task's state/output, recomputes the execution
    state and starts tasks whose dependencies are now resolved.

    :param workbook_name: name of the workbook the task belongs to
    :param execution_id: id of the workflow execution
    :param task_id: id of the task whose result is being conveyed
    :param state: new task state (e.g. SUCCESS or ERROR)
    :param result: raw action result used to build the task output
    :return: the updated task
    :raises EngineException: if any DB operation fails
    """
    db_api.start_tx()
    try:
        # Fetch the workbook *inside* the try block: a failure here must
        # still reach the finally clause so the transaction is released.
        workbook = cls._get_workbook(workbook_name)
        #TODO(rakhmerov): validate state transition
        task = db_api.task_get(workbook_name, execution_id, task_id)
        task_output = data_flow.get_task_output(task, result)
        # Update task state.
        task = db_api.task_update(workbook_name, execution_id, task_id,
                                  {"state": state, "output": task_output})
        execution = db_api.execution_get(workbook_name, execution_id)
        # Calculate task outbound context.
        outbound_context = data_flow.get_outbound_context(task)
        cls._create_next_tasks(task, workbook)
        # Determine what tasks need to be started.
        tasks = db_api.tasks_get(workbook_name, execution_id)
        new_exec_state = cls._determine_execution_state(execution, tasks)
        if execution['state'] != new_exec_state:
            execution = \
                db_api.execution_update(workbook_name, execution_id, {
                    "state": new_exec_state
                })
            LOG.info("Changed execution state: %s" % execution)
        tasks_to_start = workflow.find_resolved_tasks(tasks)
        data_flow.prepare_tasks(tasks_to_start, outbound_context)
        db_api.commit_tx()
    except Exception as e:
        raise exc.EngineException("Failed to create necessary DB objects:"
                                  " %s" % e)
    finally:
        db_api.end_tx()
    # Nothing else to run once the execution has stopped or finished.
    if states.is_stopped_or_finished(execution["state"]):
        return task
    if tasks_to_start:
        cls._run_tasks(tasks_to_start)
    return task
def test_direct_flow_on_success_finish(self):
    """Direct workflow: on-success edges fire until completion."""
    # Start workflow.
    execution = self.engine.start_workflow_execution(WB_NAME,
                                                     "start-task",
                                                     CONTEXT)
    # Only the first task is RUNNING
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.assertEqual(len(tasks), 1)
    task = self._assert_single_item(tasks,
                                    name='start-task',
                                    state=states.RUNNING)
    # Make 'start-task' successful.
    self.engine.convey_task_result(task['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.assertEqual(len(tasks), 3)
    self._assert_single_item(tasks,
                             name='start-task',
                             state=states.SUCCESS)
    task1 = self._assert_single_item(tasks,
                                     name='task-one',
                                     state=states.RUNNING)
    self._assert_single_item(tasks,
                             name='task-two',
                             state=states.RUNNING)
    # Make 'task-one' tasks successful.
    self.engine.convey_task_result(task1['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    # A second 'task-two' instance is spawned by task-one's success.
    tasks_2 = self._assert_multiple_items(tasks, 2,
                                          name='task-two',
                                          state=states.RUNNING)
    # Make both 'task-two' task successful.
    self.engine.convey_task_result(tasks_2[0]['id'], states.SUCCESS, None)
    self.engine.convey_task_result(tasks_2[1]['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    execution = db_api.execution_get(execution['id'])
    self._assert_multiple_items(tasks, 4, state=states.SUCCESS)
    self.assertEqual(execution['state'], states.SUCCESS)
def test_engine_one_task(self):
    """Single-task workflow reaches SUCCESS after conveying the result."""
    execution = self.engine.start_workflow_execution(WB_NAME, "create-vms",
                                                     CONTEXT)
    task = db_api.tasks_get(WB_NAME, execution['id'])[0]
    self.engine.convey_task_result(WB_NAME, execution['id'], task['id'],
                                   states.SUCCESS, None)
    task = db_api.tasks_get(WB_NAME, execution['id'])[0]
    execution = db_api.execution_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(task['state'], states.SUCCESS)
def handle_task(self, cntx, **kwargs):
    """Handle the execution of the workbook task.

    :param cntx: a request context dict
    :type cntx: dict
    :param kwargs: a dict of method arguments
    :type kwargs: dict
    """
    try:
        task = kwargs.get('task', None)
        if not task:
            raise Exception('No task is provided to the executor.')
        LOG.info("Received a task: %s" % task)
        db_task = db_api.task_get(task['workbook_name'],
                                  task['execution_id'],
                                  task['id'])
        db_exec = db_api.execution_get(task['workbook_name'],
                                       task['execution_id'])
        # Records may have been deleted concurrently.
        if not db_exec or not db_task:
            return
        # Only run tasks that are still IDLE in a RUNNING execution.
        if db_exec['state'] != states.RUNNING or \
                db_task['state'] != states.IDLE:
            return
        # Update the state to running before performing action. The
        # do_task_action assigns state to the task which is the appropriate
        # value to preserve.
        WORKFLOW_TRACE.info("Task '%s' [%s -> %s]"
                            % (db_task['name'],
                               db_task['state'], states.RUNNING))
        db_api.task_update(task['workbook_name'],
                           task['execution_id'],
                           task['id'],
                           {'state': states.RUNNING})
        self._do_task_action(db_task)
    except Exception as e:
        LOG.exception(e)
        self._handle_task_error(task, e)
def test_direct_flow_on_error_finish(self):
    """Direct workflow: on-error edges fire after the first task fails."""
    # Start workflow.
    execution = self.engine.start_workflow_execution(WB_NAME,
                                                     "start-task",
                                                     CONTEXT)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.assertEqual(execution['state'], states.RUNNING)
    start_task = self._assert_single_item(tasks,
                                          name='start-task',
                                          state=states.RUNNING)
    # Make 'start-task' task fail.
    self.engine.convey_task_result(start_task['id'], states.ERROR,
                                   CONTEXT)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.assertEqual(len(tasks), 4)
    task3 = self._assert_single_item(tasks,
                                     name='task-three',
                                     state=states.RUNNING)
    task2 = self._assert_single_item(tasks,
                                     name='task-two',
                                     state=states.RUNNING)
    task4 = self._assert_single_item(tasks,
                                     name='task-four',
                                     state=states.RUNNING)
    # Make all running tasks successful.
    self.engine.convey_task_result(task2['id'], states.SUCCESS, None)
    self.engine.convey_task_result(task3['id'], states.SUCCESS, None)
    self.engine.convey_task_result(task4['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    execution = db_api.execution_get(execution['id'])
    self._assert_multiple_items(tasks, 3, state=states.SUCCESS)
    self._assert_single_item(tasks, state=states.ERROR)
    # One failed task does not fail the execution when handled on-error.
    self.assertEqual(execution['state'], states.SUCCESS)
def handle_task(self, cntx, **kwargs):
    """Handle the execution of the workbook task.

    :param cntx: a request context dict
    :type cntx: dict
    :param kwargs: a dict of method arguments
    :type kwargs: dict
    """
    try:
        task = kwargs.get('task', None)
        if not task:
            raise Exception('No task is provided to the executor.')
        LOG.info("Received a task: %s" % task)
        db_task = db_api.task_get(task['workbook_name'],
                                  task['execution_id'],
                                  task['id'])
        db_exec = db_api.execution_get(task['workbook_name'],
                                       task['execution_id'])
        # Records may have been deleted concurrently.
        if not db_exec or not db_task:
            return
        # Only run tasks that are still IDLE in a RUNNING execution.
        if db_exec['state'] != states.RUNNING or \
                db_task['state'] != states.IDLE:
            return
        # Update the state to running before performing action. The
        # do_task_action assigns state to the task which is the appropriate
        # value to preserve.
        WORKFLOW_TRACE.info(
            "Task '%s' [%s -> %s]"
            % (db_task['name'], db_task['state'], states.RUNNING))
        db_api.task_update(task['workbook_name'],
                           task['execution_id'],
                           task['id'],
                           {'state': states.RUNNING})
        self._do_task_action(db_task)
    except Exception as e:
        LOG.exception(e)
        self._handle_task_error(task, e)
def get_workflow_execution_state(self, cntx, **kwargs):
    """Gets the workflow execution state.

    :param cntx: a request context dict
    :type cntx: dict
    :param kwargs: a dict of method arguments
    :type kwargs: dict
    :return: Current workflow state.
    """
    workbook_name = kwargs.get('workbook_name')
    execution_id = kwargs.get('execution_id')
    # NOTE(review): `workbook_name` is only used in the error message;
    # the lookup itself is by execution id alone.
    execution = db_api.execution_get(execution_id)
    if not execution:
        raise exc.EngineException("Workflow execution not found "
                                  "[workbook_name=%s, execution_id=%s]"
                                  % (workbook_name, execution_id))
    return execution["state"]
def test_require_flow(self):
    """'requires' dependency unblocks the second task (executor mocked)."""
    execution = self.engine.start_workflow_execution(WB_NAME, "backup-vms",
                                                     CONTEXT)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.engine.convey_task_result(tasks[0]['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    self.assertIsNotNone(tasks)
    self.assertEqual(2, len(tasks))
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    # Since we mocked out executor notification we expect IDLE
    # for the second task.
    self.assertEqual(tasks[1]['state'], states.IDLE)
    self.assertEqual(states.RUNNING,
                     self.engine.get_workflow_execution_state(
                         WB_NAME, execution['id']))
    self.engine.convey_task_result(tasks[1]['id'], states.SUCCESS, None)
    tasks = db_api.tasks_get(workbook_name=WB_NAME,
                             execution_id=execution['id'])
    execution = db_api.execution_get(execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    self.assertEqual(tasks[1]['state'], states.SUCCESS)
    self.assertEqual(states.SUCCESS,
                     self.engine.get_workflow_execution_state(
                         WB_NAME, execution['id']))
def test_engine_multiple_tasks(self):
    """Two chained tasks complete in order (executor mocked)."""
    execution = self.engine.start_workflow_execution(WB_NAME, "backup-vms",
                                                     CONTEXT)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks[0]['id'],
                                   states.SUCCESS, None)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.assertIsNotNone(tasks)
    self.assertEqual(2, len(tasks))
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    # Since we mocked out executor notification we expect IDLE
    # for the second task.
    self.assertEqual(tasks[1]['state'], states.IDLE)
    self.assertEqual(states.RUNNING,
                     self.engine.get_workflow_execution_state(
                         WB_NAME, execution['id']))
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks[1]['id'],
                                   states.SUCCESS, None)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    execution = db_api.execution_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertEqual(tasks[0]['state'], states.SUCCESS)
    self.assertEqual(tasks[1]['state'], states.SUCCESS)
    self.assertEqual(states.SUCCESS,
                     self.engine.get_workflow_execution_state(
                         WB_NAME, execution['id']))
def test_three_subsequent_tasks(self):
    """Data flow across three chained tasks: contexts, inputs, outputs."""
    wb = create_workbook('data_flow/three_subsequent_tasks.yaml')
    execution = ENGINE.start_workflow_execution(wb['name'],
                                                'build_full_name',
                                                CONTEXT)
    # We have to reread execution to get its latest version.
    execution = db_api.execution_get(execution['workbook_name'],
                                     execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    self.assertDictEqual(execution['context'], CONTEXT)
    tasks = db_api.tasks_get(wb['name'], execution['id'])
    self.assertEqual(3, len(tasks))
    build_full_name_task =\
        self._assert_single_item(tasks, name='build_full_name')
    build_greeting_task =\
        self._assert_single_item(tasks, name='build_greeting')
    send_greeting_task =\
        self._assert_single_item(tasks, name='send_greeting')
    # Check the first task.
    self.assertEqual(states.SUCCESS, build_full_name_task['state'])
    self.assertDictEqual(CONTEXT, build_full_name_task['in_context'])
    self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
                         build_full_name_task['input'])
    self.assertDictEqual(
        {
            'f_name': 'John Doe',
            'task': {
                'build_full_name': {
                    'full_name': 'John Doe'
                }
            }
        },
        build_full_name_task['output'])
    # Check the second task.
    in_context = CONTEXT.copy()
    in_context['f_name'] = 'John Doe'
    self.assertEqual(states.SUCCESS, build_greeting_task['state'])
    self.assertEqual('John Doe',
                     build_greeting_task['in_context']['f_name'])
    self.assertDictEqual({'full_name': 'John Doe'},
                         build_greeting_task['input'])
    self.assertDictEqual(
        {
            'greet_msg': 'Hello, John Doe!',
            'task': {
                'build_greeting': {
                    'greeting': 'Hello, John Doe!',
                }
            }
        },
        build_greeting_task['output'])
    # Strip keys added by the data flow so the remainder matches CONTEXT.
    del build_greeting_task['in_context']['f_name']
    del build_greeting_task['in_context']['task']
    self.assertDictEqual(CONTEXT, build_greeting_task['in_context'])
    # Check the third task.
    in_context = CONTEXT.copy()
    in_context['f_name'] = 'John Doe'
    in_context['greet_msg'] = 'Hello, John Doe!'
    self.assertEqual(states.SUCCESS, send_greeting_task['state'])
    self.assertEqual('John Doe',
                     send_greeting_task['in_context']['f_name'])
    self.assertEqual('Hello, John Doe!',
                     send_greeting_task['in_context']['greet_msg'])
    self.assertDictEqual({'greeting': 'Hello, John Doe!'},
                         send_greeting_task['input'])
    self.assertDictEqual(
        {
            'sent': True,
            'task': {
                'send_greeting': {
                    'greeting_sent': True,
                }
            }
        },
        send_greeting_task['output'])
    # Strip keys added by the data flow so the remainder matches CONTEXT.
    del send_greeting_task['in_context']['f_name']
    del send_greeting_task['in_context']['greet_msg']
    del send_greeting_task['in_context']['task']
    self.assertDictEqual(CONTEXT, send_greeting_task['in_context'])
def test_two_dependent_tasks(self):
    """Data flow between two dependent tasks: contexts, params, output."""
    CTX = copy.copy(CONTEXT)
    wb = create_workbook('data_flow/two_dependent_tasks.yaml')
    execution = self.engine.start_workflow_execution(
        wb['name'], 'build_greeting', CTX)
    # We have to reread execution to get its latest version.
    execution = db_api.execution_get(execution['workbook_name'],
                                     execution['id'])
    self.assertEqual(states.SUCCESS, execution['state'])
    self.assertDictEqual(CTX, execution['context'])
    tasks = db_api.tasks_get(wb['name'], execution['id'])
    self.assertEqual(2, len(tasks))
    build_full_name_task = \
        self._assert_single_item(tasks, name='build_full_name')
    build_greeting_task = \
        self._assert_single_item(tasks, name='build_greeting')
    # Check the first task.
    self.assertEqual(states.SUCCESS, build_full_name_task['state'])
    self._check_in_context_execution(build_full_name_task)
    del build_full_name_task['in_context']['__execution']
    self.assertDictEqual(CTX, build_full_name_task['in_context'])
    self.assertDictEqual({
        'first_name': 'John',
        'last_name': 'Doe'
    }, build_full_name_task['parameters'])
    self.assertDictEqual(
        {
            'f_name': 'John Doe',
            'task': {
                'build_full_name': {
                    'full_name': 'John Doe'
                }
            }
        },
        build_full_name_task['output'])
    # Check the second task.
    in_context = CTX
    in_context['f_name'] = 'John Doe'
    self.assertEqual(states.SUCCESS, build_greeting_task['state'])
    self.assertEqual('John Doe',
                     build_greeting_task['in_context']['f_name'])
    self.assertDictEqual({'full_name': 'John Doe'},
                         build_greeting_task['parameters'])
    self.assertDictEqual(
        {'task': {
            'build_greeting': {
                'greeting': 'Hello, John Doe!',
            }
        }},
        build_greeting_task['output'])
    # Strip keys added by the data flow so the remainder matches CTX.
    del build_greeting_task['in_context']['task']
    self._check_in_context_execution(build_greeting_task)
    del build_greeting_task['in_context']['__execution']
    self.assertDictEqual(CTX, build_greeting_task['in_context'])
def test_engine_tasks_on_error_finish(self):
    """Drive the on-error branch of the workflow to completion."""
    # Start workflow.
    execution = self.engine.start_workflow_execution(WB_NAME,
                                                     "test_subsequent",
                                                     CONTEXT)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    execution = db_api.execution_get(WB_NAME, execution['id'])
    # Make 'test_subsequent' task fail (triggers the on-error tasks).
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks[0]['id'],
                                   states.ERROR, None)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self.assertEqual(len(tasks), 6)
    self._assert_single_item(tasks, name='backup-vms', state=states.IDLE)
    self._assert_single_item(tasks, name='test_subsequent',
                             state=states.ERROR)
    self._assert_single_item(tasks, name='attach-volumes',
                             state=states.IDLE)
    tasks2 = self._assert_multiple_items(tasks, 3, name='create-vms',
                                         state=states.RUNNING)
    # Make 'create-vms' tasks successful.
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks2[0]['id'],
                                   states.SUCCESS, None)
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks2[1]['id'],
                                   states.SUCCESS, None)
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   tasks2[2]['id'],
                                   states.SUCCESS, None)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    task1 = self._assert_single_item(tasks, name='backup-vms',
                                     state=states.RUNNING)
    task2 = self._assert_single_item(tasks, name='attach-volumes',
                                     state=states.RUNNING)
    self._assert_multiple_items(tasks, 3, name='create-vms',
                                state=states.SUCCESS)
    # Make tasks 'backup-vms' and 'attach-volumes' successful.
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   task1['id'], states.SUCCESS, None)
    self.engine.convey_task_result(WB_NAME, execution['id'],
                                   task2['id'], states.SUCCESS, None)
    execution = db_api.execution_get(WB_NAME, execution['id'])
    self.assertEqual(execution['state'], states.SUCCESS)
    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    self._assert_single_item(tasks, state=states.ERROR)
    self._assert_multiple_items(tasks, 5, state=states.SUCCESS)
def test_two_dependent_tasks(self):
    """Data flow between two dependent tasks (keyword-arg db_api API)."""
    CTX = copy.copy(CONTEXT)
    wb = create_workbook('data_flow/two_dependent_tasks.yaml')
    execution = self.engine.start_workflow_execution(wb['name'],
                                                     'build_greeting',
                                                     CTX)
    # We have to reread execution to get its latest version.
    execution = db_api.execution_get(execution['id'])
    self.assertEqual(states.SUCCESS, execution['state'])
    self.assertDictEqual(CTX, execution['context'])
    tasks = db_api.tasks_get(workbook_name=wb['name'],
                             execution_id=execution['id'])
    self.assertEqual(2, len(tasks))
    build_full_name_task = \
        self._assert_single_item(tasks, name='build_full_name')
    build_greeting_task = \
        self._assert_single_item(tasks, name='build_greeting')
    # Check the first task.
    self.assertEqual(states.SUCCESS, build_full_name_task['state'])
    self._check_in_context_execution(build_full_name_task)
    del build_full_name_task['in_context']['__execution']
    self.assertDictEqual(CTX, build_full_name_task['in_context'])
    self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
                         build_full_name_task['parameters'])
    self.assertDictEqual(
        {
            'f_name': 'John Doe',
            'task': {
                'build_full_name': {
                    'full_name': 'John Doe'
                }
            }
        },
        build_full_name_task['output'])
    # Check the second task.
    in_context = CTX
    in_context['f_name'] = 'John Doe'
    self.assertEqual(states.SUCCESS, build_greeting_task['state'])
    self.assertEqual('John Doe',
                     build_greeting_task['in_context']['f_name'])
    self.assertDictEqual({'full_name': 'John Doe'},
                         build_greeting_task['parameters'])
    self.assertDictEqual(
        {
            'task': {
                'build_greeting': {
                    'greeting': 'Hello, John Doe!',
                }
            }
        },
        build_greeting_task['output'])
    # Strip keys added by the data flow so the remainder matches CTX.
    del build_greeting_task['in_context']['task']
    self._check_in_context_execution(build_greeting_task)
    del build_greeting_task['in_context']['__execution']
    self.assertDictEqual(CTX, build_greeting_task['in_context'])
def convey_task_result(cls, workbook_name, execution_id, task_id,
                       state, result):
    """Process a task result and advance the workflow.

    Updates the task's state/output, recomputes the execution state,
    schedules delayed tasks and starts tasks whose dependencies are
    now resolved.

    :param workbook_name: name of the workbook the task belongs to
    :param execution_id: id of the workflow execution
    :param task_id: id of the task whose result is being conveyed
    :param state: new task state (e.g. SUCCESS or ERROR)
    :param result: raw action result used to build the task output
    :return: the updated task
    :raises EngineException: if any DB operation fails
    """
    db_api.start_tx()
    try:
        workbook = cls._get_workbook(workbook_name)
        #TODO(rakhmerov): validate state transition
        task = db_api.task_get(workbook_name, execution_id, task_id)
        wf_trace_msg = "Task '%s' [%s -> %s" % \
            (task['name'], task['state'], state)
        # Don't log results for failed tasks.
        wf_trace_msg += ']' if state == states.ERROR \
            else ", result = %s]" % result
        WORKFLOW_TRACE.info(wf_trace_msg)
        task_output = data_flow.get_task_output(task, result)
        # Update task state.
        task, outbound_context = cls._update_task(workbook, task, state,
                                                  task_output)
        execution = db_api.execution_get(workbook_name, execution_id)
        cls._create_next_tasks(task, workbook)
        # Determine what tasks need to be started.
        tasks = db_api.tasks_get(workbook_name, execution_id)
        new_exec_state = cls._determine_execution_state(execution, tasks)
        if execution['state'] != new_exec_state:
            wf_trace_msg = \
                "Execution '%s' [%s -> %s]" % \
                (execution_id, execution['state'], new_exec_state)
            WORKFLOW_TRACE.info(wf_trace_msg)
            execution = \
                db_api.execution_update(workbook_name, execution_id, {
                    "state": new_exec_state
                })
            LOG.info("Changed execution state: %s" % execution)
        tasks_to_start, delayed_tasks = workflow.find_resolved_tasks(tasks)
        cls._add_variables_to_data_flow_context(outbound_context,
                                                execution)
        data_flow.prepare_tasks(tasks_to_start, outbound_context)
        db_api.commit_tx()
    except Exception as e:
        LOG.exception("Failed to create necessary DB objects.")
        raise exc.EngineException("Failed to create necessary DB objects:"
                                  " %s" % e)
    finally:
        db_api.end_tx()
    if states.is_stopped_or_finished(execution["state"]):
        return task
    # Use a distinct loop variable: reusing `task` here would clobber the
    # updated task that this method must return.
    for delayed_task in delayed_tasks:
        cls._schedule_run(workbook, delayed_task, outbound_context)
    if tasks_to_start:
        cls._run_tasks(tasks_to_start)
    return task
def _get(self, id):
    """Load an execution by id and wrap it in an Execution resource."""
    return Execution.from_dict(db_api.execution_get(id))
def convey_task_result(self, cntx, **kwargs):
    """Conveys task result to Mistral Engine.

    This method should be used by clients of Mistral Engine to update
    state of a task once task action has been performed. One of the
    clients of this method is Mistral REST API server that receives
    task result from the outside action handlers.

    Note: calling this method serves an event notifying Mistral that
    it possibly needs to move the workflow on, i.e. run other workflow
    tasks for which all dependencies are satisfied.

    :param cntx: a request context dict
    :type cntx: dict
    :param kwargs: a dict of method arguments
    :type kwargs: dict
    :return: Task.
    """
    task_id = kwargs.get('task_id')
    state = kwargs.get('state')
    result = kwargs.get('result')
    db_api.start_tx()
    try:
        # TODO(rakhmerov): validate state transition
        task = db_api.task_get(task_id)
        workbook = self._get_workbook(task['workbook_name'])
        wf_trace_msg = "Task '%s' [%s -> %s" % \
            (task['name'], task['state'], state)
        # Don't log results for failed tasks.
        wf_trace_msg += ']' if state == states.ERROR \
            else ", result = %s]" % result
        WORKFLOW_TRACE.info(wf_trace_msg)
        task_output = data_flow.get_task_output(task, result)
        # Update task state.
        task, outbound_context = self._update_task(workbook, task, state,
                                                   task_output)
        execution = db_api.execution_get(task['execution_id'])
        self._create_next_tasks(task, workbook)
        # Determine what tasks need to be started.
        tasks = db_api.tasks_get(workbook_name=task['workbook_name'],
                                 execution_id=task['execution_id'])
        new_exec_state = self._determine_execution_state(execution, tasks)
        if execution['state'] != new_exec_state:
            wf_trace_msg = \
                "Execution '%s' [%s -> %s]" % \
                (execution['id'], execution['state'], new_exec_state)
            WORKFLOW_TRACE.info(wf_trace_msg)
            execution = \
                db_api.execution_update(execution['id'], {
                    "state": new_exec_state
                })
            LOG.info("Changed execution state: %s" % execution)
        tasks_to_start, delayed_tasks = workflow.find_resolved_tasks(tasks)
        self._add_variables_to_data_flow_context(outbound_context,
                                                 execution)
        data_flow.prepare_tasks(tasks_to_start, outbound_context)
        db_api.commit_tx()
    except Exception as e:
        msg = "Failed to create necessary DB objects: %s" % e
        LOG.exception(msg)
        raise exc.EngineException(msg)
    finally:
        db_api.end_tx()
    if states.is_stopped_or_finished(execution["state"]):
        return task
    # Use a distinct loop variable: reusing `task` here would clobber the
    # updated task that this method must return.
    for delayed_task in delayed_tasks:
        self._schedule_run(workbook, delayed_task, outbound_context)
    if tasks_to_start:
        self._run_tasks(tasks_to_start)
    return task
def convey_task_result(self, cntx, **kwargs):
    """Conveys task result to Mistral Engine.

    This method should be used by clients of Mistral Engine to update
    state of a task once task action has been performed. One of the
    clients of this method is Mistral REST API server that receives
    task result from the outside action handlers.

    Note: calling this method serves an event notifying Mistral that
    it possibly needs to move the workflow on, i.e. run other workflow
    tasks for which all dependencies are satisfied.

    :param cntx: a request context dict
    :type cntx: dict
    :param kwargs: a dict of method arguments; expected keys are
        'task_id', 'state' and 'result'
    :type kwargs: dict
    :return: Task.
    """
    task_id = kwargs.get('task_id')
    state = kwargs.get('state')
    result = kwargs.get('result')

    # All DB reads/writes below happen inside a single transaction so
    # the task/execution state change is atomic.
    db_api.start_tx()

    try:
        # TODO(rakhmerov): validate state transition
        task = db_api.task_get(task_id)
        workbook = self._get_workbook(task['workbook_name'])

        wf_trace_msg = "Task '%s' [%s -> %s" % \
            (task['name'], task['state'], state)

        # Don't log the result payload for failed tasks.
        wf_trace_msg += ']' if state == states.ERROR \
            else ", result = %s]" % result
        WORKFLOW_TRACE.info(wf_trace_msg)

        # If the task ran an ad-hoc action, convert its raw result into
        # the form declared by the ad-hoc action definition.
        action_name = wb_task.TaskSpec(task['task_spec'])\
            .get_full_action_name()

        if not a_f.get_action_class(action_name):
            action = a_f.resolve_adhoc_action_name(workbook, action_name)

            if not action:
                msg = 'Unknown action [workbook=%s, action=%s]' % \
                      (workbook, action_name)
                # NOTE(review): this ActionException is caught by the
                # broad handler below and re-raised as EngineException
                # with a "Failed to create necessary DB objects" message,
                # which obscures the real cause — consider letting it
                # propagate (kept as-is to preserve caller-visible
                # exception types).
                raise exc.ActionException(msg)

            result = a_f.convert_adhoc_action_result(workbook,
                                                     action_name,
                                                     result)

        task_output = data_flow.get_task_output(task, result)

        # Update task state.
        task, context = self._update_task(workbook, task, state,
                                          task_output)

        execution = db_api.execution_get(task['execution_id'])

        self._create_next_tasks(task, workbook)

        # Determine what tasks need to be started.
        tasks = db_api.tasks_get(execution_id=task['execution_id'])

        new_exec_state = self._determine_execution_state(execution, tasks)

        if execution['state'] != new_exec_state:
            wf_trace_msg = \
                "Execution '%s' [%s -> %s]" % \
                (execution['id'], execution['state'], new_exec_state)
            WORKFLOW_TRACE.info(wf_trace_msg)

            execution = db_api.execution_update(execution['id'], {
                "state": new_exec_state
            })

            LOG.info("Changed execution state: %s" % execution)

        # Create a list of tasks that can be executed immediately (have
        # their requirements satisfied) along with the list of tasks that
        # require some delay before they'll be executed.
        tasks_to_start, delayed_tasks = workflow.find_resolved_tasks(tasks)

        # Populate context with special variables such as `openstack` and
        # `__execution`.
        self._add_variables_to_data_flow_context(context, execution)

        # Update task with new context and params.
        executables = data_flow.prepare_tasks(tasks_to_start,
                                              context,
                                              workbook)

        db_api.commit_tx()
    except Exception as e:
        msg = "Failed to create necessary DB objects: %s" % e
        LOG.exception(msg)
        raise exc.EngineException(msg)
    finally:
        db_api.end_tx()

    # If the whole execution is already stopped or finished there is
    # nothing left to schedule or run.
    if states.is_stopped_or_finished(execution['state']):
        return task

    # BUG FIX: use dedicated loop variables below. The previous code
    # iterated with 'for task in delayed_tasks' (rebinding 'task', so the
    # final 'return task' returned the last *delayed* task) and with
    # 'for task_id, action_name, ...' (shadowing the outer locals).
    for delayed_task in delayed_tasks:
        self._schedule_run(workbook, delayed_task, context)

    for exec_task_id, exec_action_name, action_params in executables:
        self._run_task(exec_task_id, exec_action_name, action_params)

    return task
def test_engine_tasks_on_error_finish(self):
    """A failed task triggers its on-error branch and the workflow
    still completes: all follow-up tasks run to SUCCESS and the
    execution itself finishes in SUCCESS with exactly one ERROR task.
    """
    # Kick off the workflow at the 'test_subsequent' task.
    execution = ENGINE.start_workflow_execution(WB_NAME,
                                                "test_subsequent",
                                                CONTEXT)

    tasks = db_api.tasks_get(WB_NAME, execution['id'])
    execution = db_api.execution_get(WB_NAME, execution['id'])

    # Fail the 'test_subsequent' task so its on-error tasks get created.
    ENGINE.convey_task_result(WB_NAME, execution['id'],
                              tasks[0]['id'], states.ERROR, None)

    tasks = db_api.tasks_get(WB_NAME, execution['id'])

    self.assertEqual(len(tasks), 6)
    self._assert_single_item(tasks,
                             name='backup-vms',
                             state=states.IDLE)
    self._assert_single_item(tasks,
                             name='test_subsequent',
                             state=states.ERROR)
    self._assert_single_item(tasks,
                             name='attach-volumes',
                             state=states.IDLE)
    vm_tasks = self._assert_multiple_items(tasks, 3,
                                           name='create-vms',
                                           state=states.RUNNING)

    # Complete each of the three 'create-vms' tasks.
    for vm_task in vm_tasks:
        ENGINE.convey_task_result(WB_NAME, execution['id'],
                                  vm_task['id'], states.SUCCESS, None)

    tasks = db_api.tasks_get(WB_NAME, execution['id'])

    backup_task = self._assert_single_item(tasks,
                                           name='backup-vms',
                                           state=states.RUNNING)
    attach_task = self._assert_single_item(tasks,
                                           name='attach-volumes',
                                           state=states.RUNNING)
    self._assert_multiple_items(tasks, 3,
                                name='create-vms',
                                state=states.SUCCESS)

    # Finish the two remaining tasks; the execution should then succeed.
    for remaining_task in (backup_task, attach_task):
        ENGINE.convey_task_result(WB_NAME, execution['id'],
                                  remaining_task['id'],
                                  states.SUCCESS, None)

    execution = db_api.execution_get(WB_NAME, execution['id'])

    self.assertEqual(execution['state'], states.SUCCESS)

    tasks = db_api.tasks_get(WB_NAME, execution['id'])

    # Only the deliberately-failed task is in ERROR; all others succeeded.
    self._assert_single_item(tasks, state=states.ERROR)
    self._assert_multiple_items(tasks, 5, state=states.SUCCESS)