Example 1
    def test_update_workbook(self):
        # Create workbook.
        wb_db = wb_service.create_workbook_v2(WORKBOOK)

        self.assertIsNotNone(wb_db)
        self.assertEqual(2, len(db_api.get_workflow_definitions()))

        # Update workbook.
        wb_db = wb_service.update_workbook_v2(UPDATED_WORKBOOK)

        self.assertIsNotNone(wb_db)
        self.assertEqual('my_wb', wb_db.name)
        self.assertEqual(UPDATED_WORKBOOK, wb_db.definition)
        self.assertListEqual(['test'], wb_db.tags)

        db_wfs = db_api.get_workflow_definitions()

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='my_wb.wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertEqual('direct', wf1_spec.get_type())

        # Workflow 2.
        wf2_db = self._assert_single_item(db_wfs, name='my_wb.wf2')
        wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec)

        self.assertEqual('wf2', wf2_spec.get_name())
        self.assertEqual('reverse', wf2_spec.get_type())
Example 2
def _schedule_run_workflow(task_ex, task_spec, wf_input, index):
    parent_wf_ex = task_ex.workflow_execution
    parent_wf_spec = spec_parser.get_workflow_spec(parent_wf_ex.spec)

    wf_spec_name = task_spec.get_workflow_name()

    wf_def = e_utils.resolve_workflow_definition(parent_wf_ex.workflow_name,
                                                 parent_wf_spec.get_name(),
                                                 wf_spec_name)

    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    wf_params = {'task_execution_id': task_ex.id, 'with_items_index': index}

    if 'env' in parent_wf_ex.params:
        wf_params['env'] = parent_wf_ex.params['env']

    for k, v in list(wf_input.items()):
        if k not in wf_spec.get_input():
            wf_params[k] = v
            del wf_input[k]

    scheduler.schedule_call(None,
                            'mistral.engine.task_handler.run_workflow',
                            0,
                            wf_name=wf_def.name,
                            wf_input=wf_input,
                            wf_params=wf_params)
Example 3
    def schedule(self, input_dict, target, index=0, desc='', safe_rerun=False):
        assert not self.action_ex

        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec(parent_wf_ex.spec)

        task_spec = spec_parser.get_task_spec(self.task_ex.spec)

        wf_spec_name = task_spec.get_workflow_name()

        wf_def = e_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name, parent_wf_spec.get_name(),
            wf_spec_name)

        wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

        wf_params = {'task_execution_id': self.task_ex.id, 'index': index}

        if 'env' in parent_wf_ex.params:
            wf_params['env'] = parent_wf_ex.params['env']

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        wf_handler.start_workflow(wf_def.id, input_dict,
                                  "sub-workflow execution", wf_params)
Example 4
    def test_update_workflows(self):
        db_wfs = wf_service.create_workflows(WORKFLOW_LIST)

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertEqual('reverse', wf1_spec.get_type())
        self.assertIn('param1', wf1_spec.get_input())
        self.assertIs(wf1_spec.get_input().get('param1'), utils.NotDefined)

        db_wfs = wf_service.update_workflows(UPDATED_WORKFLOW_LIST)

        self.assertEqual(1, len(db_wfs))

        wf1_db = self._assert_single_item(db_wfs, name='wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertListEqual([], wf1_spec.get_tags())
        self.assertEqual('reverse', wf1_spec.get_type())
        self.assertIn('param1', wf1_spec.get_input())
        self.assertIn('param2', wf1_spec.get_input())
        self.assertIs(wf1_spec.get_input().get('param1'), utils.NotDefined)
        self.assertIs(wf1_spec.get_input().get('param2'), utils.NotDefined)
Example 5
def _schedule_run_workflow(task_ex, task_spec, wf_input, index):
    parent_wf_ex = task_ex.workflow_execution
    parent_wf_spec = spec_parser.get_workflow_spec(parent_wf_ex.spec)

    wf_spec_name = task_spec.get_workflow_name()

    wf_def = e_utils.resolve_workflow_definition(
        parent_wf_ex.workflow_name,
        parent_wf_spec.get_name(),
        wf_spec_name
    )

    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    wf_params = {
        'task_execution_id': task_ex.id,
        'with_items_index': index
    }

    if 'env' in parent_wf_ex.params:
        wf_params['env'] = parent_wf_ex.params['env']

    for k, v in list(wf_input.items()):
        if k not in wf_spec.get_input():
            wf_params[k] = v
            del wf_input[k]

    scheduler.schedule_call(
        None,
        'mistral.engine.task_handler.run_workflow',
        0,
        wf_name=wf_def.name,
        wf_input=wf_input,
        wf_params=wf_params
    )
Example 6
    def get_controller(wf_ex, wf_spec=None):
        if not wf_spec:
            wf_spec = spec_parser.get_workflow_spec(wf_ex['spec'])

        ctrl_cls = WorkflowController._get_class(wf_spec.get_type())

        return ctrl_cls(wf_ex)
Example 7
def _build_action(action_ex):
    if isinstance(action_ex, models.WorkflowExecution):
        return actions.WorkflowAction(None, action_ex=action_ex)

    wf_name = None
    wf_spec_name = None

    if action_ex.workflow_name:
        wf_name = action_ex.workflow_name
        wf_spec = spec_parser.get_workflow_spec(
            action_ex.task_execution.workflow_execution.spec
        )
        wf_spec_name = wf_spec.get_name()

    adhoc_action_name = action_ex.runtime_context.get('adhoc_action_name')

    if adhoc_action_name:
        action_def = actions.resolve_action_definition(
            adhoc_action_name,
            wf_name,
            wf_spec_name
        )

        return actions.AdHocAction(action_def, action_ex=action_ex)

    action_def = actions.resolve_action_definition(
        action_ex.name,
        wf_name,
        wf_spec_name
    )

    return actions.PythonAction(action_def, action_ex=action_ex)
Example 8
    def _stop_workflow(wf_ex, state, message=None):
        if state == states.SUCCESS:
            wf_ctrl = wf_base.get_controller(wf_ex)

            final_context = {}

            try:
                final_context = wf_ctrl.evaluate_workflow_final_context()
            except Exception as e:
                LOG.warning(
                    'Failed to get final context for %s: %s' % (wf_ex, e)
                )

            wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

            return wf_handler.succeed_workflow(
                wf_ex,
                final_context,
                wf_spec,
                message
            )
        elif state == states.ERROR:
            return wf_handler.fail_workflow(wf_ex, message)

        return wf_ex
Example 9
    def _on_task_state_change(self, task_ex, wf_ex, task_state=states.SUCCESS):
        task_spec = spec_parser.get_task_spec(task_ex.spec)
        wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

        # We must be sure that if task is completed,
        # it was also completed in previous transaction.
        if (task_handler.is_task_completed(task_ex, task_spec)
                and states.is_completed(task_state)):
            task_handler.after_task_complete(task_ex, task_spec, wf_spec)

            # Ignore DELAYED state.
            if task_ex.state == states.RUNNING_DELAYED:
                return

            wf_ctrl = wf_base.WorkflowController.get_controller(wf_ex, wf_spec)

            # Calculate commands to process next.
            cmds = wf_ctrl.continue_workflow()

            task_ex.processed = True

            self._dispatch_workflow_commands(wf_ex, cmds)

            self._check_workflow_completion(wf_ex, wf_ctrl)
        elif task_handler.need_to_continue(task_ex, task_spec):
            # Re-run existing task.
            cmds = [commands.RunExistingTask(task_ex, reset=False)]

            self._dispatch_workflow_commands(wf_ex, cmds)
Example 10
    def _on_task_state_change(self, task_ex, wf_ex):
        task_spec = spec_parser.get_task_spec(task_ex.spec)
        wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

        if task_handler.is_task_completed(task_ex, task_spec):
            task_handler.after_task_complete(task_ex, task_spec, wf_spec)

            # Ignore DELAYED state.
            if task_ex.state == states.DELAYED:
                return

            wf_ctrl = wf_base.WorkflowController.get_controller(wf_ex)

            # Calculate commands to process next.
            cmds = wf_ctrl.continue_workflow()

            task_ex.processed = True

            self._dispatch_workflow_commands(wf_ex, cmds)

            self._check_workflow_completion(wf_ex, wf_ctrl)
        elif task_handler.need_to_continue(task_ex, task_spec):
            # Re-run existing task.
            cmds = [commands.RunExistingTask(task_ex, reset=False)]

            self._dispatch_workflow_commands(wf_ex, cmds)
Example 11
    def start_workflow(self, wf_name, wf_input, description='', **params):
        wf_exec_id = None

        try:
            params = self._canonize_workflow_params(params)

            with db_api.transaction():
                wf_def = db_api.get_workflow_definition(wf_name)
                wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

                eng_utils.validate_input(wf_def, wf_input, wf_spec)

                wf_ex = self._create_workflow_execution(
                    wf_def, wf_spec, wf_input, description, params)
                wf_exec_id = wf_ex.id

                wf_trace.info(wf_ex, "Starting workflow: '%s'" % wf_name)

                wf_ctrl = wf_base.WorkflowController.get_controller(
                    wf_ex, wf_spec)

                self._dispatch_workflow_commands(wf_ex,
                                                 wf_ctrl.continue_workflow())

                return wf_ex.get_clone()
        except Exception as e:
            LOG.error("Failed to start workflow '%s' id=%s: %s\n%s", wf_name,
                      wf_exec_id, e, traceback.format_exc())
            self._fail_workflow(wf_exec_id, e)
            raise e
Example 12
def _build_action(action_ex):
    if isinstance(action_ex, models.WorkflowExecution):
        return actions.WorkflowAction(None, action_ex=action_ex)

    wf_name = None
    wf_spec_name = None

    if action_ex.workflow_name:
        wf_name = action_ex.workflow_name
        wf_spec = spec_parser.get_workflow_spec(
            action_ex.task_execution.workflow_execution.spec)
        wf_spec_name = wf_spec.get_name()

    adhoc_action_name = action_ex.runtime_context.get('adhoc_action_name')

    if adhoc_action_name:
        action_def = actions.resolve_action_definition(adhoc_action_name,
                                                       wf_name, wf_spec_name)

        return actions.AdHocAction(action_def, action_ex=action_ex)

    action_def = actions.resolve_action_definition(action_ex.name, wf_name,
                                                   wf_spec_name)

    return actions.PythonAction(action_def, action_ex=action_ex)
Example 13
    def __init__(self, wf_ex):
        """Creates a new workflow controller.

        :param wf_ex: Workflow execution.
        """
        self.wf_ex = wf_ex
        self.wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
Example 14
    def __init__(self, wf_ex):
        """Creates a new workflow controller.

        :param wf_ex: Workflow execution.
        """
        self.wf_ex = wf_ex
        self.wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
Example 15
def _schedule_run_action(task_ex, task_spec, action_input, index):
    wf_ex = task_ex.workflow_execution
    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    action_spec_name = task_spec.get_action_name()

    action_def = action_handler.resolve_definition(
        action_spec_name,
        task_ex,
        wf_spec
    )

    action_ex = action_handler.create_action_execution(
        action_def, action_input, task_ex, index
    )

    target = expr.evaluate_recursively(
        task_spec.get_target(),
        utils.merge_dicts(
            copy.deepcopy(action_input),
            copy.copy(task_ex.in_context)
        )
    )

    scheduler.schedule_call(
        None,
        'mistral.engine.action_handler.run_existing_action',
        0,
        action_ex_id=action_ex.id,
        target=target
    )
Example 16
    def get_controller(wf_ex, wf_spec=None):
        if not wf_spec:
            wf_spec = spec_parser.get_workflow_spec(wf_ex['spec'])

        ctrl_cls = WorkflowController._get_class(wf_spec.get_type())

        return ctrl_cls(wf_ex)
Example 17
def _schedule_run_workflow(task_ex, task_spec, wf_input, index,
                           parent_wf_spec):
    parent_wf_ex = task_ex.workflow_execution

    wf_spec_name = task_spec.get_workflow_name()

    wf_def = e_utils.resolve_workflow_definition(parent_wf_ex.workflow_name,
                                                 parent_wf_spec.get_name(),
                                                 wf_spec_name)

    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    wf_params = {'task_execution_id': task_ex.id, 'with_items_index': index}

    if 'env' in parent_wf_ex.params:
        wf_params['env'] = parent_wf_ex.params['env']

    for k, v in list(wf_input.items()):
        if k not in wf_spec.get_input():
            wf_params[k] = v
            del wf_input[k]

    wf_ex_id, _ = wf_ex_service.create_workflow_execution(
        wf_def.name, wf_input, "sub-workflow execution", wf_params, wf_spec)

    scheduler.schedule_call(None,
                            'mistral.engine.task_handler.resume_workflow',
                            0,
                            wf_ex_id=wf_ex_id,
                            env=None)
Example 18
def transform_result(task_ex, result):
    """Transforms task result accounting for ad-hoc actions.

    In case if the given result is an action result and action is
    an ad-hoc action the method transforms the result according to
    ad-hoc action configuration.

    :param task_ex: Task DB model.
    :param result: Result of task action/workflow.
    """
    if result.is_error():
        return result

    action_spec_name = spec_parser.get_task_spec(
        task_ex.spec).get_action_name()

    if action_spec_name:
        wf_ex = task_ex.workflow_execution
        wf_spec_name = spec_parser.get_workflow_spec(wf_ex.spec).get_name()

        return transform_action_result(
            wf_ex.workflow_name,
            wf_spec_name,
            action_spec_name,
            result
        )

    return result
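A minimal usage sketch for the helper above (hypothetical caller; raw_output and the Result(data=...) constructor are assumptions based on the docstring's mention of mistral.workflow.utils.Result): the raw action output is wrapped in a Result and passed through the transformation before being stored.

from mistral.workflow import utils as wf_utils

# Hypothetical: wrap raw output; error results are returned unchanged by the guard above.
result = wf_utils.Result(data=raw_output)
result = transform_result(task_ex, result)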
Example 19
def get_controller(wf_ex, wf_spec=None):
    """Gets a workflow controller instance by given workflow execution object.

    :param wf_ex: Workflow execution object.
    :param wf_spec: Workflow specification object. If passed, the method works
        faster.
    :returns: Workflow controller instance.
    """

    if not wf_spec:
        wf_spec = spec_parser.get_workflow_spec(wf_ex['spec'])

    wf_type = wf_spec.get_type()

    ctrl_cls = None

    for cls in u.iter_subclasses(WorkflowController):
        if cls.__workflow_type__ == wf_type:
            ctrl_cls = cls
            break

    if not ctrl_cls:
        raise exc.MistralError(
            'Failed to find a workflow controller [type=%s]' % wf_type)

    return ctrl_cls(wf_ex, wf_spec)
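A minimal usage sketch of get_controller (hypothetical caller; the db_api.get_workflow_execution lookup and wf_ex_id are assumptions), passing the already parsed spec so the function skips re-parsing, as the docstring suggests:

# Hypothetical: build the controller for an execution and ask it what to run next.
wf_ex = db_api.get_workflow_execution(wf_ex_id)      # assumed lookup helper
wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)  # parse the spec once

wf_ctrl = get_controller(wf_ex, wf_spec)             # avoids a second parse

cmds = wf_ctrl.continue_workflow()                   # commands to dispatch next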
Example 20
def run_new_task(wf_cmd):
    """Runs a task."""
    ctx = wf_cmd.ctx
    wf_ex = wf_cmd.wf_ex
    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
    task_spec = wf_cmd.task_spec

    # NOTE(xylan): Need to think how to get rid of this weird judgment to keep
    # it more consistent with the function name.
    task_ex = wf_utils.find_task_execution_with_state(wf_ex, task_spec,
                                                      states.WAITING)

    if task_ex:
        _set_task_state(task_ex, states.RUNNING)
        task_ex.in_context = ctx
    else:
        task_ex = _create_task_execution(wf_ex, task_spec, ctx)

    LOG.debug(
        'Starting workflow task [workflow=%s, task_spec=%s, init_state=%s]' %
        (wf_ex.name, task_spec, task_ex.state))

    # TODO(rakhmerov): 'concurrency' policy should keep a number of running
    # actions/workflows under control so it can't be implemented if it runs
    # before any action executions are created.
    before_task_start(task_ex, task_spec, wf_spec)

    # Policies could possibly change task state.
    if task_ex.state != states.RUNNING:
        return

    _run_existing_task(task_ex, task_spec, wf_spec)
Example 21
def check_workflow_completion(wf_ex):
    if states.is_paused_or_completed(wf_ex.state):
        return

    # Workflow is not completed if there are any incomplete task
    # executions that are not in WAITING state. If all incomplete
    # tasks are waiting and there are no unhandled errors, then these
    # tasks will not reach completion. In this case, mark the
    # workflow complete.
    incomplete_tasks = wf_utils.find_incomplete_task_executions(wf_ex)

    if any(not states.is_waiting(t.state) for t in incomplete_tasks):
        return

    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    wf_ctrl = wf_base.get_controller(wf_ex, wf_spec)

    if wf_ctrl.all_errors_handled():
        succeed_workflow(wf_ex, wf_ctrl.evaluate_workflow_final_context(),
                         wf_spec)
    else:
        state_info = wf_utils.construct_fail_info_message(wf_ctrl, wf_ex)

        fail_workflow(wf_ex, state_info)
Example 22
def _schedule_run_action(task_ex, task_spec, action_input, index):
    wf_ex = task_ex.workflow_execution
    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    action_spec_name = task_spec.get_action_name()

    # TODO(rakhmerov): Refactor ad-hoc actions and isolate them.
    action_def = e_utils.resolve_action_definition(wf_ex.workflow_name,
                                                   wf_spec.get_name(),
                                                   action_spec_name)

    if action_def.spec:
        # Ad-hoc action.
        action_spec = spec_parser.get_action_spec(action_def.spec)

        base_name = action_spec.get_base()

        action_def = e_utils.resolve_action_definition(task_ex.workflow_name,
                                                       wf_spec.get_name(),
                                                       base_name)

    action_ex = _create_action_execution(task_ex, action_def, action_input,
                                         index)

    target = expr.evaluate_recursively(
        task_spec.get_target(),
        utils.merge_dicts(copy.deepcopy(action_input),
                          copy.copy(task_ex.in_context)))

    scheduler.schedule_call(None,
                            'mistral.engine.task_handler.run_action',
                            0,
                            action_ex_id=action_ex.id,
                            target=target)
Example 23
def get_controller(wf_ex, wf_spec=None):
    """Gets a workflow controller instance by given workflow execution object.

    :param wf_ex: Workflow execution object.
    :param wf_spec: Workflow specification object. If passed, the method works
        faster.
    :returns: Workflow controller instance.
    """

    if not wf_spec:
        wf_spec = spec_parser.get_workflow_spec(wf_ex['spec'])

    wf_type = wf_spec.get_type()

    ctrl_cls = None

    for cls in u.iter_subclasses(WorkflowController):
        if cls.__workflow_type__ == wf_type:
            ctrl_cls = cls
            break

    if not ctrl_cls:
        raise exc.NotFoundException(
            'Failed to find a workflow controller [type=%s]' % wf_type
        )

    return ctrl_cls(wf_ex, wf_spec)
Example 24
    def __init__(self, wf_ex, task_spec, ctx, task_ex=None):
        self.wf_ex = wf_ex
        self.task_spec = task_spec
        self.ctx = ctx
        self.task_ex = task_ex
        self.wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
        self.waiting = False
        self.reset_flag = False
Example 25
    def __init__(self, wf_ex, task_spec, ctx, task_ex=None):
        self.wf_ex = wf_ex
        self.task_spec = task_spec
        self.ctx = ctx
        self.task_ex = task_ex
        self.wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
        self.waiting = False
        self.reset_flag = False
Example 26
def create_cron_trigger(name, workflow_name, workflow_input,
                        workflow_params=None, pattern=None, first_time=None,
                        count=None, start_time=None, workflow_id=None):
    if not start_time:
        start_time = datetime.datetime.now()

    if isinstance(first_time, six.string_types):
        try:
            first_time = datetime.datetime.strptime(
                first_time,
                '%Y-%m-%d %H:%M'
            )
        except ValueError as e:
            raise exc.InvalidModelException(str(e))

    validate_cron_trigger_input(pattern, first_time, count)

    first_utc_time = first_time

    if first_time:
        first_second = time.mktime(first_time.timetuple())
        first_utc_time = datetime.datetime.utcfromtimestamp(first_second)
        next_time = first_utc_time

        if not (pattern or count):
            count = 1
    else:
        next_time = get_next_execution_time(pattern, start_time)

    with db_api.transaction():
        wf_def = db_api.get_workflow_definition(
            workflow_id if workflow_id else workflow_name
        )

        eng_utils.validate_input(
            wf_def,
            workflow_input or {},
            parser.get_workflow_spec(wf_def.spec)
        )

        values = {
            'name': name,
            'pattern': pattern,
            'first_execution_time': first_utc_time,
            'next_execution_time': next_time,
            'remaining_executions': count,
            'workflow_name': wf_def.name,
            'workflow_id': wf_def.id,
            'workflow_input': workflow_input or {},
            'workflow_params': workflow_params or {},
            'scope': 'private'
        }

        security.add_trust_id(values)

        trig = db_api.create_cron_trigger(values)

    return trig
Example 27
    def test_create_workbook(self):
        wb_db = wb_service.create_workbook_v2(WORKBOOK)

        self.assertIsNotNone(wb_db)
        self.assertEqual('my_wb', wb_db.name)
        self.assertEqual(WORKBOOK, wb_db.definition)
        self.assertIsNotNone(wb_db.spec)
        self.assertListEqual(['test'], wb_db.tags)

        db_actions = db_api.get_action_definitions(name='my_wb.concat')

        self.assertEqual(1, len(db_actions))

        # Action.
        action_db = self._assert_single_item(db_actions, name='my_wb.concat')

        self.assertFalse(action_db.is_system)

        action_spec = spec_parser.get_action_spec(action_db.spec)

        self.assertEqual('concat', action_spec.get_name())
        self.assertEqual('std.echo', action_spec.get_base())
        self.assertEqual(ACTION_DEFINITION, action_db.definition)

        db_wfs = db_api.get_workflow_definitions()

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='my_wb.wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertEqual('reverse', wf1_spec.get_type())
        self.assertListEqual(['wf_test'], wf1_spec.get_tags())
        self.assertListEqual(['wf_test'], wf1_db.tags)
        self.assertEqual(WORKBOOK_WF1_DEFINITION, wf1_db.definition)

        # Workflow 2.
        wf2_db = self._assert_single_item(db_wfs, name='my_wb.wf2')
        wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec)

        self.assertEqual('wf2', wf2_spec.get_name())
        self.assertEqual('direct', wf2_spec.get_type())
        self.assertEqual(WORKBOOK_WF2_DEFINITION, wf2_db.definition)
Example 28
    def test_create_workbook(self):
        wb_db = wb_service.create_workbook_v2(WORKBOOK)

        self.assertIsNotNone(wb_db)
        self.assertEqual('my_wb', wb_db.name)
        self.assertEqual(WORKBOOK, wb_db.definition)
        self.assertIsNotNone(wb_db.spec)
        self.assertListEqual(['test'], wb_db.tags)

        db_actions = db_api.get_action_definitions(name='my_wb.concat')

        self.assertEqual(1, len(db_actions))

        # Action.
        action_db = self._assert_single_item(db_actions, name='my_wb.concat')

        self.assertFalse(action_db.is_system)

        action_spec = spec_parser.get_action_spec(action_db.spec)

        self.assertEqual('concat', action_spec.get_name())
        self.assertEqual('std.echo', action_spec.get_base())
        self.assertEqual(ACTION_DEFINITION, action_db.definition)

        db_wfs = db_api.get_workflow_definitions()

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='my_wb.wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertEqual('reverse', wf1_spec.get_type())
        self.assertListEqual(['wf_test'], wf1_spec.get_tags())
        self.assertListEqual(['wf_test'], wf1_db.tags)
        self.assertEqual(WORKBOOK_WF1_DEFINITION, wf1_db.definition)

        # Workflow 2.
        wf2_db = self._assert_single_item(db_wfs, name='my_wb.wf2')
        wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec)

        self.assertEqual('wf2', wf2_spec.get_name())
        self.assertEqual('direct', wf2_spec.get_type())
        self.assertEqual(WORKBOOK_WF2_DEFINITION, wf2_db.definition)
Example 29
def succeed_workflow(wf_ex, final_context):
    set_execution_state(wf_ex, states.SUCCESS)

    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    wf_ex.output = data_flow.evaluate_workflow_output(wf_spec, final_context)

    if wf_ex.task_execution_id:
        _schedule_send_result_to_parent_workflow(wf_ex)
Example 30
    def schedule(self, input_dict, target, index=0, desc=''):
        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec(parent_wf_ex.spec)

        task_spec = spec_parser.get_task_spec(self.task_ex.spec)

        wf_spec_name = task_spec.get_workflow_name()

        wf_def = e_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name,
            parent_wf_spec.get_name(),
            wf_spec_name
        )

        wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

        wf_params = {
            'task_execution_id': self.task_ex.id,
            'index': index
        }

        if 'env' in parent_wf_ex.params:
            wf_params['env'] = parent_wf_ex.params['env']

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        wf_ex, _ = wf_ex_service.create_workflow_execution(
            wf_def.name,
            input_dict,
            "sub-workflow execution",
            wf_params,
            wf_spec
        )

        scheduler.schedule_call(
            None,
            _RESUME_WORKFLOW_PATH,
            0,
            wf_ex_id=wf_ex.id,
            env=None
        )
Example 31
    def test_create_workflows(self):
        db_wfs = wf_service.create_workflows(WORKFLOW_LIST)

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertListEqual(['test', 'v2'], wf1_spec.get_tags())
        self.assertEqual('reverse', wf1_spec.get_type())

        # Workflow 2.
        wf2_db = self._assert_single_item(db_wfs, name='wf2')
        wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec)

        self.assertEqual('wf2', wf2_spec.get_name())
        self.assertEqual('direct', wf2_spec.get_type())
Example 32
    def test_create_workflows(self):
        db_wfs = wf_service.create_workflows(WORKFLOW_LIST)

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertListEqual(['test', 'v2'], wf1_spec.get_tags())
        self.assertEqual('reverse', wf1_spec.get_type())

        # Workflow 2.
        wf2_db = self._assert_single_item(db_wfs, name='wf2')
        wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec)

        self.assertEqual('wf2', wf2_spec.get_name())
        self.assertEqual('direct', wf2_spec.get_type())
Example 33
    def __init__(self, wf_ex, wf_spec=None):
        """Creates a new workflow controller.

        :param wf_ex: Workflow execution.

        :param wf_spec: Workflow specification.
        """
        self.wf_ex = wf_ex
        if wf_spec is None:
            wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
        self.wf_spec = wf_spec
Example 34
    def __init__(self, wf_ex, wf_spec=None):
        """Creates a new workflow controller.

        :param wf_ex: Workflow execution.

        :param wf_spec: Workflow specification.
        """
        self.wf_ex = wf_ex
        if wf_spec is None:
            wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
        self.wf_spec = wf_spec
Example 35
def succeed_workflow(wf_ex, final_context, state_info=None):
    set_execution_state(wf_ex, states.SUCCESS, state_info)

    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    wf_ex.output = data_flow.evaluate_workflow_output(wf_spec, final_context)

    if wf_ex.task_execution_id:
        _schedule_send_result_to_parent_workflow(wf_ex)

    return wf_ex
Example 36
def on_action_complete(action_ex, result):
    """Handles event of action result arrival.

    Given action result this method performs analysis of the workflow
    execution and identifies commands (including tasks) that can be
    scheduled for execution.

    :param action_ex: Action execution objects the result belongs to.
    :param result: Task action/workflow output wrapped into
        mistral.workflow.utils.Result instance.
    :return: Task execution object.
    """

    task_ex = action_ex.task_execution

    # Ignore if action already completed.
    if (states.is_completed(action_ex.state) and not
            isinstance(action_ex, models.WorkflowExecution)):
        return task_ex

    result = action_handler.transform_result(result, task_ex)

    wf_ex = task_ex.workflow_execution

    # Ignore workflow executions because they're handled during
    # workflow completion.
    if not isinstance(action_ex, models.WorkflowExecution):
        action_handler.store_action_result(action_ex, result)

    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
    task_spec = wf_spec.get_tasks()[task_ex.name]

    if result.is_success():
        task_state = states.SUCCESS
        task_state_info = None
    else:
        task_state = states.ERROR
        task_state_info = result.error

    if not task_spec.get_with_items():
        _complete_task(task_ex, task_spec, task_state, task_state_info)
    else:
        with_items.increase_capacity(task_ex)
        if with_items.is_completed(task_ex):
            _complete_task(
                task_ex,
                task_spec,
                with_items.get_final_state(task_ex),
                task_state_info
            )

    return task_ex
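A short driver sketch for on_action_complete (hypothetical; action_ex_id and the success payload are assumptions): the raw outcome is wrapped in the Result type named in the docstring before the handler is called.

from mistral.workflow import utils as wf_utils

# Hypothetical driver: load the action execution, wrap its outcome in a Result
# and let the handler complete the task (or wait for remaining with-items runs).
action_ex = db_api.get_action_execution(action_ex_id)
result = wf_utils.Result(data={'output': 'ok'})   # or wf_utils.Result(error='...')

task_ex = on_action_complete(action_ex, result)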
Example 37
def run_existing_task(task_ex_id):
    """This function runs existing task execution.

    It is needed mostly by scheduler.
    """
    task_ex = db_api.get_task_execution(task_ex_id)
    task_spec = spec_parser.get_task_spec(task_ex.spec)
    wf_def = db_api.get_workflow_definition(task_ex.workflow_name)
    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    # Explicitly change task state to RUNNING.
    task_ex.state = states.RUNNING

    _run_existing_task(task_ex, task_spec, wf_spec)
Example 38
def run_existing_task(task_ex_id):
    """This function runs existing task execution.

    It is needed mostly by scheduler.
    """
    task_ex = db_api.get_task_execution(task_ex_id)
    task_spec = spec_parser.get_task_spec(task_ex.spec)
    wf_def = db_api.get_workflow_definition(task_ex.workflow_name)
    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    # Explicitly change task state to RUNNING.
    task_ex.state = states.RUNNING

    _run_existing_task(task_ex, task_spec, wf_spec)
Example 39
def create_workflow_execution(wf_identifier, wf_input, description, params):
    params = canonize_workflow_params(params)

    wf_def = db_api.get_workflow_definition(wf_identifier)
    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    eng_utils.validate_input(wf_def, wf_input, wf_spec)

    wf_ex = _create_workflow_execution(wf_def, wf_spec, wf_input, description,
                                       params)

    wf_trace.info(wf_ex, "Starting workflow: '%s'" % wf_identifier)

    return wf_ex.id
Example 40
    def schedule(self, input_dict, target, index=0, desc=''):
        assert not self.action_ex

        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec(parent_wf_ex.spec)

        task_spec = spec_parser.get_task_spec(self.task_ex.spec)

        wf_spec_name = task_spec.get_workflow_name()

        wf_def = e_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name,
            parent_wf_spec.get_name(),
            wf_spec_name
        )

        wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

        wf_params = {
            'task_execution_id': self.task_ex.id,
            'index': index
        }

        if 'env' in parent_wf_ex.params:
            wf_params['env'] = parent_wf_ex.params['env']

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        wf_handler.start_workflow(
            wf_def.id,
            input_dict,
            "sub-workflow execution",
            wf_params
        )
Example 41
    def test_update_workflows(self):
        db_wfs = wf_service.create_workflows(WORKFLOW_LIST)

        self.assertEqual(2, len(db_wfs))

        # Workflow 1.
        wf1_db = self._assert_single_item(db_wfs, name='wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertEqual('reverse', wf1_spec.get_type())
        self.assertIn('param1', wf1_spec.get_input())
        self.assertIs(
            wf1_spec.get_input().get('param1'),
            utils.NotDefined
        )

        db_wfs = wf_service.update_workflows(UPDATED_WORKFLOW_LIST)

        self.assertEqual(1, len(db_wfs))

        wf1_db = self._assert_single_item(db_wfs, name='wf1')
        wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

        self.assertEqual('wf1', wf1_spec.get_name())
        self.assertListEqual([], wf1_spec.get_tags())
        self.assertEqual('reverse', wf1_spec.get_type())
        self.assertIn('param1', wf1_spec.get_input())
        self.assertIn('param2', wf1_spec.get_input())
        self.assertIs(
            wf1_spec.get_input().get('param1'),
            utils.NotDefined
        )
        self.assertIs(
            wf1_spec.get_input().get('param2'),
            utils.NotDefined
        )
Example 42
def run_existing_task(task_ex_id, reset=True):
    """This function runs existing task execution.

    It is needed mostly by scheduler.

    :param task_ex_id: Task execution id.
    :param reset: Reset action executions for the task.
    """
    task_ex = db_api.get_task_execution(task_ex_id)
    task_spec = spec_parser.get_task_spec(task_ex.spec)
    wf_def = db_api.get_workflow_definition(task_ex.workflow_name)
    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    # Throw exception if the existing task already succeeded.
    if task_ex.state == states.SUCCESS:
        raise exc.EngineException(
            'Rerunning existing task that already succeeded is not supported.'
        )

    # Exit if the existing task failed and reset is not instructed.
    # For a with-items task without reset, re-running the existing
    # task will re-run the failed and unstarted items.
    if (task_ex.state == states.ERROR and not reset and
            not task_spec.get_with_items()):
        return task_ex

    # Reset nested executions only if task is not already RUNNING.
    if task_ex.state != states.RUNNING:
        # Reset state of processed task and related action executions.
        if reset:
            action_exs = task_ex.executions
        else:
            action_exs = db_api.get_action_executions(
                task_execution_id=task_ex.id,
                state=states.ERROR,
                accepted=True
            )

        for action_ex in action_exs:
            action_ex.accepted = False

    # Explicitly change task state to RUNNING.
    set_task_state(task_ex, states.RUNNING, None, processed=False)

    _run_existing_task(task_ex, task_spec, wf_spec)

    return task_ex
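A brief sketch of how the function above might be invoked for a rerun (hypothetical; task_ex_id is an assumption). With reset=True previous action executions are no longer accepted and the task re-runs fully; with reset=False a with-items task only re-runs its failed and unstarted items, as the guards above show.

# Hypothetical rerun of a failed task execution.
task_ex = run_existing_task(task_ex_id, reset=True)    # full reset and re-run

# Re-run only the failed/unstarted items of a with-items task.
task_ex = run_existing_task(task_ex_id, reset=False)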
Example 43
    def on_action_complete(self, action_ex_id, result):
        wf_ex_id = None

        try:
            with db_api.transaction():
                action_ex = db_api.get_action_execution(action_ex_id)

                # In case of single action execution there is no
                # assigned task execution.
                if not action_ex.task_execution:
                    return action_handler.store_action_result(
                        action_ex,
                        result
                    ).get_clone()

                wf_ex_id = action_ex.task_execution.workflow_execution_id
                wf_ex = wf_handler.lock_workflow_execution(wf_ex_id)

                wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

                task_ex = task_handler.on_action_complete(
                    action_ex,
                    wf_spec,
                    result
                )

                # If workflow is on pause or completed then there's no
                # need to continue workflow.
                if states.is_paused_or_completed(wf_ex.state):
                    return action_ex.get_clone()

                self._on_task_state_change(task_ex, wf_ex, wf_spec)

                return action_ex.get_clone()
        except Exception as e:
            # TODO(rakhmerov): Need to refactor logging in a more elegant way.
            LOG.error(
                'Failed to handle action execution result [id=%s]: %s\n%s',
                action_ex_id, e, traceback.format_exc()
            )

            # If an exception was thrown after we got the wf_ex_id
            if wf_ex_id:
                self._fail_workflow(wf_ex_id, e)

            raise e
Example 44
def succeed_workflow(wf_ex, final_context, state_info=None):
    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    # Fail workflow if output is not successfully evaluated.
    try:
        wf_ex.output = data_flow.evaluate_workflow_output(
            wf_spec, final_context)
    except Exception as e:
        return fail_workflow(wf_ex, str(e))

    # Set workflow execution to success until after output is evaluated.
    set_execution_state(wf_ex, states.SUCCESS, state_info)

    if wf_ex.task_execution_id:
        _schedule_send_result_to_parent_workflow(wf_ex)

    return wf_ex
Example 45
def on_action_complete(action_ex, result):
    """Handles event of action result arrival.

    Given action result this method performs analysis of the workflow
    execution and identifies commands (including tasks) that can be
    scheduled for execution.

    :param action_ex: Action execution objects the result belongs to.
    :param result: Task action/workflow output wrapped into
        mistral.workflow.utils.Result instance.
    :return: Task execution object.
    """

    task_ex = action_ex.task_execution

    # Ignore if action already completed.
    if (states.is_completed(action_ex.state) and not
            isinstance(action_ex, models.WorkflowExecution)):
        return task_ex

    result = action_handler.transform_result(result, task_ex)

    wf_ex = task_ex.workflow_execution

    # Ignore workflow executions because they're handled during
    # workflow completion.
    if not isinstance(action_ex, models.WorkflowExecution):
        action_handler.store_action_result(action_ex, result)

    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)
    task_spec = wf_spec.get_tasks()[task_ex.name]

    task_state = states.SUCCESS if result.is_success() else states.ERROR

    if not task_spec.get_with_items():
        _complete_task(task_ex, task_spec, task_state)
    else:
        with_items.increase_capacity(task_ex)
        if with_items.is_completed(task_ex):
            _complete_task(
                task_ex,
                task_spec,
                with_items.get_final_state(task_ex)
            )

    return task_ex
Example 46
    def _continue_workflow(self, wf_ex, task_ex=None, reset=True, env=None):
        wf_ex = wf_service.update_workflow_execution_env(wf_ex, env)

        wf_handler.set_execution_state(
            wf_ex,
            states.RUNNING,
            set_upstream=True
        )

        wf_ctrl = wf_base.get_controller(wf_ex)

        # TODO(rakhmerov): Add YAQL error handling.
        # Calculate commands to process next.
        cmds = wf_ctrl.continue_workflow(task_ex=task_ex, reset=reset, env=env)

        # When resuming a workflow we need to ignore all 'pause'
        # commands because workflow controller takes tasks that
        # completed within the period when the workflow was paused.
        cmds = list(
            filter(
                lambda c: not isinstance(c, commands.PauseWorkflow),
                cmds
            )
        )

        # Since there's no explicit task causing the operation
        # we need to mark all not processed tasks as processed
        # because workflow controller takes only completed tasks
        # with flag 'processed' equal to False.
        for t_ex in wf_ex.task_executions:
            if states.is_completed(t_ex.state) and not t_ex.processed:
                t_ex.processed = True

        wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

        self._dispatch_workflow_commands(wf_ex, cmds, wf_spec)

        if not cmds:
            if not wf_utils.find_incomplete_task_executions(wf_ex):
                wf_handler.succeed_workflow(
                    wf_ex,
                    wf_ctrl.evaluate_workflow_final_context(),
                    wf_spec
                )

        return wf_ex.get_clone()
Example 47
def run_existing_task(task_ex_id, reset=True):
    """This function runs existing task execution.

    It is needed mostly by scheduler.

    :param task_ex_id: Task execution id.
    :param reset: Reset action executions for the task.
    """
    task_ex = db_api.get_task_execution(task_ex_id)
    task_spec = spec_parser.get_task_spec(task_ex.spec)
    wf_def = db_api.get_workflow_definition(task_ex.workflow_name)
    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    # Throw exception if the existing task already succeeded.
    if task_ex.state == states.SUCCESS:
        raise exc.EngineException(
            'Rerunning existing task that already succeeded is not supported.')

    # Exit if the existing task failed and reset is not instructed.
    # For a with-items task without reset, re-running the existing
    # task will re-run the failed and unstarted items.
    if (task_ex.state == states.ERROR and not reset
            and not task_spec.get_with_items()):
        return task_ex

    # Reset nested executions only if task is not already RUNNING.
    if task_ex.state != states.RUNNING:
        # Reset state of processed task and related action executions.
        if reset:
            action_exs = task_ex.executions
        else:
            action_exs = db_api.get_action_executions(
                task_execution_id=task_ex.id,
                state=states.ERROR,
                accepted=True)

        for action_ex in action_exs:
            action_ex.accepted = False

    # Explicitly change task state to RUNNING.
    set_task_state(task_ex, states.RUNNING, None, processed=False)

    _run_existing_task(task_ex, task_spec, wf_spec)

    return task_ex
Example 48
def _schedule_noop_action(task_ex, task_spec):
    wf_ex = task_ex.workflow_execution
    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    action_def = e_utils.resolve_action_definition(wf_ex.workflow_name,
                                                   wf_spec.get_name(),
                                                   'std.noop')

    action_ex = _create_action_execution(task_ex, action_def, {})

    target = expr.evaluate_recursively(task_spec.get_target(),
                                       task_ex.in_context)

    scheduler.schedule_call(None,
                            'mistral.engine.task_handler.run_action',
                            0,
                            action_ex_id=action_ex.id,
                            target=target)
Example 49
def _schedule_run_action(task_ex, task_spec, action_input, index):
    wf_ex = task_ex.workflow_execution
    wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

    action_spec_name = task_spec.get_action_name()

    # TODO(rakhmerov): Refactor ad-hoc actions and isolate them.
    action_def = e_utils.resolve_action_definition(
        wf_ex.workflow_name,
        wf_spec.get_name(),
        action_spec_name
    )

    if action_def.spec:
        # Ad-hoc action.
        action_spec = spec_parser.get_action_spec(action_def.spec)

        base_name = action_spec.get_base()

        action_def = e_utils.resolve_action_definition(
            task_ex.workflow_name,
            wf_spec.get_name(),
            base_name
        )

    action_ex = _create_action_execution(
        task_ex, action_def, action_input, index
    )

    target = expr.evaluate_recursively(
        task_spec.get_target(),
        utils.merge_dicts(
            copy.deepcopy(action_input),
            copy.copy(task_ex.in_context)
        )
    )

    scheduler.schedule_call(
        None,
        'mistral.engine.task_handler.run_action',
        0,
        action_ex_id=action_ex.id,
        target=target
    )
Example 50
def create_workflow_execution(wf_identifier, wf_input, description, params):
    params = canonize_workflow_params(params)

    wf_def = db_api.get_workflow_definition(wf_identifier)
    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    eng_utils.validate_input(wf_def, wf_input, wf_spec)

    wf_ex = _create_workflow_execution(
        wf_def,
        wf_spec,
        wf_input,
        description,
        params
    )

    wf_trace.info(wf_ex, "Starting workflow: '%s'" % wf_identifier)

    return wf_ex.id
Example 51
def _schedule_run_workflow(task_ex, task_spec, wf_input, index,
                           parent_wf_spec):
    parent_wf_ex = task_ex.workflow_execution

    wf_spec_name = task_spec.get_workflow_name()

    wf_def = e_utils.resolve_workflow_definition(
        parent_wf_ex.workflow_name,
        parent_wf_spec.get_name(),
        wf_spec_name
    )

    wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

    wf_params = {
        'task_execution_id': task_ex.id,
        'with_items_index': index
    }

    if 'env' in parent_wf_ex.params:
        wf_params['env'] = parent_wf_ex.params['env']

    for k, v in list(wf_input.items()):
        if k not in wf_spec.get_input():
            wf_params[k] = v
            del wf_input[k]

    wf_ex_id, _ = wf_ex_service.create_workflow_execution(
        wf_def.name,
        wf_input,
        "sub-workflow execution",
        wf_params,
        wf_spec
    )

    scheduler.schedule_call(
        None,
        'mistral.engine.task_handler.resume_workflow',
        0,
        wf_ex_id=wf_ex_id,
        env=None
    )
Example 52
    def start_workflow(self, wf_name, wf_input, description='', **params):
        wf_exec_id = None

        try:
            params = self._canonize_workflow_params(params)

            with db_api.transaction():
                wf_def = db_api.get_workflow_definition(wf_name)
                wf_spec = spec_parser.get_workflow_spec(wf_def.spec)

                eng_utils.validate_input(wf_def, wf_input, wf_spec)

                wf_ex = self._create_workflow_execution(
                    wf_def,
                    wf_spec,
                    wf_input,
                    description,
                    params
                )
                wf_exec_id = wf_ex.id

                wf_trace.info(wf_ex, "Starting workflow: '%s'" % wf_name)

                wf_ctrl = wf_base.WorkflowController.get_controller(
                    wf_ex,
                    wf_spec
                )

                self._dispatch_workflow_commands(
                    wf_ex,
                    wf_ctrl.continue_workflow()
                )

                return wf_ex.get_clone()
        except Exception as e:
            LOG.error(
                "Failed to start workflow '%s' id=%s: %s\n%s",
                wf_name, wf_exec_id, e, traceback.format_exc()
            )
            self._fail_workflow(wf_exec_id, e)
            raise e
Example 53
    def _on_task_state_change(self, task_ex, wf_ex, action_ex=None):
        task_spec = spec_parser.get_task_spec(task_ex.spec)
        wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

        if states.is_completed(task_ex.state):
            task_handler.after_task_complete(task_ex, task_spec, wf_spec)

            # Ignore DELAYED state.
            if task_ex.state == states.DELAYED:
                return

            wf_ctrl = wf_base.WorkflowController.get_controller(wf_ex)

            # Calculate commands to process next.
            cmds = wf_ctrl.continue_workflow()

            task_ex.processed = True

            self._dispatch_workflow_commands(wf_ex, cmds)

            self._check_workflow_completion(wf_ex, action_ex, wf_ctrl)
Example 54
    def on_task_state_change(self, task_ex_id, state, state_info=None):
        with db_api.transaction():
            task_ex = db_api.get_task_execution(task_ex_id)
            # TODO(rakhmerov): The method is mostly needed for policy and
            # we are supposed to get the same action execution as when the
            # policy worked.

            wf_ex_id = task_ex.workflow_execution_id
            wf_ex = wf_handler.lock_workflow_execution(wf_ex_id)
            wf_spec = spec_parser.get_workflow_spec(wf_ex.spec)

            wf_trace.info(
                task_ex,
                "Task '%s' [%s -> %s] state_info : %s"
                % (task_ex.name, task_ex.state, state, state_info)
            )

            task_ex.state = state
            task_ex.state_info = state_info

            self._on_task_state_change(task_ex, wf_ex, wf_spec)
Example 55
def create_delay_tolerant_workload(name, workflow_name, workflow_input,
                                   workflow_params=None, deadline=None,
                                   job_duration=None, workflow_id=None):
    try:
        deadline = date_parser.parse(deadline)
    except ValueError as e:
        raise exc.InvalidModelException(str(e))
    if deadline < datetime.datetime.now() + datetime.timedelta(seconds=60):
        raise exc.InvalidModelException(
            'deadline must be at least 1 minute in the future.'
        )

    with db_api.transaction():
        wf_def = db_api.get_workflow_definition(
            workflow_id if workflow_id else workflow_name
        )

        eng_utils.validate_input(
            wf_def,
            workflow_input or {},
            parser.get_workflow_spec(wf_def.spec)
        )

        values = {
            'name': name,
            'deadline': deadline,
            'job_duration': job_duration,
            'workflow_name': wf_def.name,
            'workflow_id': wf_def.id,
            'workflow_input': workflow_input or {},
            'workflow_params': workflow_params or {},
            'scope': 'private',
            'executed': False
        }

        security.add_trust_id(values)

        dtw = db_api.create_delay_tolerant_workload(values)

    return dtw