Example #1
    def test_get_task(self):
        inputs = {'a': 123}
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_route = 0
        task_name = 'task1'
        expected_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                             {'b': False})
        expected_ctx['__current_task'] = {'id': task_name, 'route': task_route}
        expected_ctx['__state'] = conductor.workflow_state.serialize()
        task = conductor.get_task(task_name, task_route)
        self.assertEqual(task['id'], task_name)
        self.assertEqual(task['route'], task_route)
        self.assertDictEqual(task['ctx'], expected_ctx)

        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])

        task_name = 'task2'
        expected_ctx = dict_util.merge_dicts(json_util.deepcopy(expected_ctx),
                                             {'c': 'xyz'})
        expected_ctx['__current_task'] = {'id': task_name, 'route': task_route}
        expected_ctx['__state'] = conductor.workflow_state.serialize()
        task = conductor.get_task(task_name, task_route)
        self.assertEqual(task['id'], task_name)
        self.assertEqual(task['route'], task_route)
        self.assertDictEqual(task['ctx'], expected_ctx)
Example #2
    def test_get_next_tasks_repeatedly(self):
        inputs = {"a": 123}
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_name = "task1"
        next_task_name = "task2"
        expected_init_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                                  {"b": False})
        expected_task_ctx = json_util.deepcopy(expected_init_ctx)
        expected_task_ctx = dict_util.merge_dicts(expected_task_ctx,
                                                  {"c": "xyz"})

        self.assert_next_task(conductor, task_name, expected_init_ctx)

        self.forward_task_statuses(conductor, task_name, [statuses.RUNNING])
        self.assert_next_task(conductor, has_next_task=False)
        self.assert_next_task(conductor, has_next_task=False)

        self.forward_task_statuses(conductor, task_name, [statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        self.forward_task_statuses(conductor, next_task_name,
                                   [statuses.RUNNING])
        self.assert_next_task(conductor, has_next_task=False)
        self.assert_next_task(conductor, has_next_task=False)

        self.forward_task_statuses(conductor, task_name, [statuses.SUCCEEDED])
        self.assert_next_task(conductor, has_next_task=False)
        self.assert_next_task(conductor, has_next_task=False)
Example #3
    def test_get_next_tasks_when_this_task_canceled(self):
        inputs = {"a": 123}
        expected_init_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                                  {"b": False})
        expected_task_ctx = json_util.deepcopy(expected_init_ctx)
        expected_task_ctx = dict_util.merge_dicts(expected_task_ctx,
                                                  {"c": "xyz"})
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_name = "task1"
        next_task_name = "task2"
        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        task_name = "task2"
        next_task_name = "task3"
        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.CANCELING])
        self.assert_next_task(conductor, has_next_task=False)
        self.forward_task_statuses(conductor, task_name, [statuses.CANCELED])
        self.assert_next_task(conductor, has_next_task=False)

        self.assertEqual(conductor.get_workflow_status(), statuses.CANCELED)
Example #4
    def test_get_next_tasks_when_this_task_paused(self):
        inputs = {"a": 123}
        expected_init_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                                  {"b": False})
        expected_task_ctx = json_util.deepcopy(expected_init_ctx)
        expected_task_ctx = dict_util.merge_dicts(expected_task_ctx,
                                                  {"c": "xyz"})
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_name = "task1"
        next_task_name = "task2"
        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        task_name = "task2"
        next_task_name = "task3"
        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.PAUSING])
        self.assert_next_task(conductor, has_next_task=False)
        self.forward_task_statuses(conductor, task_name, [statuses.PAUSED])
        self.assert_next_task(conductor, has_next_task=False)

        # After the previous task is paused, there are no other tasks running,
        # so the workflow is paused. It must be resumed manually.
        self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
        conductor.request_workflow_status(statuses.RESUMING)
        self.assertEqual(conductor.get_workflow_status(), statuses.RESUMING)

        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)
Example #5
    def test_get_next_tasks_repeatedly(self):
        inputs = {'a': 123}
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_name = 'task1'
        next_task_name = 'task2'
        expected_init_ctx = dict_util.merge_dicts(copy.deepcopy(inputs),
                                                  {'b': False})
        expected_task_ctx = dict_util.merge_dicts(
            copy.deepcopy(expected_init_ctx), {'c': 'xyz'})

        self.assert_next_task(conductor, task_name, expected_init_ctx)

        self.forward_task_statuses(conductor, task_name, [statuses.RUNNING])
        self.assert_next_task(conductor, has_next_task=False)
        self.assert_next_task(conductor, has_next_task=False)

        self.forward_task_statuses(conductor, task_name, [statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        self.forward_task_statuses(conductor, next_task_name,
                                   [statuses.RUNNING])
        self.assert_next_task(conductor, has_next_task=False)
        self.assert_next_task(conductor, has_next_task=False)

        self.forward_task_statuses(conductor, task_name, [statuses.SUCCEEDED])
        self.assert_next_task(conductor, has_next_task=False)
        self.assert_next_task(conductor, has_next_task=False)
Example #6
    def flow(self):
        if not self._flow:
            self._flow = TaskFlow()

            # Set any given context as the initial context.
            init_ctx = self.get_workflow_parent_context()

            # Render workflow inputs and merge into the initial context.
            workflow_input = self.get_workflow_input()
            rendered_inputs, input_errors = self.spec.render_input(workflow_input, init_ctx)
            init_ctx = dx.merge_dicts(init_ctx, rendered_inputs, True)

            # Render workflow variables and merge into the initial context.
            rendered_vars, var_errors = self.spec.render_vars(init_ctx)
            init_ctx = dx.merge_dicts(init_ctx, rendered_vars, True)

            # Fail workflow if there are errors.
            errors = input_errors + var_errors

            if errors:
                self.log_errors(errors)
                self.request_workflow_state(states.FAILED)

            # Proceed if there is no issue with rendering of inputs and vars.
            if self.get_workflow_state() not in states.ABENDED_STATES:
                # Set the initial workflow context.
                self._flow.contexts.append({'srcs': [], 'value': init_ctx})

                # Identify the starting tasks and set the pointer to the initial context entry.
                for task_node in self.graph.roots:
                    self._flow.staged[task_node['id']] = {'ctxs': [0], 'ready': True}

        return self._flow
Example #7
    def test_get_task(self):
        inputs = {"a": 123}
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_route = 0
        task_name = "task1"
        expected_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                             {"b": False})
        expected_ctx["__current_task"] = {"id": task_name, "route": task_route}
        expected_ctx["__state"] = conductor.workflow_state.serialize()
        task = conductor.get_task(task_name, task_route)
        self.assertEqual(task["id"], task_name)
        self.assertEqual(task["route"], task_route)
        self.assertDictEqual(task["ctx"], expected_ctx)

        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])

        task_name = "task2"
        expected_ctx = dict_util.merge_dicts(json_util.deepcopy(expected_ctx),
                                             {"c": "xyz"})
        expected_ctx["__current_task"] = {"id": task_name, "route": task_route}
        expected_ctx["__state"] = conductor.workflow_state.serialize()
        task = conductor.get_task(task_name, task_route)
        self.assertEqual(task["id"], task_name)
        self.assertEqual(task["route"], task_route)
        self.assertDictEqual(task["ctx"], expected_ctx)
Example #8
    def test_get_task_transition_contexts(self):
        inputs = {'a': 123, 'b': True}
        conductor = self._prep_conductor(inputs=inputs, state=states.RUNNING)

        # Use task1 to get context for task2 that is staged but not yet running.
        conductor.update_task_flow('task1',
                                   events.ActionExecutionEvent(states.RUNNING))
        conductor.update_task_flow(
            'task1', events.ActionExecutionEvent(states.SUCCEEDED))
        task2_in_ctx = {
            'srcs': [0],
            'value': dx.merge_dicts(copy.deepcopy(inputs), {'c': 'xyz'})
        }
        expected_contexts = {'task2__0': task2_in_ctx}
        self.assertDictEqual(conductor.get_task_transition_contexts('task1'),
                             expected_contexts)

        # Use task1 to get context for task2 that is already running.
        conductor.update_task_flow('task2',
                                   events.ActionExecutionEvent(states.RUNNING))
        task2_in_ctx = {
            'srcs': [0],
            'value': dx.merge_dicts(copy.deepcopy(inputs), {'c': 'xyz'})
        }
        expected_contexts = {'task2__0': task2_in_ctx}
        self.assertDictEqual(conductor.get_task_transition_contexts('task1'),
                             expected_contexts)

        # Use task2 to get context for task3 that is not staged yet.
        self.assertDictEqual(conductor.get_task_transition_contexts('task2'),
                             {})

        # Use task3 that is not yet staged to get context.
        self.assertRaises(exc.InvalidTaskFlowEntry,
                          conductor.get_task_transition_contexts, 'task3')
Example #9
def merge_object_schema(s1, s2, overwrite=True):
    schema = {"type": "object"}

    properties = dict_util.merge_dicts(
        copy.deepcopy(s1.get("properties", {})),
        copy.deepcopy(s2.get("properties", {})),
        overwrite=overwrite,
    )

    if properties:
        schema["properties"] = properties

    required = list(
        set(copy.deepcopy(s1.get("required", []))).union(set(copy.deepcopy(s2.get("required", []))))
    )

    if required:
        schema["required"] = sorted(required)

    additional = s1.get("additionalProperties", True) and s2.get("additionalProperties", True)

    if not additional:
        schema["additionalProperties"] = additional

    pattern_properties = dict_util.merge_dicts(
        copy.deepcopy(s1.get("patternProperties", {})),
        copy.deepcopy(s2.get("patternProperties", {})),
        overwrite=overwrite,
    )

    if pattern_properties:
        schema["patternProperties"] = pattern_properties

    min_properties = (
        s1.get("minProperties", 0)
        if not overwrite
        else max(s1.get("minProperties", 0), s2.get("minProperties", 0))
    )

    if min_properties > 0:
        schema["minProperties"] = min_properties

    max_properties = (
        s1.get("maxProperties", 0)
        if not overwrite
        else min(s1.get("maxProperties", 0), s2.get("maxProperties", 0))
    )

    if max_properties > 0:
        schema["maxProperties"] = max_properties

    return schema
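A minimal usage sketch for merge_object_schema above; the two input schemas below are hypothetical, and the sketch assumes the function and its imports are available.

# Hypothetical schemas, not taken from the source.
user_schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}

audit_schema = {
    "type": "object",
    "properties": {"age": {"type": "integer"}},
    "required": ["age"],
    "additionalProperties": False,
}

# properties are merged, required names are unioned and sorted, and
# additionalProperties only stays permissive if both schemas allow it.
merged = merge_object_schema(user_schema, audit_schema)
# {'type': 'object',
#  'properties': {'name': {'type': 'string'}, 'age': {'type': 'integer'}},
#  'required': ['age', 'name'],
#  'additionalProperties': False}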
Example #10
    def test_get_next_tasks_when_graph_abended(self):
        inputs = {'a': 123}
        expected_init_ctx = dict_util.merge_dicts(copy.deepcopy(inputs), {'b': False})
        expected_task_ctx = dict_util.merge_dicts(copy.deepcopy(expected_init_ctx), {'c': 'xyz'})
        conductor = self._prep_conductor(inputs=inputs, status=statuses.RUNNING)

        task_name = 'task1'
        next_task_name = 'task2'
        self.forward_task_statuses(conductor, task_name, [statuses.RUNNING, statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        conductor.request_workflow_status(statuses.FAILED)
        self.assert_next_task(conductor, has_next_task=False)
Example #11
    def finalize_context(self, next_task_name, task_transition_meta, in_ctx):
        rolling_ctx = copy.deepcopy(in_ctx)
        new_ctx = {}
        errors = []

        task_transition_specs = getattr(self, 'next') or []
        task_transition_spec = task_transition_specs[task_transition_meta[3]
                                                     ['ref']]
        next_task_names = getattr(task_transition_spec, 'do') or []

        if next_task_name in next_task_names:
            for task_publish_spec in (getattr(task_transition_spec, 'publish')
                                      or {}):
                var_name = list(task_publish_spec.items())[0][0]
                default_var_value = list(task_publish_spec.items())[0][1]

                try:
                    rendered_var_value = expr_base.evaluate(
                        default_var_value, rolling_ctx)
                    rolling_ctx[var_name] = rendered_var_value
                    new_ctx[var_name] = rendered_var_value
                except exc.ExpressionEvaluationException as e:
                    errors.append(e)

        out_ctx = dict_util.merge_dicts(in_ctx, new_ctx, overwrite=True)

        for key in list(out_ctx.keys()):
            if key.startswith('__'):
                out_ctx.pop(key)

        return out_ctx, new_ctx, errors
Example #12
    def get_workflow_terminal_context(self):
        if self.get_workflow_status() not in statuses.COMPLETED_STATUSES:
            raise exc.WorkflowContextError(
                'Workflow is not in completed status.')

        wf_term_ctx = {}

        term_tasks = self.workflow_state.get_terminal_tasks()

        if not term_tasks:
            return wf_term_ctx

        first_term_task = term_tasks[0:1][0]
        other_term_tasks = term_tasks[1:]

        wf_term_ctx = self.get_task_context(first_term_task['ctxs']['in'])

        for task in other_term_tasks:
            # Remove the initial context since the first task processed above already
            # included it and we only want to apply the differences.
            in_ctx_idxs = copy.deepcopy(task['ctxs']['in'])
            in_ctx_idxs.remove(0)

            wf_term_ctx = dict_util.merge_dicts(
                wf_term_ctx,
                self.get_task_context(in_ctx_idxs),
                overwrite=True)

        return wf_term_ctx
Example #13
    def get_task_attributes(self, attribute):
        return dict_util.merge_dicts(
            {n: None for n in self._graph.nodes()},
            nx.get_node_attributes(self._graph, attribute),
            overwrite=True,
        )
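A small sketch of the equivalent behavior using plain networkx and a flat dict merge; the graph and attribute values below are hypothetical.

import networkx as nx

g = nx.DiGraph()
g.add_node("task1", status="succeeded")  # only task1 carries the attribute
g.add_node("task2")
g.add_node("task3")

# Every node gets an entry, defaulting to None when the attribute is absent.
# The dict unpacking stands in for dict_util.merge_dicts(..., overwrite=True)
# on these flat dicts.
attributes = {**{n: None for n in g.nodes()}, **nx.get_node_attributes(g, "status")}
# {'task1': 'succeeded', 'task2': None, 'task3': None}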
Example #14
    def finalize_context(self, next_task_name, task_transition_meta, in_ctx):
        criteria = task_transition_meta[3].get("criteria") or []
        expected_criteria_pattern = r"<\% task_status\(\w+\) in \['succeeded'\] \%>"
        new_ctx = {}
        errors = []

        if not re.match(expected_criteria_pattern, criteria[0]):
            return in_ctx, new_ctx, errors

        task_publish_spec = getattr(self, "publish") or {}

        try:
            new_ctx = {
                var_name: expr_base.evaluate(var_expr, in_ctx)
                for var_name, var_expr in six.iteritems(task_publish_spec)
            }
        except exc.ExpressionEvaluationException as e:
            errors.append(str(e))

        out_ctx = dict_util.merge_dicts(in_ctx, new_ctx, overwrite=True)

        for key in list(out_ctx.keys()):
            if key.startswith("__"):
                out_ctx.pop(key)

        return out_ctx, new_ctx, errors
Example #15
    def test_get_start_tasks(self):
        inputs = {'a': 123}
        expected_task_ctx = dict_util.merge_dicts(copy.deepcopy(inputs), {'b': False})
        conductor = self._prep_conductor(inputs=inputs, status=statuses.RUNNING)
        self.assert_next_task(conductor, 'task1', expected_task_ctx)
Example #16
    def get_task_context(self, ctx_idxs):
        ctx = {}

        for ctx_idx in ctx_idxs:
            ctx = dict_util.merge_dicts(ctx, self.workflow_state.contexts[ctx_idx], overwrite=True)

        return ctx
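A sketch of the merge order in get_task_context above, assuming a hypothetical contexts list; later indexes overwrite earlier keys because overwrite=True.

# Hypothetical workflow_state.contexts; index 0 is the initial context.
contexts = [
    {"a": 123, "b": False},       # 0: initial context
    {"c": "xyz"},                 # 1: published by task1
    {"b": True, "d": [1, 2, 3]},  # 2: published by task2
]

# get_task_context([0, 2]) merges the selected entries left to right.
ctx = {}
for ctx_idx in [0, 2]:
    ctx = {**ctx, **contexts[ctx_idx]}  # stand-in for dict_util.merge_dicts on flat dicts
# {'a': 123, 'b': True, 'd': [1, 2, 3]}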
Example #17
    def test_get_start_tasks(self):
        inputs = {"a": 123}
        expected_task_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs), {"b": False})
        conductor = self._prep_conductor(inputs=inputs, status=statuses.RUNNING)
        self.assert_next_task(conductor, "task1", expected_task_ctx)
Example #18
def merge_object_schema(s1, s2, overwrite=True):
    schema = {'type': 'object'}

    properties = dict_utils.merge_dicts(copy.deepcopy(s1.get('properties',
                                                             {})),
                                        copy.deepcopy(s2.get('properties',
                                                             {})),
                                        overwrite=overwrite)

    if properties:
        schema['properties'] = properties

    required = list(
        set(copy.deepcopy(s1.get('required', []))).union(
            set(copy.deepcopy(s2.get('required', [])))))

    if required:
        schema['required'] = sorted(required)

    additional = (s1.get('additionalProperties', True)
                  and s2.get('additionalProperties', True))

    if not additional:
        schema['additionalProperties'] = additional

    pattern_properties = dict_utils.merge_dicts(
        copy.deepcopy(s1.get('patternProperties', {})),
        copy.deepcopy(s2.get('patternProperties', {})),
        overwrite=overwrite)

    if pattern_properties:
        schema['patternProperties'] = pattern_properties

    min_properties = (s1.get('minProperties', 0) if not overwrite else max(
        s1.get('minProperties', 0), s2.get('minProperties', 0)))

    if min_properties > 0:
        schema['minProperties'] = min_properties

    max_properties = (s1.get('maxProperties', 0) if not overwrite else min(
        s1.get('maxProperties', 0), s2.get('maxProperties', 0)))

    if max_properties > 0:
        schema['maxProperties'] = max_properties

    return schema
Example #19
    def test_get_next_tasks_when_graph_abended(self):
        inputs = {"a": 123}
        expected_init_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                                  {"b": False})
        expected_task_ctx = json_util.deepcopy(expected_init_ctx)
        expected_task_ctx = dict_util.merge_dicts(expected_task_ctx,
                                                  {"c": "xyz"})
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        task_name = "task1"
        next_task_name = "task2"
        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        conductor.request_workflow_status(statuses.FAILED)
        self.assert_next_task(conductor, has_next_task=False)
Example #20
    def test_get_next_tasks_when_this_task_canceled(self):
        inputs = {'a': 123}
        expected_init_ctx = dict_util.merge_dicts(copy.deepcopy(inputs), {'b': False})
        expected_task_ctx = dict_util.merge_dicts(copy.deepcopy(expected_init_ctx), {'c': 'xyz'})
        conductor = self._prep_conductor(inputs=inputs, status=statuses.RUNNING)

        task_name = 'task1'
        next_task_name = 'task2'
        self.forward_task_statuses(conductor, task_name, [statuses.RUNNING, statuses.SUCCEEDED])
        self.assert_next_task(conductor, next_task_name, expected_task_ctx)

        task_name = 'task2'
        next_task_name = 'task3'
        self.forward_task_statuses(conductor, task_name, [statuses.RUNNING, statuses.CANCELING])
        self.assert_next_task(conductor, has_next_task=False)
        self.forward_task_statuses(conductor, task_name, [statuses.CANCELED])
        self.assert_next_task(conductor, has_next_task=False)

        self.assertEqual(conductor.get_workflow_status(), statuses.CANCELED)
Example #21
    def workflow_state(self):
        if not self._workflow_state:
            self._workflow_state = WorkflowState(conductor=self)

            # Set any given context as the initial context.
            init_ctx = self.get_workflow_parent_context()

            # Render workflow inputs and merge into the initial context.
            workflow_input = self.get_workflow_input()
            rendered_inputs, input_errors = self.spec.render_input(workflow_input, init_ctx)
            init_ctx = dict_util.merge_dicts(init_ctx, rendered_inputs, True)

            # Render workflow variables and merge into the initial context.
            rendered_vars, var_errors = self.spec.render_vars(init_ctx)
            init_ctx = dict_util.merge_dicts(init_ctx, rendered_vars, True)

            # Fail workflow if there are errors.
            errors = input_errors + var_errors

            if errors:
                self.log_errors(errors)
                self.request_workflow_status(statuses.FAILED)

            # Proceed if there is no issue with rendering of inputs and vars.
            if self.get_workflow_status() not in statuses.ABENDED_STATUSES:
                # Set the initial workflow context.
                self._workflow_state.contexts.append(init_ctx)

                # Set the initial execution route.
                self._workflow_state.routes.append([])

                # Identify the starting tasks and set the pointer to the initial context entry.
                for task_node in self.graph.roots:
                    ctxs, route = [0], 0
                    self._workflow_state.add_staged_task(
                        task_node['id'],
                        route,
                        ctxs=ctxs,
                        ready=True
                    )

        return self._workflow_state
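A sketch of the merge order in the property above, with hypothetical values: the parent context is overlaid by the rendered inputs, then by the rendered vars, because overwrite is True in both merges.

parent_ctx = {"a": 1, "flag": "from-parent"}
rendered_inputs = {"a": 123, "flag": "from-input"}
rendered_vars = {"flag": "from-var", "b": False}

# Dict unpacking stands in for dict_util.merge_dicts(..., True) on flat dicts.
init_ctx = {**parent_ctx, **rendered_inputs}
init_ctx = {**init_ctx, **rendered_vars}
# {'a': 123, 'flag': 'from-var', 'b': False}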
Example #22
    def test_dict_merge_overwrite_false(self):
        left = copy.deepcopy(LEFT)
        right = copy.deepcopy(RIGHT)

        utils.merge_dicts(left, right, overwrite=False)

        expected = {
            'k1': '123',
            'k2': 'abc',
            'k3': {
                'k31': True,
                'k32': 1.0,
                'k33': {
                    'k331': 'foo'
                }
            },
            'k4': 'bar'
        }

        self.assertDictEqual(left, expected)
Example #23
    def test_dict_merge_overwrite(self):
        left = copy.deepcopy(LEFT)
        right = copy.deepcopy(RIGHT)

        dict_util.merge_dicts(left, right)

        expected = {
            'k1': '123',
            'k2': 'def',
            'k3': {
                'k31': True,
                'k32': 2.0,
                'k33': {
                    'k331': 'foo'
                }
            },
            'k4': 'bar'
        }

        self.assertDictEqual(left, expected)
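The LEFT and RIGHT fixtures are defined elsewhere in the test module; a pair consistent with both expected results above would look roughly like this (an assumption, not the actual fixtures).

# Hypothetical fixtures consistent with the two merge tests above.
LEFT = {
    'k1': '123',
    'k2': 'abc',
    'k3': {'k31': True, 'k32': 1.0, 'k33': {'k331': 'foo'}},
}

RIGHT = {
    'k2': 'def',
    'k3': {'k32': 2.0},
    'k4': 'bar',
}

# With overwrite=False the left-hand values win on conflicts and only the new
# key k4 is added; with overwrite=True the right-hand values (k2='def',
# k3.k32=2.0) replace the left-hand ones.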
Example #24
    def test_get_workflow_terminal_context_when_workflow_completed(self):
        inputs = {'a': 123, 'b': True}
        expected_init_ctx = copy.deepcopy(inputs)
        expected_term_ctx = dict_util.merge_dicts(copy.deepcopy(expected_init_ctx), {'c': 'xyz'})
        conductor = self._prep_conductor(inputs=inputs, status=statuses.RUNNING)

        for i in range(1, 6):
            task_name = 'task' + str(i)
            self.forward_task_statuses(conductor, task_name, [statuses.RUNNING, statuses.SUCCEEDED])

        self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
        self.assertDictEqual(conductor.get_workflow_terminal_context(), expected_term_ctx)
Example #25
    def _converge_task_contexts(self, ctx_idxs):
        if len(ctx_idxs) <= 0 or all(x == ctx_idxs[0] for x in ctx_idxs):
            return self.flow.contexts[ctx_idxs[0]]

        ctx_srcs = []
        merged_ctx = {}

        for i in ctx_idxs:
            ctx_entry = self.flow.contexts[i]
            merged_ctx = dx.merge_dicts(merged_ctx, copy.deepcopy(ctx_entry['value']), True)
            ctx_srcs.extend(ctx_entry['srcs'])

        return {'srcs': list(set(ctx_srcs)), 'value': merged_ctx}
Example #26
    def render_input(self, runtime_inputs, in_ctx=None):
        input_specs = getattr(self, 'input') or []
        default_inputs = dict([list(i.items())[0] for i in input_specs if isinstance(i, dict)])
        merged_inputs = dict_util.merge_dicts(default_inputs, runtime_inputs, True)
        rendered_inputs = {}
        errors = []

        try:
            rendered_inputs = expr_base.evaluate(merged_inputs, {})
        except exc.ExpressionEvaluationException as e:
            errors.append(str(e))

        return rendered_inputs, errors
Example #27
    def _update_workflow_terminal_context(self, ctx_diff, task_flow_idx):
        term_ctx_idx = self._get_workflow_terminal_context_idx()

        if not term_ctx_idx:
            term_ctx_val = copy.deepcopy(ctx_diff)
            term_ctx_entry = {'src': [task_flow_idx], 'term': True, 'value': term_ctx_val}
            self.flow.contexts.append(term_ctx_entry)
            term_ctx_idx = len(self.flow.contexts) - 1
        else:
            term_ctx_entry = self.flow.contexts[term_ctx_idx]
            if task_flow_idx not in term_ctx_entry['src']:
                term_ctx_val = dx.merge_dicts(term_ctx_entry['value'], ctx_diff, True)
                term_ctx_entry['src'].append(task_flow_idx)
                term_ctx_entry['value'] = term_ctx_val
Example #28
    def test_init_with_context(self):
        context = {'parent': {'ex_id': '12345'}}
        inputs = {'a': 123, 'b': True}
        init_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                         json_util.deepcopy(context))

        conductor = self._prep_conductor(context=context, inputs=inputs)

        # Serialize and check.
        data = conductor.serialize()

        expected_data = {
            'spec': conductor.spec.serialize(),
            'graph': conductor.graph.serialize(),
            'context': context,
            'input': inputs,
            'output': None,
            'errors': [],
            'log': [],
            'state': {
                'status':
                statuses.UNSET,
                'routes': [[]],
                'staged': [{
                    'id': 'task1',
                    'route': 0,
                    'prev': {},
                    'ctxs': {
                        'in': [0]
                    },
                    'ready': True
                }],
                'tasks': {},
                'sequence': [],
                'contexts': [init_ctx]
            }
        }

        self.assertDictEqual(data, expected_data)

        # Deserialize and check.
        conductor = conducting.WorkflowConductor.deserialize(data)

        self.assertIsInstance(conductor.spec, native_specs.WorkflowSpec)
        self.assertEqual(conductor.workflow_state.status, statuses.UNSET)
        self.assertEqual(conductor.get_workflow_status(), statuses.UNSET)
        self.assertIsInstance(conductor.graph, graphing.WorkflowGraph)
        self.assertEqual(len(conductor.graph._graph.node), 5)
        self.assertIsInstance(conductor.workflow_state,
                              conducting.WorkflowState)
Example #29
    def test_init_with_context(self):
        context = {"parent": {"ex_id": "12345"}}
        inputs = {"a": 123, "b": True}
        init_ctx = dict_util.merge_dicts(json_util.deepcopy(inputs),
                                         json_util.deepcopy(context))

        conductor = self._prep_conductor(context=context, inputs=inputs)

        # Serialize and check.
        data = conductor.serialize()

        expected_data = {
            "spec": conductor.spec.serialize(),
            "graph": conductor.graph.serialize(),
            "context": context,
            "input": inputs,
            "output": None,
            "errors": [],
            "log": [],
            "state": {
                "status":
                statuses.UNSET,
                "routes": [[]],
                "staged": [{
                    "id": "task1",
                    "route": 0,
                    "prev": {},
                    "ctxs": {
                        "in": [0]
                    },
                    "ready": True
                }],
                "tasks": {},
                "sequence": [],
                "contexts": [init_ctx],
            },
        }

        self.assertDictEqual(data, expected_data)

        # Deserialize and check.
        conductor = conducting.WorkflowConductor.deserialize(data)

        self.assertIsInstance(conductor.spec, native_specs.WorkflowSpec)
        self.assertEqual(conductor.workflow_state.status, statuses.UNSET)
        self.assertEqual(conductor.get_workflow_status(), statuses.UNSET)
        self.assertIsInstance(conductor.graph, graphing.WorkflowGraph)
        self.assertEqual(len(conductor.graph._graph.node), 5)
        self.assertIsInstance(conductor.workflow_state,
                              conducting.WorkflowState)
Example #30
    def test_get_task_transition_contexts(self):
        inputs = {"a": 123, "b": True}
        expected_init_ctx = json_util.deepcopy(inputs)
        expected_task_ctx = json_util.deepcopy(expected_init_ctx)
        expected_task_ctx = dict_util.merge_dicts(expected_task_ctx,
                                                  {"c": "xyz"})
        conductor = self._prep_conductor(inputs=inputs,
                                         status=statuses.RUNNING)

        # Get context for task2 that is staged but not yet running.
        task_route = 0
        task_name = "task1"
        next_task_name = "task2"
        self.forward_task_statuses(conductor, task_name,
                                   [statuses.RUNNING, statuses.SUCCEEDED])

        next_task_in_ctx = expected_task_ctx
        expected_task_transition_ctx = {
            "%s__t0" % next_task_name: next_task_in_ctx
        }

        self.assertDictEqual(
            conductor.get_task_transition_contexts(task_name, task_route),
            expected_task_transition_ctx,
        )

        # Get context for task2 that is already running.
        task_name = "task2"
        next_task_name = "task3"
        self.forward_task_statuses(conductor, task_name, [statuses.RUNNING])

        expected_task_transition_ctx = {}

        self.assertDictEqual(
            conductor.get_task_transition_contexts(task_name, task_route),
            expected_task_transition_ctx,
        )

        # Get context for task3 that is not staged yet.
        self.assertDictEqual(
            conductor.get_task_transition_contexts(task_name, task_route), {})

        # Get transition context for task3 that has not yet run.
        self.assertRaises(
            exc.InvalidTaskStateEntry,
            conductor.get_task_transition_contexts,
            next_task_name,
            task_route,
        )