Example #1
    def test_workflow_spec_cache_update_via_workbook_service(self):
        wb_text = """
        version: '2.0'

        name: wb

        workflows:
          wf:
            tasks:
              task1:
                action: std.echo output="Echo"
        """

        wb_service.create_workbook_v2(wb_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf = db_api.get_workflow_definition('wb.wf')

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf.id, wf.updated_at)

        self.assertEqual(1, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        # Now update workflow definition and check that cache is updated too.

        wb_text = """
        version: '2.0'

        name: wb

        workflows:
          wf:
            tasks:
              task1:
                action: std.echo output="1"

              task2:
                action: std.echo output="2"
        """

        wb_service.update_workbook_v2(wb_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf = db_api.get_workflow_definition(wf.id)

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf.id, wf.updated_at)

        self.assertEqual(2, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
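
The assertions in this example hinge on the definition spec cache being keyed by both the definition id and its updated_at timestamp: updating the workbook produces a new (id, updated_at) pair, so a fresh entry is added alongside the stale one and the cache size grows from 1 to 2. A minimal sketch of that keying idea, assuming a plain dict-based cache; the names below are illustrative, not Mistral's actual spec_parser internals.

import collections

# Hypothetical module-level cache keyed by (definition id, updated_at).
_definition_spec_cache = collections.OrderedDict()


def get_spec_by_definition_id(def_id, updated_at, parse_fn, definition_text):
    """Return the cached parse result for (def_id, updated_at).

    Because updated_at is part of the key, an updated definition misses the
    cache and is re-parsed, which is why the cache size grows after the
    workbook is updated.
    """
    key = (def_id, updated_at)

    if key not in _definition_spec_cache:
        _definition_spec_cache[key] = parse_fn(definition_text)

    return _definition_spec_cache[key]


def definition_spec_cache_size():
    """Return the number of cached definition specs."""
    return len(_definition_spec_cache)
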
Example #2
    def test_workflow_spec_cache_update_via_workflow_service(self):
        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
        """

        wfs = wf_service.create_workflows(wf_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id,
            wfs[0].updated_at
        )

        self.assertEqual(1, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        # Now update workflow definition and check that cache is updated too.

        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="1"

            task2:
              action: std.echo output="2"
        """

        wfs = wf_service.update_workflows(wf_text)

        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id,
            wfs[0].updated_at
        )

        self.assertEqual(2, len(wf_spec.get_tasks()))
        self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
Example #3
    def schedule(self, input_dict, target, index=0, desc='', safe_rerun=False):
        assert not self.action_ex

        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec_by_execution_id(
            parent_wf_ex.id)

        task_spec = spec_parser.get_task_spec(self.task_ex.spec)

        wf_spec_name = task_spec.get_workflow_name()

        wf_def = engine_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name, parent_wf_spec.get_name(),
            wf_spec_name)

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id, wf_def.updated_at)

        wf_params = {'task_execution_id': self.task_ex.id, 'index': index}

        if 'env' in parent_wf_ex.params:
            wf_params['env'] = parent_wf_ex.params['env']
            wf_params['evaluate_env'] = parent_wf_ex.params.get('evaluate_env')

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        wf_handler.start_workflow(wf_def.id, input_dict,
                                  "sub-workflow execution", wf_params)
Example #4
    def start(self, wf_def, wf_ex_id, input_dict, desc='', params=None):
        """Start workflow.

        :param wf_def: Workflow definition.
        :param wf_ex_id: Workflow execution id.
        :param input_dict: Workflow input.
        :param desc: Workflow execution description.
        :param params: Workflow type specific parameters.

        :raises
        """

        assert not self.wf_ex

        # New workflow execution.
        self.wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id, wf_def.updated_at)

        wf_trace.info(
            self.wf_ex, 'Starting workflow [name=%s, input=%s]' %
            (wf_def.name, utils.cut(input_dict)))

        self.validate_input(input_dict)

        self._create_execution(wf_def, wf_ex_id,
                               self.prepare_input(input_dict), desc, params)

        self.set_state(states.RUNNING)

        wf_ctrl = wf_base.get_controller(self.wf_ex, self.wf_spec)

        dispatcher.dispatch_workflow_commands(self.wf_ex,
                                              wf_ctrl.continue_workflow())
Example #5
    def test_workflow_spec_cache_update_via_workflow_service(self):
        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
        """

        wfs = wf_service.create_workflows(wf_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id, wfs[0].updated_at)

        self.assertEqual(1, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        # Now update workflow definition and check that cache is updated too.

        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="1"

            task2:
              action: std.echo output="2"
        """

        wfs = wf_service.update_workflows(wf_text)

        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id, wfs[0].updated_at)

        self.assertEqual(2, len(wf_spec.get_tasks()))
        self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
Example #6
    def schedule(self, input_dict, target, index=0, desc='', safe_rerun=False,
                 timeout=None):
        assert not self.action_ex

        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec_by_execution_id(
            parent_wf_ex.id
        )

        wf_def = engine_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name,
            parent_wf_spec.get_name(),
            namespace=parent_wf_ex.params['namespace'],
            wf_spec_name=self.wf_name
        )

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id,
            wf_def.updated_at
        )

        # If the parent has a root_execution_id, it must be a sub-workflow. So
        # we should propagate that ID down. Otherwise the parent must be the
        # root execution and we should use the parent's ID.
        root_execution_id = parent_wf_ex.root_execution_id or parent_wf_ex.id

        wf_params = {
            'root_execution_id': root_execution_id,
            'task_execution_id': self.task_ex.id,
            'index': index,
            'namespace': parent_wf_ex.params['namespace']
        }

        if 'env' in parent_wf_ex.params:
            wf_params['env'] = parent_wf_ex.params['env']
            wf_params['evaluate_env'] = parent_wf_ex.params.get('evaluate_env')

        if 'notify' in parent_wf_ex.params:
            wf_params['notify'] = parent_wf_ex.params['notify']

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        wf_handler.start_workflow(
            wf_def.id,
            wf_def.namespace,
            None,
            input_dict,
            "sub-workflow execution",
            wf_params
        )
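
The loop near the end of schedule() moves every key that the sub-workflow does not declare as an input out of input_dict and into wf_params. A standalone sketch of that split, returning two new dicts instead of mutating in place; split_inputs and declared_inputs are illustrative names, not part of Mistral's API.

def split_inputs(input_dict, declared_inputs):
    """Separate declared sub-workflow inputs from extra workflow parameters."""
    inputs = {}
    params = {}

    for key, value in input_dict.items():
        if key in declared_inputs:
            inputs[key] = value
        else:
            params[key] = value

    return inputs, params
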
Example #7
def create_event_trigger(name, exchange, topic, event, workflow_id,
                         scope='private', workflow_input=None,
                         workflow_params=None):
    with db_api.transaction():
        wf_def = db_api.get_workflow_definition_by_id(workflow_id)

        wf_spec = parser.get_workflow_spec_by_definition_id(
            wf_def.id,
            wf_def.updated_at
        )

        # TODO(rakhmerov): Use Workflow object here instead of utils.
        eng_utils.validate_input(
            wf_spec.get_input(),
            workflow_input,
            wf_spec.get_name(),
            wf_spec.__class__.__name__
        )

        values = {
            'name': name,
            'workflow_id': workflow_id,
            'workflow_input': workflow_input or {},
            'workflow_params': workflow_params or {},
            'exchange': exchange,
            'topic': topic,
            'event': event,
            'scope': scope,
        }

        security.add_trust_id(values)

        trig = db_api.create_event_trigger(values)

        trigs = db_api.get_event_triggers(insecure=True, exchange=exchange,
                                          topic=topic)
        events = [t.event for t in trigs]

        # NOTE(kong): Send RPC message within the db transaction, rollback if
        # any error occurs.
        trig_dict = trig.to_dict()
        trig_dict['workflow_namespace'] = wf_def.namespace

        rpc.get_event_engine_client().create_event_trigger(
            trig_dict,
            events
        )

    return trig
Example #8
def create_event_trigger(name,
                         exchange,
                         topic,
                         event,
                         workflow_id,
                         scope='private',
                         workflow_input=None,
                         workflow_params=None):
    with db_api.transaction():
        wf_def = db_api.get_workflow_definition_by_id(workflow_id)

        wf_spec = parser.get_workflow_spec_by_definition_id(
            wf_def.id, wf_def.updated_at)

        # TODO(rakhmerov): Use Workflow object here instead of utils.
        eng_utils.validate_input(wf_spec.get_input(), workflow_input,
                                 wf_spec.get_name(),
                                 wf_spec.__class__.__name__)

        values = {
            'name': name,
            'workflow_id': workflow_id,
            'workflow_input': workflow_input or {},
            'workflow_params': workflow_params or {},
            'exchange': exchange,
            'topic': topic,
            'event': event,
            'scope': scope,
        }

        security.add_trust_id(values)

        trig = db_api.create_event_trigger(values)

        trigs = db_api.get_event_triggers(insecure=True,
                                          exchange=exchange,
                                          topic=topic)
        events = [t.event for t in trigs]

        # NOTE(kong): Send RPC message within the db transaction, rollback if
        # any error occurs.
        trig_dict = trig.to_dict()
        trig_dict['workflow_namespace'] = wf_def.namespace

        rpc.get_event_engine_client().create_event_trigger(trig_dict, events)

    return trig
Example #9
    def start(self, wf_def, wf_ex_id, input_dict, desc='', params=None):
        """Start workflow.

        :param wf_def: Workflow definition.
        :param wf_ex_id: Workflow execution id.
        :param input_dict: Workflow input.
        :param desc: Workflow execution description.
        :param params: Workflow type specific parameters.

        :raises
        """

        assert not self.wf_ex

        # New workflow execution.
        self.wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id,
            wf_def.updated_at
        )

        wf_trace.info(
            self.wf_ex,
            'Starting workflow [name=%s, input=%s]' %
            (wf_def.name, utils.cut(input_dict))
        )

        self.validate_input(input_dict)

        self._create_execution(
            wf_def,
            wf_ex_id,
            self.prepare_input(input_dict),
            desc,
            params
        )

        self.set_state(states.RUNNING)

        # Publish event as soon as state is set to running.
        self.notify(events.WORKFLOW_LAUNCHED)

        wf_ctrl = wf_base.get_controller(self.wf_ex, self.wf_spec)

        dispatcher.dispatch_workflow_commands(
            self.wf_ex,
            wf_ctrl.continue_workflow()
        )
Example #10
    def _prepare_test(self, wf_text):
        wfs = wf_service.create_workflows(wf_text)
        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id, wfs[0].updated_at)

        wf_ex = models.WorkflowExecution(id='1-2-3-4',
                                         spec=wf_spec.to_dict(),
                                         state=states.RUNNING,
                                         workflow_id=wfs[0].id,
                                         input={},
                                         params={},
                                         context={})

        self.wf_ex = wf_ex
        self.wf_spec = wf_spec

        return wf_ex
Example #11
    def _prepare_test(self, wf_text):
        wfs = wf_service.create_workflows(wf_text)
        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id,
            wfs[0].updated_at
        )

        wf_ex = models.WorkflowExecution(
            id='1-2-3-4',
            spec=wf_spec.to_dict(),
            state=states.RUNNING,
            workflow_id=wfs[0].id,
            input={},
            params={},
            context={}
        )

        self.wf_ex = wf_ex
        self.wf_spec = wf_spec

        return wf_ex
Example #12
    def test_workflow_spec_caching(self):
        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
        """

        wfs = wf_service.create_workflows(wf_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id, wfs[0].updated_at)

        self.assertIsNotNone(wf_spec)
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())
Example #13
    def test_workflow_spec_caching(self):
        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
        """

        wfs = wf_service.create_workflows(wf_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id,
            wfs[0].updated_at
        )

        self.assertIsNotNone(wf_spec)
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())
Example #14
    def test_cache_workflow_spec_by_execution_id(self):
        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
        """

        wfs = wf_service.create_workflows(wf_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf_def = wfs[0]

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id, wf_def.updated_at)

        self.assertEqual(1, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        with db_api.transaction():
            wf_ex = db_api.create_workflow_execution({
                'id': '1-2-3-4',
                'name': 'wf',
                'workflow_id': wf_def.id,
                'spec': wf_spec.to_dict(),
                'state': states.RUNNING
            })

            # Check that we can get a valid spec by execution id.
            wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
                wf_ex.id)

        self.assertEqual(1, len(wf_spec_by_exec_id.get_tasks()))

        # Now update workflow definition and check that cache is updated too.

        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="1"

            task2:
              action: std.echo output="2"
        """

        wfs = wf_service.update_workflows(wf_text)

        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id, wfs[0].updated_at)

        self.assertEqual(2, len(wf_spec.get_tasks()))
        self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_execution_spec_cache_size())

        # Now finally update execution cache and check that we can
        # get a valid spec by execution id.
        spec_parser.cache_workflow_spec_by_execution_id(wf_ex.id, wf_spec)

        wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
            wf_ex.id)

        self.assertEqual(2, len(wf_spec_by_exec_id.get_tasks()))
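
This test exercises two separate caches: the definition cache keyed by (definition id, updated_at) and an execution-scoped cache keyed by execution id alone. The execution cache is only refreshed explicitly via cache_workflow_spec_by_execution_id(), which is why the test re-caches the spec by hand after updating the definition. A minimal sketch of that second cache, with illustrative names rather than Mistral's actual internals:

# Hypothetical execution-scoped cache, keyed by workflow execution id only.
_execution_spec_cache = {}


def cache_spec_by_execution_id(wf_ex_id, wf_spec):
    """Explicitly (re)cache a parsed spec for a given execution id."""
    _execution_spec_cache[wf_ex_id] = wf_spec


def get_spec_by_execution_id(wf_ex_id):
    """Return the cached spec for an execution, or None on a miss.

    The real helper would fall back to the spec stored on the execution
    record itself; this sketch only shows the explicit-refresh behaviour.
    """
    return _execution_spec_cache.get(wf_ex_id)
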
Example #15
    def schedule(self,
                 input_dict,
                 target,
                 index=0,
                 desc='',
                 safe_rerun=False,
                 timeout=None):
        assert not self.action_ex

        self.validate_input(input_dict)

        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec_by_execution_id(
            parent_wf_ex.id)

        wf_def = engine_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name,
            parent_wf_spec.get_name(),
            namespace=parent_wf_ex.params['namespace'],
            wf_spec_name=self.wf_name)

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id, wf_def.updated_at)

        # If the parent has a root_execution_id, it must be a sub-workflow. So
        # we should propagate that ID down. Otherwise the parent must be the
        # root execution and we should use the parent's ID.
        root_execution_id = parent_wf_ex.root_execution_id or parent_wf_ex.id

        wf_params = {
            'root_execution_id': root_execution_id,
            'task_execution_id': self.task_ex.id,
            'index': index,
            'namespace': parent_wf_ex.params['namespace']
        }

        if 'notify' in parent_wf_ex.params:
            wf_params['notify'] = parent_wf_ex.params['notify']

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        if cfg.CONF.engine.start_subworkflows_via_rpc:

            def _start_subworkflow():
                rpc.get_engine_client().start_workflow(
                    wf_def.id,
                    wf_def.namespace,
                    None,
                    input_dict,
                    "sub-workflow execution",
                    async_=True,
                    **wf_params)

            post_tx_queue.register_operation(_start_subworkflow)
        else:
            wf_handler.start_workflow(wf_def.id, wf_def.namespace, None,
                                      input_dict, "sub-workflow execution",
                                      wf_params)
Example #16
    def schedule(self, input_dict, target, index=0, desc='', safe_rerun=False,
                 timeout=None):
        assert not self.action_ex

        self.validate_input(input_dict)

        parent_wf_ex = self.task_ex.workflow_execution
        parent_wf_spec = spec_parser.get_workflow_spec_by_execution_id(
            parent_wf_ex.id
        )

        wf_def = engine_utils.resolve_workflow_definition(
            parent_wf_ex.workflow_name,
            parent_wf_spec.get_name(),
            namespace=parent_wf_ex.params['namespace'],
            wf_spec_name=self.wf_name
        )

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id,
            wf_def.updated_at
        )

        # If the parent has a root_execution_id, it must be a sub-workflow. So
        # we should propagate that ID down. Otherwise the parent must be the
        # root execution and we should use the parent's ID.
        root_execution_id = parent_wf_ex.root_execution_id or parent_wf_ex.id

        wf_params = {
            'root_execution_id': root_execution_id,
            'task_execution_id': self.task_ex.id,
            'index': index,
            'namespace': parent_wf_ex.params['namespace']
        }

        if 'notify' in parent_wf_ex.params:
            wf_params['notify'] = parent_wf_ex.params['notify']

        for k, v in list(input_dict.items()):
            if k not in wf_spec.get_input():
                wf_params[k] = v
                del input_dict[k]

        if cfg.CONF.engine.start_subworkflows_via_rpc:
            def _start_subworkflow():
                rpc.get_engine_client().start_workflow(
                    wf_def.id,
                    wf_def.namespace,
                    None,
                    input_dict,
                    "sub-workflow execution",
                    async_=True,
                    **wf_params
                )

            post_tx_queue.register_operation(_start_subworkflow)
        else:
            wf_handler.start_workflow(
                wf_def.id,
                wf_def.namespace,
                None,
                input_dict,
                "sub-workflow execution",
                wf_params
            )
Example #17
def create_cron_trigger(name,
                        workflow_name,
                        workflow_input,
                        workflow_params=None,
                        pattern=None,
                        first_time=None,
                        count=None,
                        start_time=None,
                        workflow_id=None):
    if not start_time:
        start_time = datetime.datetime.utcnow()

    if isinstance(first_time, six.string_types):
        try:
            first_time = datetime.datetime.strptime(first_time,
                                                    '%Y-%m-%d %H:%M')
        except ValueError as e:
            raise exc.InvalidModelException(str(e))

    validate_cron_trigger_input(pattern, first_time, count)

    if first_time:
        next_time = first_time

        if not (pattern or count):
            count = 1
    else:
        next_time = get_next_execution_time(pattern, start_time)

    with db_api.transaction():
        wf_def = db_api.get_workflow_definition(
            workflow_id if workflow_id else workflow_name)

        wf_spec = parser.get_workflow_spec_by_definition_id(
            wf_def.id, wf_def.updated_at)

        # TODO(rakhmerov): Use Workflow object here instead of utils.
        eng_utils.validate_input(wf_spec.get_input(), workflow_input,
                                 wf_spec.get_name(),
                                 wf_spec.__class__.__name__)

        trigger_parameters = {
            'name': name,
            'pattern': pattern,
            'first_execution_time': first_time,
            'next_execution_time': next_time,
            'remaining_executions': count,
            'workflow_name': wf_def.name,
            'workflow_id': wf_def.id,
            'workflow_input': workflow_input or {},
            'workflow_params': workflow_params or {},
            'scope': 'private'
        }

        security.add_trust_id(trigger_parameters)

        try:
            trig = db_api.create_cron_trigger(trigger_parameters)
        except Exception:
            # Delete trust before raising exception.
            security.delete_trust(trigger_parameters.get('trust_id'))
            raise

    return trig
Example #18
def create_cron_trigger(name, workflow_name, workflow_input,
                        workflow_params=None, pattern=None, first_time=None,
                        count=None, start_time=None, workflow_id=None):
    if not start_time:
        start_time = datetime.datetime.utcnow()

    if isinstance(first_time, six.string_types):
        try:
            first_time = datetime.datetime.strptime(
                first_time,
                '%Y-%m-%d %H:%M'
            )
        except ValueError as e:
            raise exc.InvalidModelException(str(e))

    validate_cron_trigger_input(pattern, first_time, count)

    if first_time:
        next_time = first_time

        if not (pattern or count):
            count = 1
    else:
        next_time = get_next_execution_time(pattern, start_time)

    with db_api.transaction():
        wf_def = db_api.get_workflow_definition(
            workflow_id if workflow_id else workflow_name
        )

        wf_spec = parser.get_workflow_spec_by_definition_id(
            wf_def.id,
            wf_def.updated_at
        )

        # TODO(rakhmerov): Use Workflow object here instead of utils.
        eng_utils.validate_input(
            wf_spec.get_input(),
            workflow_input,
            wf_spec.get_name(),
            wf_spec.__class__.__name__
        )

        trigger_parameters = {
            'name': name,
            'pattern': pattern,
            'first_execution_time': first_time,
            'next_execution_time': next_time,
            'remaining_executions': count,
            'workflow_name': wf_def.name,
            'workflow_id': wf_def.id,
            'workflow_input': workflow_input or {},
            'workflow_params': workflow_params or {},
            'scope': 'private'
        }

        security.add_trust_id(trigger_parameters)

        try:
            trig = db_api.create_cron_trigger(trigger_parameters)
        except Exception:
            # Delete trust before raising exception.
            security.delete_trust(trigger_parameters.get('trust_id'))
            raise

    return trig
Example #19
    def test_workflow_spec_cache_update_via_workbook_service(self):
        wb_text = """
        version: '2.0'

        name: wb

        workflows:
          wf:
            tasks:
              task1:
                action: std.echo output="Echo"
        """

        wb_service.create_workbook_v2(wb_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf = db_api.get_workflow_definition('wb.wf')

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf.id,
            wf.updated_at
        )

        self.assertEqual(1, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        # Now update workflow definition and check that cache is updated too.

        wb_text = """
        version: '2.0'

        name: wb

        workflows:
          wf:
            tasks:
              task1:
                action: std.echo output="1"

              task2:
                action: std.echo output="2"
        """

        wb_service.update_workbook_v2(wb_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf = db_api.get_workflow_definition(wf.id)

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf.id,
            wf.updated_at
        )

        self.assertEqual(2, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
Example #20
    def test_cache_workflow_spec_by_execution_id(self):
        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
        """

        wfs = wf_service.create_workflows(wf_text)

        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

        wf_def = wfs[0]

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wf_def.id,
            wf_def.updated_at
        )

        self.assertEqual(1, len(wf_spec.get_tasks()))
        self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf_ex = db_api.create_workflow_execution({
            'id': '1-2-3-4',
            'name': 'wf',
            'workflow_id': wf_def.id,
            'spec': wf_spec.to_dict(),
            'state': states.RUNNING
        })

        # Check that we can get a valid spec by execution id.

        wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
            wf_ex.id
        )

        self.assertEqual(1, len(wf_spec_by_exec_id.get_tasks()))

        # Now update workflow definition and check that cache is updated too.

        wf_text = """
        version: '2.0'

        wf:
          tasks:
            task1:
              action: std.echo output="1"

            task2:
              action: std.echo output="2"
        """

        wfs = wf_service.update_workflows(wf_text)

        self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

        wf_spec = spec_parser.get_workflow_spec_by_definition_id(
            wfs[0].id,
            wfs[0].updated_at
        )

        self.assertEqual(2, len(wf_spec.get_tasks()))
        self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
        self.assertEqual(1, spec_parser.get_wf_execution_spec_cache_size())

        # Now finally update execution cache and check that we can
        # get a valid spec by execution id.
        spec_parser.cache_workflow_spec_by_execution_id(wf_ex.id, wf_spec)

        wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
            wf_ex.id
        )

        self.assertEqual(2, len(wf_spec_by_exec_id.get_tasks()))