Example #1
def test_simple_schedule_pipeline():
    start_event_a_id = node_uniqid()
    act_b_id = node_uniqid()
    end_event_c_id = node_uniqid()

    start_event = EmptyStartEvent(start_event_a_id)
    act = ServiceActivity(act_b_id,
                          service=ScheduleService(),
                          data=DataObject({}))
    end_event = EmptyEndEvent(end_event_c_id)

    flow_ab = SequenceFlow('ab', start_event, act)
    flow_bc = SequenceFlow('bc', act, end_event)

    start_event.outgoing.add_flow(flow_ab)
    act.incoming.add_flow(flow_ab)
    act.outgoing.add_flow(flow_bc)
    end_event.incoming.add_flow(flow_bc)

    spec = PipelineSpec(start_event,
                        end_event, [flow_ab, flow_bc], [act], [],
                        data=DataObject({}),
                        context=context.Context({}))
    pipeline = Pipeline(node_uniqid(), spec)
    run_pipeline(pipeline)
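The wiring boilerplate in this fixture (and repeated in Examples #2 and #3 below) always does the same thing: create a SequenceFlow, then register it on the source's outgoing set and the target's incoming set. A minimal sketch of a helper that captures the pattern, assuming only the SequenceFlow, add_flow, and node_uniqid APIs already visible in these examples (wire_linear itself is hypothetical, not part of the library):

def wire_linear(*nodes):
    # Hypothetical helper: chain the given nodes with SequenceFlows and
    # register each flow on both of its endpoints, as the tests do by hand.
    flows = []
    for i in range(len(nodes) - 1):
        source, target = nodes[i], nodes[i + 1]
        flow = SequenceFlow(node_uniqid(), source, target)
        source.outgoing.add_flow(flow)
        target.incoming.add_flow(flow)
        flows.append(flow)
    return flows

With it, the wiring in test_simple_schedule_pipeline would reduce to flow_ab, flow_bc = wire_linear(start_event, act, end_event).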
Example #2
    def subprocess():
        start_event_a_id = node_uniqid()
        act_b_id = node_uniqid()
        end_event_c_id = node_uniqid()

        start_event = EmptyStartEvent(start_event_a_id)
        act = ServiceActivity(act_b_id,
                              service=RetryTestService(),
                              data=DataObject({'data': '0'}))
        end_event = EmptyEndEvent(end_event_c_id)

        flow_ab = SequenceFlow('ab', start_event, act)
        flow_bc = SequenceFlow('bc', act, end_event)

        start_event.outgoing.add_flow(flow_ab)
        act.incoming.add_flow(flow_ab)
        act.outgoing.add_flow(flow_bc)
        end_event.incoming.add_flow(flow_bc)

        spec = PipelineSpec(start_event,
                            end_event, [flow_ab, flow_bc], [act], [],
                            data=DataObject({}),
                            context=context.Context({}))
        pipeline = Pipeline(node_uniqid(), spec)
        return pipeline
Example #3
def get_simple_pipeline():
    start_event_a_id = node_uniqid()
    act_b_id = node_uniqid()
    end_event_c_id = node_uniqid()

    start_event = EmptyStartEvent(start_event_a_id)
    act = ServiceActivity(id=act_b_id,
                          service=RetryTestService(),
                          data=DataObject({
                              'data': '0',
                              'value2': '2',
                              'value3': '3',
                              'timing': 1000
                          }))
    end_event = EmptyEndEvent(end_event_c_id)

    flow_ab = SequenceFlow('ab', start_event, act)
    flow_bc = SequenceFlow('bc', act, end_event)

    start_event.outgoing.add_flow(flow_ab)
    act.incoming.add_flow(flow_ab)
    act.outgoing.add_flow(flow_bc)
    end_event.incoming.add_flow(flow_bc)

    spec = PipelineSpec(start_event,
                        end_event, [flow_ab, flow_bc], [act], [],
                        data=DataObject({}),
                        context=context.Context({}))
    return Pipeline(node_uniqid(), spec)
Example #4
 def test_inputs_copy(self):
     inputs = {'args': '1', 'kwargs': {'1': 1, '2': 2}}
     data_object = DataObject(inputs=inputs)
     inputs_copy = data_object.inputs_copy()
     self.assertIsInstance(inputs_copy, FancyDict)
     self.assertEqual(inputs_copy, inputs)
     self.assertFalse(inputs is inputs_copy)
Example #5
 def test_outputs_copy(self):
     outputs = {'args': '1', 'kwargs': {'1': 1, '2': 2}}
     data_object = DataObject(inputs={}, outputs=outputs)
     outputs_copy = data_object.outputs_copy()
     self.assertIsInstance(outputs_copy, FancyDict)
     self.assertEqual(outputs_copy, outputs)
     self.assertFalse(outputs_copy is outputs)
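Examples #4 and #5 pin down the copy contract: inputs_copy() and outputs_copy() return FancyDict instances that compare equal to the source mapping but are distinct objects. A minimal stand-in that would satisfy those assertions might look like the sketch below; FancyDict is assumed to be a dict subclass with attribute access (which is how Example #18 uses it), and this is not the library's implementation:

from copy import deepcopy


class FancyDict(dict):
    # Assumed behaviour: dict keys double as attributes (Example #18
    # reads data_object.inputs.args and data_object.outputs.a).
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)


class SimpleDataObject(object):
    # Illustrative stand-in for DataObject's copy semantics only.
    def __init__(self, inputs, outputs=None):
        self.inputs = FancyDict(inputs)
        self.outputs = FancyDict(outputs or {})

    def inputs_copy(self):
        # Equal content, different object: matches the assertIsInstance,
        # assertEqual, and identity checks in the tests above.
        return deepcopy(self.inputs)

    def outputs_copy(self):
        return deepcopy(self.outputs)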
Example #6
    def test_write_node_data(self):
        node = IdentifyObject()
        data_obj = DataObject({'input_key': 'value'},
                              outputs={'output_key': 'value'})
        node.data = data_obj

        Data.objects.write_node_data(node)
        data = Data.objects.get(id=node.id)
        self.assertEqual(data.inputs, data_obj.inputs)
        self.assertEqual(data.outputs, data_obj.outputs)
        self.assertIsNone(data.ex_data)

        data_obj.inputs = {'new_inputs': 'new_value'}
        Data.objects.write_node_data(node, ex_data='ex_data')
        data = Data.objects.get(id=node.id)
        self.assertEqual(data.inputs, data_obj.inputs)
        self.assertEqual(data.outputs, data_obj.outputs)
        self.assertEqual(data.ex_data, 'ex_data')

        data_obj.outputs.ex_data = 'new_ex_data'
        Data.objects.write_node_data(node, ex_data='ex_data')
        data = Data.objects.get(id=node.id)
        self.assertEqual(data.inputs, data_obj.inputs)
        self.assertEqual(data.outputs, data_obj.outputs)
        self.assertEqual(data.ex_data, 'new_ex_data')
Example #7
 def test_sequence_flow(self):
     flow_id = '1'
     source = ServiceActivity(id='1', service=None, data=DataObject({}))
     target = ServiceActivity(id='2', service=None, data=DataObject({}))
     flow = SequenceFlow(flow_id, source, target)
     self.assertTrue(isinstance(flow, FlowElement))
     self.assertEqual(flow_id, flow.id)
     self.assertEqual(source, flow.source)
     self.assertEqual(target, flow.target)
     self.assertEqual(False, flow.is_default)
Example #8
 def test_service_activity(self):
     act_id = '1'
     service = FabricService()
     inputs = {'args': [1, 2, 3], 'kwargs': {'1': 1, '2': 2}}
     service_act = ServiceActivity(id=act_id,
                                   service=service,
                                   data=DataObject(inputs))
     service_act.execute(DataObject(inputs))
     print(service_act.get_result_bit())
     self.assertTrue(isinstance(service_act, Activity))
     self.assertEqual(service, service_act.service)
Example #9
    def retry(self, process, node, inputs):
        action_res = self.transit(id=node.id,
                                  to_state=states.READY,
                                  appoint=True)
        if not action_res.result:
            return action_res

        # add retry times
        s = Status.objects.get(id=node.id)
        s.version = uniqid()
        history = History.objects.record(s)
        LogEntry.objects.link_history(node_id=node.id, history_id=history.id)
        s.retry += 1
        s.save()

        # update inputs
        if inputs:
            new_data = DataObject(inputs=inputs, outputs={})
            node.data = new_data
            Data.objects.write_node_data(node)

        self.recover_from_block(process.root_pipeline.id,
                                process.subprocess_stack)
        signals.node_retry_ready.send(sender=Status,
                                      process=process,
                                      node=node)

        # because node may be updated
        process.save()

        return action_res
Example #10
File: core.py Project: zhengxj91/bk-sops
    def retry(self, process, node, inputs):
        if RERUN_MAX_LIMIT != 0 and self.get(id=node.id).loop > RERUN_MAX_LIMIT:
            return ActionResult(result=False, message='rerun times exceed max limit: {limit}, can not retry'.format(
                limit=RERUN_MAX_LIMIT
            ))

        action_res = self.transit(id=node.id, to_state=states.READY, appoint=True)
        if not action_res.result:
            return action_res

        # add retry times
        s = Status.objects.get(id=node.id)
        s.version = uniqid()
        history = History.objects.record(s)
        LogEntry.objects.link_history(node_id=node.id, history_id=history.id)
        s.retry += 1
        s.save()

        # update inputs
        if inputs:
            new_data = DataObject(inputs=inputs, outputs={})
            node.data = new_data
            Data.objects.write_node_data(node)

        # mark the node so that its next execution is treated as a retry
        node.next_exec_is_retry()

        self.recover_from_block(process.root_pipeline.id, process.subprocess_stack)
        signals.node_retry_ready.send(sender=Status, process=process, node=node)

        # because node may be updated
        process.save()

        return action_res
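Across the retry variants in Examples #9, #10, and #16, DataObject appears only in the input swap: a fresh DataObject(inputs=..., outputs={}) replaces node.data before write_node_data persists it. From a caller's perspective, and assuming retry is exposed on the Status manager (as the self.get(id=node.id) calls suggest), the flow would be used roughly like this caller-side sketch; process and node come from the engine, and only the DataObject construction is documented above:

# Hypothetical caller-side usage of the retry flow shown above.
new_inputs = {'args': [1, 2, 3], 'kwargs': {'timeout': 30}}
action_res = Status.objects.retry(process, node, inputs=new_inputs)
if not action_res.result:
    # e.g. the 'rerun times exceed max limit' message from the core.py variant
    print(action_res.message)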
Example #11
 def test_service_activity(self):
     act_id = '1'
     service = 'a_service'
     inputs = {'args': [1, 2, 3], 'kwargs': {'1': 1, '2': 2}}
     service_act = ServiceActivity(id=act_id,
                                   service=service,
                                   data=DataObject(inputs))
     self.assertTrue(isinstance(service_act, Activity))
     self.assertEqual(service, service_act.service)
Example #12
    def data_for_execution(self, context, pipeline_data):
        data_dict = self.clean_execute_data(context)
        inputs = {}

        for key, tag_info in data_dict.items():
            if tag_info is None:
                raise ComponentDataLackException('Lack of inputs: %s' % key)

            inputs[key] = get_variable(key, tag_info, context, pipeline_data)

        return DataObject(inputs)
Example #13
    def test_end_event(self):
        start_event = EmptyStartEvent(id='a')
        act = ServiceActivity(id='b', service=None, data=DataObject({}))
        end_event = EmptyEndEvent(id='c')

        flow_ab = SequenceFlow('ab', start_event, act)
        flow_bc = SequenceFlow('bc', act, end_event)

        start_event.outgoing.add_flow(flow_ab)
        act.incoming.add_flow(flow_ab)
        act.outgoing.add_flow(flow_bc)
        end_event.incoming.add_flow(flow_bc)

        spec = PipelineSpec(start_event, end_event, [flow_ab, flow_bc], [act],
                            [], None, None)
        pipeline = Pipeline('pipeline', spec)
        self.assertEqual(end_event, pipeline.end_event)
Example #14
    def test_data_object(self):
        inputs = {'args': '1', 'kwargs': {'1': 1, '2': 2}}

        self.assertRaises(exceptions.DataTypeErrorException, DataObject, None)

        data_object = DataObject(inputs)
        self.assertIsInstance(data_object, DataObject)

        self.assertEqual(data_object.get_inputs(), inputs)
        self.assertEqual(data_object.get_outputs(), {})

        self.assertEqual(data_object.get_one_of_inputs('args'), '1')
        self.assertIsNone(data_object.get_one_of_outputs('args'))

        self.assertRaises(exceptions.DataTypeErrorException,
                          data_object.reset_outputs, None)
        self.assertTrue(data_object.reset_outputs({'a': str}))
Example #15
def get_empty_parallel_gateway_test_pipeline():
    start_event_a_id = node_uniqid()
    gateway_b_id = node_uniqid()
    gateway_h_id = node_uniqid()
    end_event_i_id = node_uniqid()

    start_event_a = EmptyStartEvent(start_event_a_id)
    gateway_b = ParallelGateway(gateway_b_id, gateway_h_id)
    gateway_h = ConvergeGateway(gateway_h_id)
    end_event_i = EmptyEndEvent(end_event_i_id)

    flow_ab = SequenceFlow('ab', start_event_a, gateway_b)

    flow_bc = SequenceFlow('bc', gateway_b, gateway_h)
    flow_bd = SequenceFlow('bd', gateway_b, gateway_h)
    flow_be = SequenceFlow('be', gateway_b, gateway_h)

    flow_hi = SequenceFlow('hi', gateway_h, end_event_i)

    start_event_a.outgoing.add_flow(flow_ab)
    gateway_b.incoming.add_flow(flow_ab)

    gateway_b.outgoing.add_flow(flow_bc)
    gateway_b.outgoing.add_flow(flow_bd)
    gateway_b.outgoing.add_flow(flow_be)

    gateway_h.incoming.add_flow(flow_bc)
    gateway_h.incoming.add_flow(flow_bd)
    gateway_h.incoming.add_flow(flow_be)
    gateway_h.outgoing.add_flow(flow_hi)

    end_event_i.incoming.add_flow(flow_hi)

    spec = PipelineSpec(start_event_a,
                        end_event_i, [flow_ab, flow_bc, flow_bd, flow_be], [],
                        [gateway_b, gateway_h],
                        data=DataObject({}),
                        context=context.Context(act_outputs={}, output_key=[]))
    return Pipeline(node_uniqid(), spec)
Example #16
    def retry(self, process, node, inputs):
        result = self.transit(id=node.id, to_state=states.READY, appoint=True)
        if not result:
            return result

        # add retry times
        s = Status.objects.get(id=node.id)
        s.version = uniqid()
        history = History.objects.record(s)
        LogEntry.objects.link_history(node_id=node.id, history_id=history.id)
        s.retry += 1
        s.save()

        # update inputs
        if inputs:
            new_data = DataObject(inputs=inputs, outputs={})
            node.data = new_data
            Data.objects.write_node_data(node)
            process.save()

        self.recover_from_block(process.root_pipeline.id,
                                process.subprocess_stack)
        return result
Example #17
def retry_activity(act_id, inputs):
    if not inputs:
        api.retry_activity(act_id)
        return
    origin_inputs = api.get_inputs(act_id)
    origin_inputs[0][0].data = DataObject(inputs)
    api.retry_activity(act_id, args=origin_inputs[0])
Example #18
    def test_data_object(self):
        inputs = {'args': '1', 'kwargs': {'1': 1, '2': 2}}

        self.assertRaises(exceptions.DataTypeErrorException, DataObject, None)

        data_object = DataObject(inputs)
        self.assertIsInstance(data_object, DataObject)
        self.assertIsInstance(data_object.inputs, FancyDict)
        self.assertIsInstance(data_object.outputs, FancyDict)

        self.assertEqual(data_object.get_inputs(), inputs)
        self.assertEqual(data_object.get_outputs(), {})

        self.assertEqual(data_object.get_one_of_inputs('args'), '1')
        self.assertEqual(data_object.inputs.args, '1')
        self.assertIsNone(data_object.get_one_of_outputs('args'))

        self.assertRaises(exceptions.DataTypeErrorException,
                          data_object.reset_outputs, None)
        self.assertTrue(data_object.reset_outputs({'a': str}))
        self.assertEqual(data_object.outputs.a, str)

        data_object.update_outputs({'args': '1', 'kwargs': {'1': 1, '2': 2}})
        self.assertEqual(data_object.get_outputs(), {
            'a': str,
            'args': '1',
            'kwargs': {
                '1': 1,
                '2': 2
            }
        })
        self.assertEqual(
            jsonschema.validate(json.loads(data_object.serializer()),
                                BASE_PARAM), None)
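Taken together, Examples #14 and #18 exercise most of DataObject's public surface. The sketch below restates that contract in one place; it is an illustrative stand-in mirroring the assertions above, not the library's code, and serializer() is omitted since its exact format is only constrained by the BASE_PARAM schema:

class DataTypeErrorException(Exception):
    pass


class MiniDataObject(object):
    # Contract mirror of the assertions in Examples #14 and #18.
    def __init__(self, inputs, outputs=None):
        if not isinstance(inputs, dict):
            raise DataTypeErrorException('inputs must be a dict')
        self.inputs = dict(inputs)
        self.outputs = dict(outputs or {})

    def get_inputs(self):
        return self.inputs

    def get_outputs(self):
        return self.outputs

    def get_one_of_inputs(self, key):
        return self.inputs.get(key)

    def get_one_of_outputs(self, key):
        return self.outputs.get(key)

    def reset_outputs(self, outputs):
        # None (or any non-dict) must raise, as both tests assert
        if not isinstance(outputs, dict):
            raise DataTypeErrorException('outputs must be a dict')
        self.outputs = dict(outputs)
        return True

    def update_outputs(self, outputs):
        self.outputs.update(outputs)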
Example #19
    def _parse(self,
               root_pipeline_data=None,
               params=None,
               is_subprocess=False,
               parent_context=None):
        if root_pipeline_data is None:
            root_pipeline_data = {}
        if params is None:
            params = {}
        pipeline_data = deepcopy(
            root_pipeline_data) if is_subprocess else root_pipeline_data

        pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
        act_outputs = {}
        scope_info = {}
        process_params = {}
        for key, info in pipeline_inputs.items():
            if info.get(PE.source_act):
                act_outputs.setdefault(info[PE.source_act],
                                       {}).update({info[PE.source_key]: key})
                continue

            if info.get(PE.is_param, False):
                info = params.get(key, info)

            if is_subprocess:
                process_params.update({key: info})
                continue

            scope_info.update({key: info})

        output_keys = self.pipeline_tree[PE.data][PE.outputs].keys()
        context = Context(act_outputs, output_keys)
        for key, info in scope_info.items():
            var = get_variable(key, info, context, pipeline_data)
            context.set_global_var(key, var)

        if is_subprocess:
            if parent_context is None:
                raise exceptions.DataTypeErrorException(
                    'parent context of subprocess cannot be none')
            for key, info in process_params.items():
                var = get_variable(key, info, parent_context, pipeline_data)
                pipeline_data.update({key: var})

        start = self.pipeline_tree[PE.start_event]
        start_cls = getattr(event, start[PE.type])
        start_event = start_cls(id=start[PE.id], name=start[PE.name])

        end = self.pipeline_tree[PE.end_event]
        end_cls = getattr(event, end[PE.type])
        end_event = end_cls(id=end[PE.id], name=end[PE.name])

        acts = self.pipeline_tree[PE.activities]
        act_objs = []
        for act in acts.values():
            act_cls = getattr(activity, act[PE.type])
            if act[PE.type] == PE.ServiceActivity:
                component = ComponentLibrary.get_component(
                    act[PE.component][PE.code], act[PE.component][PE.inputs])
                service = component.service()
                data = component.data_for_execution(context, pipeline_data)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            service=service,
                            name=act[PE.name],
                            data=data,
                            error_ignorable=act.get(PE.error_ignorable, False),
                            skippable=act.get(PE.skippable, True),
                            can_retry=act.get(PE.can_retry, True),
                            timeout=act.get(PE.timeout)))
            elif act[PE.type] == PE.SubProcess:
                sub_tree = act[PE.pipeline]
                params = act[PE.params]
                sub_parser = PipelineParser(pipeline_tree=sub_tree)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            pipeline=sub_parser._parse(
                                root_pipeline_data=root_pipeline_data,
                                params=params,
                                is_subprocess=True,
                                parent_context=context),
                            name=act[PE.name]))
            elif act[PE.type] == PE.LoopServiceActivity:
                act_cls = getattr(activity, act[PE.type])
                component = ComponentLibrary.get_component(
                    act[PE.component][PE.code], act[PE.component][PE.inputs])
                service = component.service()
                data = component.data_for_execution(context, pipeline_data)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            service=service,
                            name=act[PE.name],
                            data=data,
                            error_ignorable=act.get(PE.error_ignorable, False),
                            loop_times=act[PE.loop_times]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                               act[PE.type])

        gateways = self.pipeline_tree[PE.gateways]
        flows = self.pipeline_tree[PE.flows]
        gateway_objs = []
        for gw in gateways.values():
            gw_cls = getattr(gateway, gw[PE.type])
            if gw[PE.type] in [PE.ParallelGateway]:
                gateway_objs.append(
                    gw_cls(id=gw[PE.id],
                           converge_gateway_id=gw[PE.converge_gateway_id],
                           name=gw[PE.name]))
            elif gw[PE.type] in [PE.ExclusiveGateway, PE.ConvergeGateway]:
                gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               gw[PE.type])

        flow_objs_dict = {}
        for fl in flows.values():
            flow_nodes = act_objs + gateway_objs
            if fl[PE.source] == start[PE.id]:
                source = start_event
            else:
                source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
            if fl[PE.target] == end[PE.id]:
                target = end_event
            else:
                target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
            flow_objs_dict[fl[PE.id]] = base.SequenceFlow(
                fl[PE.id], source, target)
        flow_objs = flow_objs_dict.values()

        # add incoming and outgoing flow to acts
        if not isinstance(start[PE.outgoing], list):
            start[PE.outgoing] = [start[PE.outgoing]]
        for outgoing_id in start[PE.outgoing]:
            start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

        if not isinstance(end[PE.incoming], list):
            end[PE.incoming] = [end[PE.incoming]]
        for incoming_id in end[PE.incoming]:
            end_event.incoming.add_flow(flow_objs_dict[incoming_id])

        for act in act_objs:
            incoming = acts[act.id][PE.incoming]
            if isinstance(incoming, list):
                for s in incoming:
                    act.incoming.add_flow(flow_objs_dict[s])
            else:
                act.incoming.add_flow(flow_objs_dict[incoming])

            act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

        for gw in gateway_objs:
            if isinstance(gw, gateway.ExclusiveGateway):
                for flow_id, con in gateways[gw.id][PE.conditions].items():
                    con_obj = gateway.Condition(
                        con[PE.evaluate],
                        flow_objs_dict[flow_id],
                    )
                    gw.add_condition(con_obj)
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, gateway.ParallelGateway):
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, gateway.ConvergeGateway):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
                gw.outgoing.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.outgoing]])

            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               type(gw))

        pipeline_data = DataObject(pipeline_data)
        pipeline_spec = PipelineSpec(start_event, end_event, flow_objs,
                                     act_objs, gateway_objs, pipeline_data,
                                     context)
        return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
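A portability note on the parser above: the source/target lookups use filter(lambda x: ..., flow_nodes)[0], which only works on Python 2, where filter returns a list. On Python 3, filter returns an iterator and cannot be indexed; an equivalent lookup would be, for example:

# Python 3 equivalent of the lookup above (assumes node ids are unique
# within flow_nodes, which the original [0] indexing already relies on)
source = next(node for node in flow_nodes if node.id == fl[PE.source])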
Example #20
def get_exclusive_gateway_pipeline(result):
    start_event_a_id = node_uniqid()
    act_b_id = node_uniqid()
    gateway_c_id = node_uniqid()
    act_d_id = node_uniqid()
    act_e_id = node_uniqid()
    act_f_id = node_uniqid()
    gateway_g_id = node_uniqid()
    end_event_h_id = node_uniqid()

    start_event_a = EmptyStartEvent(start_event_a_id)
    b_act = ServiceActivity(act_b_id,
                            service=EchoService(),
                            data=DataObject({'a': 3}))
    c_gateway = ExclusiveGateway(gateway_c_id, gateway_g_id)
    d_act = ServiceActivity(act_d_id,
                            service=LogService(),
                            data=DataObject({'node': 'd'}))
    e_act = ServiceActivity(act_e_id,
                            service=EchoService(),
                            data=DataObject({'node': 'e'}))
    f_act = ServiceActivity(act_f_id,
                            service=TestService(),
                            data=DataObject({'node': 'f'}))
    g_gateway = ConvergeGateway(gateway_g_id)
    end_event_h = EmptyEndEvent(end_event_h_id)

    ab_flow = SequenceFlow('ab', start_event_a, b_act)
    bc_flow = SequenceFlow('bc', b_act, c_gateway)
    cd_flow = SequenceFlow('cd', c_gateway, d_act)
    ce_flow = SequenceFlow('ce', c_gateway, e_act)
    cf_flow = SequenceFlow('cf', c_gateway, f_act)
    dg_flow = SequenceFlow('dg', d_act, g_gateway)
    eg_flow = SequenceFlow('eg', e_act, g_gateway)
    fg_flow = SequenceFlow('fg', f_act, g_gateway)
    gh_flow = SequenceFlow('gh', g_gateway, end_event_h)

    start_event_a.outgoing.add_flow(ab_flow)
    b_act.incoming.add_flow(ab_flow)
    b_act.outgoing.add_flow(bc_flow)
    c_gateway.incoming.add_flow(bc_flow)
    c_gateway.outgoing.add_flow(cd_flow)
    c_gateway.outgoing.add_flow(ce_flow)
    c_gateway.outgoing.add_flow(cf_flow)
    d_act.incoming.add_flow(cd_flow)
    d_act.outgoing.add_flow(dg_flow)
    e_act.incoming.add_flow(ce_flow)
    e_act.outgoing.add_flow(eg_flow)
    f_act.incoming.add_flow(cf_flow)
    f_act.outgoing.add_flow(fg_flow)
    g_gateway.incoming.add_flow(dg_flow)
    g_gateway.incoming.add_flow(eg_flow)
    g_gateway.incoming.add_flow(fg_flow)
    g_gateway.outgoing.add_flow(gh_flow)
    end_event_h.incoming.add_flow(gh_flow)

    c_gateway.add_condition(Condition('result == 1', cd_flow))
    c_gateway.add_condition(Condition('result == 2', ce_flow))
    c_gateway.add_condition(Condition('result == 3', cf_flow))
    spec = PipelineSpec(start_event_a,
                        end_event_h, [
                            ab_flow, bc_flow, cd_flow, ce_flow, cf_flow,
                            dg_flow, eg_flow, fg_flow, gh_flow
                        ], [b_act, d_act, e_act, f_act],
                        [c_gateway, g_gateway],
                        data=DataObject({}),
                        context=context.Context(act_outputs={},
                                                scope={'result': result}))
    return Pipeline(node_uniqid(), spec)
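Each Condition pairs an expression string with an outgoing flow, and the engine follows whichever branch evaluates truthy against the context scope ({'result': result} here). A rough sketch of that selection step, using plain eval purely for illustration; the attribute names and the engine's real (safer) expression evaluator are assumptions, since neither is shown in these examples:

def select_flow(conditions, scope):
    # Hypothetical exclusive-gateway branching: return the flow of the
    # first condition whose expression is truthy in the given scope.
    for condition in conditions:
        # 'evaluate' and 'sequence_flow' are assumed attribute names,
        # inferred from Condition('result == 1', cd_flow) above.
        if eval(condition.evaluate, {}, scope):  # illustrative only
            return condition.sequence_flow
    raise ValueError('no condition matched scope: %r' % (scope,))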
Example #21
def get_parallel_gateway_test_pipeline():
    start_event_a_id = node_uniqid()
    gateway_b_id = node_uniqid()
    act_c_id = node_uniqid()
    act_d_id = node_uniqid()
    act_e_id = node_uniqid()
    act_f_id = node_uniqid()
    act_g_id = node_uniqid()
    gateway_h_id = node_uniqid()
    end_event_i_id = node_uniqid()

    start_event_a = EmptyStartEvent(start_event_a_id)
    gateway_b = ParallelGateway(gateway_b_id, gateway_h_id)
    act_c = ServiceActivity(act_c_id,
                            service=SleepService(),
                            data=DataObject({'data': '1'}))
    act_d = ServiceActivity(act_d_id,
                            service=SleepService(),
                            data=DataObject({'data': '1'}))
    act_e = ServiceActivity(act_e_id,
                            service=SleepService(),
                            data=DataObject({'data': '1'}))
    act_f = ServiceActivity(act_f_id,
                            service=SleepService(),
                            data=DataObject({'node_1': 'd'}))
    act_g = ServiceActivity(act_g_id,
                            service=SleepService(),
                            data=DataObject({'node_2': 'd'}))
    gateway_h = ConvergeGateway(gateway_h_id)
    end_event_i = EmptyEndEvent(end_event_i_id)

    flow_ab = SequenceFlow('ab', start_event_a, gateway_b)

    flow_bc = SequenceFlow('bc', gateway_b, act_c)
    flow_bd = SequenceFlow('bd', gateway_b, act_d)
    flow_be = SequenceFlow('be', gateway_b, act_e)

    flow_cf = SequenceFlow('cf', act_c, act_f)
    flow_dg = SequenceFlow('dg', act_d, act_g)

    flow_fh = SequenceFlow('fh', act_f, gateway_h)
    flow_gh = SequenceFlow('gh', act_g, gateway_h)
    flow_eh = SequenceFlow('eh', act_e, gateway_h)

    flow_hi = SequenceFlow('hi', gateway_h, end_event_i)

    start_event_a.outgoing.add_flow(flow_ab)
    gateway_b.incoming.add_flow(flow_ab)

    gateway_b.outgoing.add_flow(flow_bc)
    gateway_b.outgoing.add_flow(flow_bd)
    gateway_b.outgoing.add_flow(flow_be)
    act_c.incoming.add_flow(flow_bc)
    act_d.incoming.add_flow(flow_bd)
    act_e.incoming.add_flow(flow_be)

    act_c.outgoing.add_flow(flow_cf)
    act_d.outgoing.add_flow(flow_dg)
    act_e.outgoing.add_flow(flow_eh)

    act_f.incoming.add_flow(flow_cf)
    act_g.incoming.add_flow(flow_dg)
    act_f.outgoing.add_flow(flow_fh)
    act_g.outgoing.add_flow(flow_gh)

    gateway_h.incoming.add_flow(flow_fh)
    gateway_h.incoming.add_flow(flow_gh)
    gateway_h.incoming.add_flow(flow_eh)
    gateway_h.outgoing.add_flow(flow_hi)

    end_event_i.incoming.add_flow(flow_hi)

    spec = PipelineSpec(
        start_event_a,
        end_event_i, [
            flow_ab, flow_bc, flow_bd, flow_be, flow_cf, flow_dg, flow_fh,
            flow_gh, flow_eh
        ], [act_c, act_d, act_e, act_f, act_g], [gateway_b, gateway_h],
        data=DataObject({}),
        context=context.Context(act_outputs={
            act_f_id: {
                'node_1': 'node_1_heihei'
            },
            act_d_id: {
                'data': 'data_haha'
            }
        },
                                output_key=['node_1_heihei', 'data_haha']))
    return Pipeline(node_uniqid(), spec)
Example #22
    def _parse(self,
               root_pipeline_data=None,
               root_pipeline_params=None,
               params=None,
               is_subprocess=False,
               parent_context=None):
        """
        @summary: parse a pipeline and its subprocesses recursively
        @param root_pipeline_data: data from root pipeline parsing, which is passed down to subprocesses recursively
        @param root_pipeline_params: params from the root pipeline, shared by all subprocesses
        @param params: params passed from the parent pipeline to a child subprocess
        @param is_subprocess: whether this pipeline is a subprocess
        @param parent_context: parent context used to resolve inputs of subprocess activities
        @return: Pipeline object
        """
        if root_pipeline_data is None:
            root_pipeline_data = {}
        if root_pipeline_params is None:
            root_pipeline_params = {}
        if params is None:
            params = {}

        pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
        classification = classify_inputs(pipeline_inputs, params,
                                         is_subprocess, root_pipeline_params)

        output_keys = self.pipeline_tree[PE.data][PE.outputs]
        context = Context(classification['act_outputs'], output_keys)
        for key, info in classification['scope_info'].items():
            var = get_variable(key, info, context, root_pipeline_data)
            context.set_global_var(key, var)

        pipeline_data = deepcopy(root_pipeline_data)
        if is_subprocess:
            if parent_context is None:
                raise exceptions.DataTypeErrorException(
                    'parent context of subprocess cannot be none')
            for key, info in classification['subprocess_params'].items():
                var = get_variable(key, info, parent_context, pipeline_data)
                pipeline_data.update({key: var})

        start = self.pipeline_tree[PE.start_event]
        start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type])
        start_event = start_cls(id=start[PE.id], name=start[PE.name])

        end = self.pipeline_tree[PE.end_event]
        end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type])
        end_event = end_cls(id=end[PE.id],
                            name=end[PE.name],
                            data=DataObject({}))

        acts = self.pipeline_tree[PE.activities]
        act_objs = []
        for act in acts.values():
            act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type])
            if act[PE.type] == PE.ServiceActivity:
                component = ComponentLibrary.get_component(
                    act[PE.component][PE.code], act[PE.component][PE.inputs])
                service = component.service()
                data = component.data_for_execution(context, pipeline_data)
                handler_path = act.get('failure_handler')
                failure_handler = import_string(
                    handler_path) if handler_path else None
                act_objs.append(
                    act_cls(id=act[PE.id],
                            service=service,
                            name=act[PE.name],
                            data=data,
                            error_ignorable=act.get(PE.error_ignorable, False),
                            skippable=act.get(PE.skippable)
                            or act.get(PE.skippable_old, True),
                            retryable=act.get(PE.retryable)
                            or act.get(PE.retryable_old, True),
                            timeout=act.get(PE.timeout),
                            failure_handler=failure_handler))
            elif act[PE.type] == PE.SubProcess:
                sub_tree = act[PE.pipeline]
                params = act[PE.params]
                sub_parser = PipelineParser(pipeline_tree=sub_tree)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            pipeline=sub_parser._parse(
                                root_pipeline_data=root_pipeline_data,
                                root_pipeline_params=root_pipeline_params,
                                params=params,
                                is_subprocess=True,
                                parent_context=context),
                            name=act[PE.name]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                               act[PE.type])

        gateways = self.pipeline_tree[PE.gateways]
        flows = self.pipeline_tree[PE.flows]
        gateway_objs = []
        for gw in gateways.values():
            gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type])
            if gw[PE.type] in {
                    PE.ParallelGateway, PE.ConditionalParallelGateway
            }:
                gateway_objs.append(
                    gw_cls(id=gw[PE.id],
                           converge_gateway_id=gw[PE.converge_gateway_id],
                           name=gw[PE.name]))
            elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}:
                gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               gw[PE.type])

        flow_objs_dict = {}
        for fl in flows.values():
            flow_nodes = act_objs + gateway_objs
            if fl[PE.source] == start[PE.id]:
                source = start_event
            else:
                source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
            if fl[PE.target] == end[PE.id]:
                target = end_event
            else:
                target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
            flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target)
        flow_objs = flow_objs_dict.values()

        # add incoming and outgoing flow to acts
        if not isinstance(start[PE.outgoing], list):
            start[PE.outgoing] = [start[PE.outgoing]]
        for outgoing_id in start[PE.outgoing]:
            start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

        if not isinstance(end[PE.incoming], list):
            end[PE.incoming] = [end[PE.incoming]]
        for incoming_id in end[PE.incoming]:
            end_event.incoming.add_flow(flow_objs_dict[incoming_id])

        for act in act_objs:
            incoming = acts[act.id][PE.incoming]
            if isinstance(incoming, list):
                for s in incoming:
                    act.incoming.add_flow(flow_objs_dict[s])
            else:
                act.incoming.add_flow(flow_objs_dict[incoming])

            act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

        for gw in gateway_objs:
            if isinstance(gw, ExclusiveGateway) or isinstance(
                    gw, ConditionalParallelGateway):
                for flow_id, con in gateways[gw.id][PE.conditions].items():
                    con_obj = Condition(con[PE.evaluate],
                                        flow_objs_dict[flow_id])
                    gw.add_condition(con_obj)

                if isinstance(gateways[gw.id][PE.incoming], list):
                    for incoming_id in gateways[gw.id][PE.incoming]:
                        gw.incoming.add_flow(flow_objs_dict[incoming_id])
                else:
                    gw.incoming.add_flow(
                        flow_objs_dict[gateways[gw.id][PE.incoming]])

                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, ParallelGateway):
                if isinstance(gateways[gw.id][PE.incoming], list):
                    for incoming_id in gateways[gw.id][PE.incoming]:
                        gw.incoming.add_flow(flow_objs_dict[incoming_id])
                else:
                    gw.incoming.add_flow(
                        flow_objs_dict[gateways[gw.id][PE.incoming]])

                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, ConvergeGateway):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
                gw.outgoing.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.outgoing]])

            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               type(gw))

        context.duplicate_variables()
        pipeline_data = DataObject(pipeline_data)
        pipeline_spec = PipelineSpec(start_event, end_event, flow_objs,
                                     act_objs, gateway_objs, pipeline_data,
                                     context)
        return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
Example #23
    def parser(self, root_pipeline_data=None):
        if root_pipeline_data is None:
            root_pipeline_data = {}

        pipeline_inputs = self.pipeline_tree['data']['inputs']
        act_outputs = {}
        scope_info = {}
        for key, info in pipeline_inputs.iteritems():
            if info.get('source_act'):
                act_outputs.setdefault(info['source_act'],
                                       {}).update({info['source_key']: key})
            else:
                scope_info.update({key: info})
        output_keys = self.pipeline_tree['data']['outputs'].keys()
        context = Context(act_outputs, output_keys)
        for key, info in scope_info.iteritems():
            value = get_variable(key, info, context, root_pipeline_data)
            context.set_global_var(key, value)

        start = self.pipeline_tree['start_event']
        start_cls = getattr(event, start['type'])
        start_event = start_cls(id=start['id'],
                                name=start['name'])

        end = self.pipeline_tree['end_event']
        end_cls = getattr(event, end['type'])
        end_event = end_cls(id=end['id'],
                            name=end['name'])

        acts = self.pipeline_tree['activities']
        act_objs = []
        for act in acts.values():
            act_cls = getattr(activity, act['type'])
            if act['type'] == 'ServiceActivity':
                component = ComponentLibrary.get_component(
                    act['component']['code'], act['component']['inputs']
                )
                service = component.service()
                data = component.data_for_execution(context, root_pipeline_data)
                act_objs.append(act_cls(id=act['id'],
                                        service=service,
                                        name=act['name'],
                                        data=data,
                                        error_ignorable=act.get('error_ignorable', False)))
            elif act['type'] == 'SubProcess':
                pipeline_info = act['pipeline']
                sub_parser = PipelineParser(pipeline_info)
                act_objs.append(act_cls(id=act['id'],
                                        pipeline=sub_parser.parser(root_pipeline_data),
                                        name=act['name']))
            else:
                raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                               act['type'])

        gateways = self.pipeline_tree['gateways']
        flows = self.pipeline_tree['flows']
        gateway_objs = []
        for gw in gateways.values():
            gw_cls = getattr(gateway, gw['type'])
            if gw['type'] in ['ParallelGateway']:
                gateway_objs.append(
                    gw_cls(id=gw['id'],
                           converge_gateway_id=gw['converge_gateway_id'],
                           name=gw['name']))
            elif gw['type'] in ['ExclusiveGateway', 'ConvergeGateway']:
                gateway_objs.append(gw_cls(id=gw['id'],
                                           name=gw['name']))
            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               gw['type'])

        flow_objs_dict = {}
        for fl in flows.values():
            flow_nodes = act_objs + gateway_objs
            if fl['source'] == start['id']:
                source = start_event
            else:
                source = filter(lambda x: x.id == fl['source'], flow_nodes)[0]
            if fl['target'] == end['id']:
                target = end_event
            else:
                target = filter(lambda x: x.id == fl['target'], flow_nodes)[0]
            flow_objs_dict[fl['id']] = base.SequenceFlow(fl['id'],
                                                         source,
                                                         target)
        flow_objs = flow_objs_dict.values()

        # add incoming and outgoing flow to acts
        if not isinstance(start['outgoing'], list):
            start['outgoing'] = [start['outgoing']]
        for outgoing_id in start['outgoing']:
            start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

        if not isinstance(end['incoming'], list):
            end['incoming'] = [end['incoming']]
        for incoming_id in end['incoming']:
            end_event.incoming.add_flow(flow_objs_dict[incoming_id])

        for act in act_objs:
            act.incoming.add_flow(flow_objs_dict[acts[act.id]['incoming']])
            act.outgoing.add_flow(flow_objs_dict[acts[act.id]['outgoing']])

        for gw in gateway_objs:
            if isinstance(gw, gateway.ExclusiveGateway):
                for flow_id, con in gateways[gw.id]['conditions'].iteritems():
                    con_obj = gateway.Condition(
                        con['evaluate'],
                        flow_objs_dict[flow_id],
                    )
                    gw.add_condition(con_obj)
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id]['incoming']]
                )
                for outgoing_id in gateways[gw.id]['outgoing']:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, gateway.ParallelGateway):
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id]['incoming']]
                )
                for outgoing_id in gateways[gw.id]['outgoing']:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, gateway.ConvergeGateway):
                for incoming_id in gateways[gw.id]['incoming']:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
                gw.outgoing.add_flow(
                    flow_objs_dict[gateways[gw.id]['outgoing']]
                )

            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               type(gw))

        root_pipeline_data = DataObject(root_pipeline_data)
        pipeline_spec = PipelineSpec(start_event, end_event, flow_objs,
                                     act_objs, gateway_objs, root_pipeline_data,
                                     context)
        return Pipeline(self.pipeline_tree['id'], pipeline_spec)
Example #24
import logging
import mock

from django.test import TestCase
from mock import MagicMock

from pipeline.core.data.base import DataObject

from pipeline_plugins.components.collections.sites.open.job.base import GetJobHistoryResultMixin

TEST_INPUTS = {
    "job_success_id": 12345,
    "biz_cc_id": 11111,
    "executor": "executor"
}
TEST_DATA = DataObject(TEST_INPUTS)
TEST_PARENT_DATA = DataObject(TEST_INPUTS)

logger = logging.getLogger("component")

GET_CLIENT_BY_USER = "******"
GET_JOB_INSTANCE_URL = "pipeline_plugins.components.collections.sites.open.job.base.get_job_instance_status"
GET_JOB_STATUS_RETURN = {
    "result": True,
    "code": 0,
    "message": "",
    "data": {
        "finished":
        True,
        "job_instance": {
            "job_instance_id": 100,
Example #25
File: test.py Project: zhengxj91/bk-sops
    def test_component(self):
        component = self._component_cls({})

        for no, case in enumerate(self._cases):
            try:

                patchers = [patcher.mock_patcher() for patcher in case.patchers]

                with patch_context(patchers):

                    bound_service = component.service()

                    setattr(bound_service, 'id', case.service_id)

                    data = DataObject(inputs=case.inputs)
                    parent_data = DataObject(inputs=case.parent_data)

                    # execute result check
                    do_continue = self._do_case_assert(service=bound_service,
                                                       method='execute',
                                                       args=(data, parent_data),
                                                       assertion=case.execute_assertion,
                                                       no=no,
                                                       name=case.name)

                    for call_assertion in case.execute_call_assertion:
                        self._do_call_assertion(name=case.name,
                                                no=no,
                                                assertion=call_assertion)

                    if do_continue:
                        self.case_pass(case)
                        continue

                    if bound_service.need_schedule():

                        if bound_service.interval is None:
                            # callback case
                            self._do_case_assert(service=bound_service,
                                                 method='schedule',
                                                 args=(data, parent_data, case.schedule_assertion.callback_data),
                                                 assertion=case.schedule_assertion,
                                                 no=no,
                                                 name=case.name)

                        else:
                            # schedule case
                            assertions = case.schedule_assertion
                            assertions = assertions if isinstance(assertions, list) else [assertions]

                            for assertion in assertions:
                                do_continue = self._do_case_assert(service=bound_service,
                                                                   method='schedule',
                                                                   args=(data, parent_data),
                                                                   assertion=assertion,
                                                                   no=no,
                                                                   name=case.name)

                                self.assertEqual(assertion.schedule_finished,
                                                 bound_service.is_schedule_finished(),
                                                 msg=self._format_failure_message(
                                                     no=no,
                                                     name=case.name,
                                                     msg='schedule_finished assertion failed:'
                                                         '\nexpected: {expected}\nactual: {actual}'.format(
                                                         expected=assertion.schedule_finished,  # noqa
                                                         actual=bound_service.is_schedule_finished())))  # noqa

                                if do_continue:
                                    break

                        for call_assertion in case.schedule_call_assertion:
                            self._do_call_assertion(name=case.name,
                                                    no=no,
                                                    assertion=call_assertion)

                    self.case_pass(case)

            except Exception:
                self.case_fail(case)
                logger.error(traceback.format_exc())