Example #1
File: base.py  Project: lvtu0316/bk-sops
    def __init__(self, inputs, outputs=None):
        if not isinstance(inputs, dict):
            raise exceptions.DataTypeErrorException('inputs is not dict')
        self.inputs = inputs
        if outputs is None:
            outputs = {}
        if not isinstance(outputs, dict):
            raise exceptions.DataTypeErrorException('outputs is not dict')
        self.outputs = outputs
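A minimal usage sketch of the constructor above. It assumes the enclosing class is the DataObject referenced in the parser examples below; the class name is not shown in this excerpt and the values are purely illustrative.

# Hypothetical usage, assuming the method above belongs to DataObject (not shown here).
data = DataObject(inputs={'biz_id': 2, 'operator': 'admin'})        # outputs defaults to {}
data = DataObject(inputs={'biz_id': 2}, outputs={'job_id': 42})
# DataObject(inputs=['not', 'a', 'dict'])  # raises DataTypeErrorException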
Example #2
def get_variable(key, info, context, pipeline_data):
    if isinstance(info['value'], Variable):
        variable = info['value']
    else:
        if info.get('type', 'plain') == 'plain':
            variable = PlainVariable(key, info['value'])
        elif info['type'] == 'splice':
            variable = SpliceVariable(key, info['value'], context)
        elif info['type'] == 'lazy':
            variable = library.VariableLibrary.get_var_class(
                info['custom_type'])(key, info['value'], context,
                                     pipeline_data)
        else:
            raise exceptions.DataTypeErrorException(
                'Unknown type: %s, which should be one of [plain, splice, lazy]'
                % info['type'])
    return variable
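A minimal sketch of calling the dispatcher above. It assumes Context takes (act_outputs, output_keys), as it does in the _parse examples below; the variable names, dict values, and template syntax are illustrative only.

# Hypothetical illustration of the type-based dispatch above.
context = Context({}, [])            # (act_outputs, output_keys), as constructed in _parse below
pipeline_data = {}
plain_var = get_variable('biz_id', {'value': 2}, context, pipeline_data)   # 'type' defaults to 'plain'
splice_var = get_variable('path', {'type': 'splice', 'value': '/data/${biz_id}'},
                          context, pipeline_data)
# an unknown 'type' raises DataTypeErrorException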
Example #3
File: base.py  Project: lvtu0316/bk-sops
    def reset_outputs(self, outputs):
        if not isinstance(outputs, dict):
            raise exceptions.DataTypeErrorException('outputs is not dict')
        self.outputs = outputs
        return True
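For completeness, a short sketch of the setter above, using the same hypothetical DataObject instance as in Example #1:

data.reset_outputs({'job_id': 123})   # replaces the outputs dict wholesale, returns True
# data.reset_outputs('oops')          # raises DataTypeErrorException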
Example #4
    def _parse(self,
               root_pipeline_data=None,
               params=None,
               is_subprocess=False,
               parent_context=None):
        if root_pipeline_data is None:
            root_pipeline_data = {}
        if params is None:
            params = {}
        pipeline_data = deepcopy(
            root_pipeline_data) if is_subprocess else root_pipeline_data

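        # Classify pipeline inputs: keys sourced from another activity's output are
        # recorded in act_outputs, parameters of a subprocess are collected in
        # process_params, and everything else becomes a scope (global) variable.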
        pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
        act_outputs = {}
        scope_info = {}
        process_params = {}
        for key, info in pipeline_inputs.items():
            if info.get(PE.source_act):
                act_outputs.setdefault(info[PE.source_act],
                                       {}).update({info[PE.source_key]: key})
                continue

            if info.get(PE.is_param, False):
                info = params.get(key, info)

            if is_subprocess:
                process_params.update({key: info})
                continue

            scope_info.update({key: info})

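        # Build the execution Context from the activity output mappings, register every
        # scope variable as a global variable, and, for subprocesses, resolve parameters
        # against the parent context into pipeline_data.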
        output_keys = self.pipeline_tree[PE.data][PE.outputs].keys()
        context = Context(act_outputs, output_keys)
        for key, info in scope_info.items():
            var = get_variable(key, info, context, pipeline_data)
            context.set_global_var(key, var)

        if is_subprocess:
            if parent_context is None:
                raise exceptions.DataTypeErrorException(
                    'parent context of subprocess cannot be none')
            for key, info in process_params.items():
                var = get_variable(key, info, parent_context, pipeline_data)
                pipeline_data.update({key: var})

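        # Instantiate the start and end event nodes from the tree definition.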
        start = self.pipeline_tree[PE.start_event]
        start_cls = getattr(event, start[PE.type])
        start_event = start_cls(id=start[PE.id], name=start[PE.name])

        end = self.pipeline_tree[PE.end_event]
        end_cls = getattr(event, end[PE.type])
        end_event = end_cls(id=end[PE.id], name=end[PE.name])

        acts = self.pipeline_tree[PE.activities]
        act_objs = []
        for act in acts.values():
            act_cls = getattr(activity, act[PE.type])
            if act[PE.type] == PE.ServiceActivity:
                component = ComponentLibrary.get_component(
                    act[PE.component][PE.code], act[PE.component][PE.inputs])
                service = component.service()
                data = component.data_for_execution(context, pipeline_data)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            service=service,
                            name=act[PE.name],
                            data=data,
                            error_ignorable=act.get(PE.error_ignorable, False),
                            skippable=act.get(PE.skippable, True),
                            can_retry=act.get(PE.can_retry, True),
                            timeout=act.get(PE.timeout)))
            elif act[PE.type] == PE.SubProcess:
                sub_tree = act[PE.pipeline]
                params = act[PE.params]
                sub_parser = PipelineParser(pipeline_tree=sub_tree)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            pipeline=sub_parser._parse(
                                root_pipeline_data=root_pipeline_data,
                                params=params,
                                is_subprocess=True,
                                parent_context=context),
                            name=act[PE.name]))
            elif act[PE.type] == PE.LoopServiceActivity:
                act_cls = getattr(activity, act[PE.type])
                component = ComponentLibrary.get_component(
                    act[PE.component][PE.code], act[PE.component][PE.inputs])
                service = component.service()
                data = component.data_for_execution(context, pipeline_data)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            service=service,
                            name=act[PE.name],
                            data=data,
                            error_ignorable=act.get(PE.error_ignorable, False),
                            loop_times=act[PE.loop_times]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                               act[PE.type])

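        # Build gateway nodes; this version only supports parallel, exclusive and
        # converge gateways.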
        gateways = self.pipeline_tree[PE.gateways]
        flows = self.pipeline_tree[PE.flows]
        gateway_objs = []
        for gw in gateways.values():
            gw_cls = getattr(gateway, gw[PE.type])
            if gw[PE.type] in [PE.ParallelGateway]:
                gateway_objs.append(
                    gw_cls(id=gw[PE.id],
                           converge_gateway_id=gw[PE.converge_gateway_id],
                           name=gw[PE.name]))
            elif gw[PE.type] in [PE.ExclusiveGateway, PE.ConvergeGateway]:
                gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               gw[PE.type])

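        # Wire SequenceFlow objects between events, activities and gateways.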
        flow_objs_dict = {}
        for fl in flows.values():
            flow_nodes = act_objs + gateway_objs
            if fl[PE.source] == start[PE.id]:
                source = start_event
            else:
                # filter()[0] only works on Python 2; a list comprehension behaves the same on 2 and 3
                source = [x for x in flow_nodes if x.id == fl[PE.source]][0]
            if fl[PE.target] == end[PE.id]:
                target = end_event
            else:
                target = [x for x in flow_nodes if x.id == fl[PE.target]][0]
            flow_objs_dict[fl[PE.id]] = base.SequenceFlow(
                fl[PE.id], source, target)
        flow_objs = flow_objs_dict.values()

        # add incoming and outgoing flow to acts
        if not isinstance(start[PE.outgoing], list):
            start[PE.outgoing] = [start[PE.outgoing]]
        for outgoing_id in start[PE.outgoing]:
            start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

        if not isinstance(end[PE.incoming], list):
            end[PE.incoming] = [end[PE.incoming]]
        for incoming_id in end[PE.incoming]:
            end_event.incoming.add_flow(flow_objs_dict[incoming_id])

        for act in act_objs:
            incoming = acts[act.id][PE.incoming]
            if isinstance(incoming, list):
                for s in incoming:
                    act.incoming.add_flow(flow_objs_dict[s])
            else:
                act.incoming.add_flow(flow_objs_dict[incoming])

            act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

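        # Attach conditions (for exclusive gateways) and incoming/outgoing flows to
        # each gateway object.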
        for gw in gateway_objs:
            if isinstance(gw, gateway.ExclusiveGateway):
                for flow_id, con in gateways[gw.id][PE.conditions].items():
                    con_obj = gateway.Condition(
                        con[PE.evaluate],
                        flow_objs_dict[flow_id],
                    )
                    gw.add_condition(con_obj)
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, gateway.ParallelGateway):
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, gateway.ConvergeGateway):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
                gw.outgoing.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.outgoing]])

            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               type(gw))

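        # Assemble the final PipelineSpec and Pipeline objects.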
        pipeline_data = DataObject(pipeline_data)
        pipeline_spec = PipelineSpec(start_event, end_event, flow_objs,
                                     act_objs, gateway_objs, pipeline_data,
                                     context)
        return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
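A hypothetical top-level call, mirroring the recursive invocation this method makes for subprocesses; the real public entry point of the parser may differ, and web_pipeline_tree is an illustrative dict following the PE schema.

parser = PipelineParser(pipeline_tree=web_pipeline_tree)
pipeline = parser._parse(root_pipeline_data={'operator': 'admin'})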
Example #5
    def _parse(self,
               root_pipeline_data=None,
               root_pipeline_params=None,
               params=None,
               is_subprocess=False,
               parent_context=None):
        """
        @summary: parse the pipeline and its subprocesses recursively
        @param root_pipeline_data: root data from parsing the root pipeline, which is passed down to subprocesses recursively
        @param root_pipeline_params: params from the root pipeline shared by all subprocesses
        @param params: params passed from the parent pipeline to its child subprocess
        @param is_subprocess: whether the pipeline being parsed is a subprocess
        @param parent_context: parent context used to resolve the inputs of subprocess activities
        @return: Pipeline object
        """
        if root_pipeline_data is None:
            root_pipeline_data = {}
        if root_pipeline_params is None:
            root_pipeline_params = {}
        if params is None:
            params = {}

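        # classify_inputs splits the pipeline inputs into the act_outputs, scope_info
        # and subprocess_params buckets consumed below (the classification done inline
        # in Example #4).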
        pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
        classification = classify_inputs(pipeline_inputs, params,
                                         is_subprocess, root_pipeline_params)

        output_keys = self.pipeline_tree[PE.data][PE.outputs]
        context = Context(classification['act_outputs'], output_keys)
        for key, info in classification['scope_info'].items():
            var = get_variable(key, info, context, root_pipeline_data)
            context.set_global_var(key, var)

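        # Each level of parsing now works on its own deepcopy of root_pipeline_data;
        # subprocess parameters are resolved against the parent context and injected
        # into that copy.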
        pipeline_data = deepcopy(root_pipeline_data)
        if is_subprocess:
            if parent_context is None:
                raise exceptions.DataTypeErrorException(
                    'parent context of subprocess cannot be none')
            for key, info in classification['subprocess_params'].items():
                var = get_variable(key, info, parent_context, pipeline_data)
                pipeline_data.update({key: var})

        start = self.pipeline_tree[PE.start_event]
        start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type])
        start_event = start_cls(id=start[PE.id], name=start[PE.name])

        end = self.pipeline_tree[PE.end_event]
        end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type])
        end_event = end_cls(id=end[PE.id],
                            name=end[PE.name],
                            data=DataObject({}))

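        # Build activity nodes; ServiceActivity now supports an optional failure_handler
        # imported by dotted path, and SubProcess parsing recurses with
        # root_pipeline_params as well.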
        acts = self.pipeline_tree[PE.activities]
        act_objs = []
        for act in acts.values():
            act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type])
            if act[PE.type] == PE.ServiceActivity:
                component = ComponentLibrary.get_component(
                    act[PE.component][PE.code], act[PE.component][PE.inputs])
                service = component.service()
                data = component.data_for_execution(context, pipeline_data)
                handler_path = act.get('failure_handler')
                failure_handler = import_string(
                    handler_path) if handler_path else None
                act_objs.append(
                    act_cls(id=act[PE.id],
                            service=service,
                            name=act[PE.name],
                            data=data,
                            error_ignorable=act.get(PE.error_ignorable, False),
                            skippable=act.get(PE.skippable)
                            or act.get(PE.skippable_old, True),
                            retryable=act.get(PE.retryable)
                            or act.get(PE.retryable_old, True),
                            timeout=act.get(PE.timeout),
                            failure_handler=failure_handler))
            elif act[PE.type] == PE.SubProcess:
                sub_tree = act[PE.pipeline]
                params = act[PE.params]
                sub_parser = PipelineParser(pipeline_tree=sub_tree)
                act_objs.append(
                    act_cls(id=act[PE.id],
                            pipeline=sub_parser._parse(
                                root_pipeline_data=root_pipeline_data,
                                root_pipeline_params=root_pipeline_params,
                                params=params,
                                is_subprocess=True,
                                parent_context=context),
                            name=act[PE.name]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                               act[PE.type])

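        # Build gateway nodes; ConditionalParallelGateway is supported here in addition
        # to the types handled in Example #4.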
        gateways = self.pipeline_tree[PE.gateways]
        flows = self.pipeline_tree[PE.flows]
        gateway_objs = []
        for gw in gateways.values():
            gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type])
            if gw[PE.type] in {
                    PE.ParallelGateway, PE.ConditionalParallelGateway
            }:
                gateway_objs.append(
                    gw_cls(id=gw[PE.id],
                           converge_gateway_id=gw[PE.converge_gateway_id],
                           name=gw[PE.name]))
            elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}:
                gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               gw[PE.type])

        flow_objs_dict = {}
        for fl in flows.values():
            flow_nodes = act_objs + gateway_objs
            if fl[PE.source] == start[PE.id]:
                source = start_event
            else:
                # filter()[0] only works on Python 2; a list comprehension behaves the same on 2 and 3
                source = [x for x in flow_nodes if x.id == fl[PE.source]][0]
            if fl[PE.target] == end[PE.id]:
                target = end_event
            else:
                target = [x for x in flow_nodes if x.id == fl[PE.target]][0]
            flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target)
        flow_objs = flow_objs_dict.values()

        # add incoming and outgoing flow to acts
        if not isinstance(start[PE.outgoing], list):
            start[PE.outgoing] = [start[PE.outgoing]]
        for outgoing_id in start[PE.outgoing]:
            start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

        if not isinstance(end[PE.incoming], list):
            end[PE.incoming] = [end[PE.incoming]]
        for incoming_id in end[PE.incoming]:
            end_event.incoming.add_flow(flow_objs_dict[incoming_id])

        for act in act_objs:
            incoming = acts[act.id][PE.incoming]
            if isinstance(incoming, list):
                for s in incoming:
                    act.incoming.add_flow(flow_objs_dict[s])
            else:
                act.incoming.add_flow(flow_objs_dict[incoming])

            act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

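        # Attach conditions and incoming/outgoing flows to each gateway; incoming may
        # be a single flow id or a list of ids in this version.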
        for gw in gateway_objs:
            if isinstance(gw, ExclusiveGateway) or isinstance(
                    gw, ConditionalParallelGateway):
                for flow_id, con in gateways[gw.id][PE.conditions].items():
                    con_obj = Condition(con[PE.evaluate],
                                        flow_objs_dict[flow_id])
                    gw.add_condition(con_obj)

                if isinstance(gateways[gw.id][PE.incoming], list):
                    for incoming_id in gateways[gw.id][PE.incoming]:
                        gw.incoming.add_flow(flow_objs_dict[incoming_id])
                else:
                    gw.incoming.add_flow(
                        flow_objs_dict[gateways[gw.id][PE.incoming]])

                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, ParallelGateway):
                if isinstance(gateways[gw.id][PE.incoming], list):
                    for incoming_id in gateways[gw.id][PE.incoming]:
                        gw.incoming.add_flow(flow_objs_dict[incoming_id])
                else:
                    gw.incoming.add_flow(
                        flow_objs_dict[gateways[gw.id][PE.incoming]])

                for outgoing_id in gateways[gw.id][PE.outgoing]:
                    gw.outgoing.add_flow(flow_objs_dict[outgoing_id])

            elif isinstance(gw, ConvergeGateway):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
                gw.outgoing.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.outgoing]])

            else:
                raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                               type(gw))

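        # Finalize the context and assemble the PipelineSpec and Pipeline, as in
        # Example #4.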
        context.duplicate_variables()
        pipeline_data = DataObject(pipeline_data)
        pipeline_spec = PipelineSpec(start_event, end_event, flow_objs,
                                     act_objs, gateway_objs, pipeline_data,
                                     context)
        return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
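Compared with Example #4, this revision extracts input classification into classify_inputs, resolves node classes through FlowNodeClsFactory, threads root_pipeline_params through subprocess parsing, and adds failure_handler and ConditionalParallelGateway support. A hypothetical top-level call (argument names mirror the signature above, values are illustrative only):

parser = PipelineParser(pipeline_tree=web_pipeline_tree)
pipeline = parser._parse(root_pipeline_data={'operator': 'admin'},
                         root_pipeline_params={'biz_id': 2})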