def format_web_data_to_pipeline(web_pipeline, is_subprocess=False):
    """
    Convert a pipeline tree produced by the web frontend into the
    engine-consumable format.

    @summary: classify frontend constants, resolve hidden-constant cross
        references, rewrite activity inputs/outputs, and recurse into
        subprocess trees.
    @param web_pipeline: pipeline tree data from the web frontend (not mutated;
        a deep copy is transformed and returned)
    @param is_subprocess: whether this tree is being formatted as a subprocess
    @return: the transformed pipeline tree dict
    @raise exceptions.FlowTypeError: on an unknown activity type
    """
    pipeline_tree = copy.deepcopy(web_pipeline)
    constants = pipeline_tree.pop('constants')
    classification = classify_constants(constants, is_subprocess)

    # resolve cross references between hidden global variables
    pool_obj = ConstantPool(classification['constant_pool'])
    pre_resolved_constants = pool_obj.pool
    classification['data_inputs'] = calculate_constants_type(
        pre_resolved_constants, classification['data_inputs'])
    classification['data_inputs'] = calculate_constants_type(
        classification['params'], classification['data_inputs'])

    pipeline_tree['data'] = {
        'inputs': classification['data_inputs'],
        'outputs': {key: key for key in pipeline_tree.pop('outputs')},
    }

    for act_id, act in pipeline_tree['activities'].items():
        if act['type'] == 'ServiceActivity':
            act_data = act['component'].pop('data')
            all_inputs = calculate_constants_type(
                act_data, classification['data_inputs'])
            # only the keys declared in the component's own data are exposed
            # as its inputs; the rest stay global
            act['component']['inputs'] = {
                key: value
                for key, value in all_inputs.items() if key in act_data
            }
            act['component']['global_outputs'] = classification[
                'acts_outputs'].get(act_id, {})
        elif act['type'] == 'SubProcess':
            parent_params = {}
            for key, info in act['pipeline']['constants'].items():
                # only shown constants may pull values from the parent scope
                if info['show_type'] == 'show':
                    references = ConstantTemplate(
                        info['value']).get_reference()
                    for ref_key in references:
                        formatted_key = format_constant_key(ref_key)
                        if formatted_key in classification['data_inputs']:
                            parent_params[formatted_key] = classification[
                                'data_inputs'][formatted_key]
            act['params'] = parent_params
            act['pipeline'] = format_web_data_to_pipeline(act['pipeline'],
                                                          is_subprocess=True)
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act['type'])

    return pipeline_tree
def _parse(self,
           root_pipeline_data=None,
           params=None,
           is_subprocess=False,
           parent_context=None):
    """
    Build a Pipeline object from ``self.pipeline_tree``, recursing into
    SubProcess activities.

    @param root_pipeline_data: execution data from the root pipeline; passed
        down to every subprocess
    @param params: parameter overrides handed down by the parent pipeline
    @param is_subprocess: whether this tree is parsed as a subprocess
    @param parent_context: the parent pipeline's Context (required when
        is_subprocess is True)
    @return: Pipeline object
    @raise exceptions.DataTypeErrorException: subprocess parsed without a
        parent context
    @raise exceptions.FlowTypeError: unknown activity or gateway type
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    if params is None:
        params = {}

    # a subprocess works on an isolated copy so it cannot mutate the
    # parent's data; the root pipeline uses the dict directly
    pipeline_data = deepcopy(
        root_pipeline_data) if is_subprocess else root_pipeline_data

    pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
    act_outputs = {}
    scope_info = {}
    process_params = {}
    for key, info in pipeline_inputs.items():
        # inputs sourced from an activity's output are registered as
        # act_outputs: {source_act: {source_key: global key}}
        if info.get(PE.source_act):
            act_outputs.setdefault(info[PE.source_act],
                                   {}).update({info[PE.source_key]: key})
            continue
        if info.get(PE.is_param, False):
            # a parent-supplied value overrides the declared default
            info = params.get(key, info)
            if is_subprocess:
                process_params.update({key: info})
                continue
        scope_info.update({key: info})
    output_keys = self.pipeline_tree[PE.data][PE.outputs].keys()
    context = Context(act_outputs, output_keys)
    for key, info in scope_info.items():
        var = get_variable(key, info, context, pipeline_data)
        context.set_global_var(key, var)

    if is_subprocess:
        if parent_context is None:
            raise exceptions.DataTypeErrorException(
                'parent context of subprocess cannot be none')
        # subprocess params are resolved against the PARENT context, then
        # injected into this subprocess' own data
        for key, info in process_params.items():
            var = get_variable(key, info, parent_context, pipeline_data)
            pipeline_data.update({key: var})

    start = self.pipeline_tree[PE.start_event]
    start_cls = getattr(event, start[PE.type])
    start_event = start_cls(id=start[PE.id], name=start[PE.name])

    end = self.pipeline_tree[PE.end_event]
    end_cls = getattr(event, end[PE.type])
    end_event = end_cls(id=end[PE.id], name=end[PE.name])

    acts = self.pipeline_tree[PE.activities]
    act_objs = []
    for act in acts.values():
        act_cls = getattr(activity, act[PE.type])
        if act[PE.type] == PE.ServiceActivity:
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        skippable=act.get(PE.skippable, True),
                        can_retry=act.get(PE.can_retry, True),
                        timeout=act.get(PE.timeout)))
        elif act[PE.type] == PE.SubProcess:
            sub_tree = act[PE.pipeline]
            # NOTE: rebinds the outer `params` name; harmless because the
            # value is consumed immediately below
            params = act[PE.params]
            sub_parser = PipelineParser(pipeline_tree=sub_tree)
            act_objs.append(
                act_cls(id=act[PE.id],
                        pipeline=sub_parser._parse(
                            root_pipeline_data=root_pipeline_data,
                            params=params,
                            is_subprocess=True,
                            parent_context=context),
                        name=act[PE.name]))
        elif act[PE.type] == PE.LoopServiceActivity:
            act_cls = getattr(activity, act[PE.type])
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        loop_times=act[PE.loop_times]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act[PE.type])

    gateways = self.pipeline_tree[PE.gateways]
    flows = self.pipeline_tree[PE.flows]
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = getattr(gateway, gw[PE.type])
        if gw[PE.type] in [PE.ParallelGateway]:
            gateway_objs.append(
                gw_cls(id=gw[PE.id],
                       converge_gateway_id=gw[PE.converge_gateway_id],
                       name=gw[PE.name]))
        elif gw[PE.type] in [PE.ExclusiveGateway, PE.ConvergeGateway]:
            gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw[PE.type])

    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl[PE.source] == start[PE.id]:
            source = start_event
        else:
            # NOTE(review): filter(...)[0] only works on Python 2, where
            # filter returns a list — confirm the target runtime
            source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
        if fl[PE.target] == end[PE.id]:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
        flow_objs_dict[fl[PE.id]] = base.SequenceFlow(fl[PE.id], source,
                                                      target)
    flow_objs = flow_objs_dict.values()

    # add incoming and outgoing flow to acts
    if not isinstance(start[PE.outgoing], list):
        start[PE.outgoing] = [start[PE.outgoing]]
    for outgoing_id in start[PE.outgoing]:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

    if not isinstance(end[PE.incoming], list):
        end[PE.incoming] = [end[PE.incoming]]
    for incoming_id in end[PE.incoming]:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])

    for act in act_objs:
        # incoming may be a single flow id or a list of them
        incoming = acts[act.id][PE.incoming]
        if isinstance(incoming, list):
            for s in incoming:
                act.incoming.add_flow(flow_objs_dict[s])
        else:
            act.incoming.add_flow(flow_objs_dict[incoming])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

    for gw in gateway_objs:
        if isinstance(gw, gateway.ExclusiveGateway):
            for flow_id, con in gateways[gw.id][PE.conditions].items():
                con_obj = gateway.Condition(
                    con[PE.evaluate],
                    flow_objs_dict[flow_id],
                )
                gw.add_condition(con_obj)
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ParallelGateway):
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ConvergeGateway):
            for incoming_id in gateways[gw.id][PE.incoming]:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(
                flow_objs_dict[gateways[gw.id][PE.outgoing]])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))

    pipeline_data = DataObject(pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, pipeline_data, context)
    return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
def _parse(self,
           root_pipeline_data=None,
           root_pipeline_params=None,
           params=None,
           is_subprocess=False,
           parent_context=None):
    """
    @summary: parse pipeline and subprocess recursively
    @param root_pipeline_data: root data from root pipeline parsing, which
        will be passed to subprocess recursively
    @param root_pipeline_params: params from root pipeline for all subprocess
    @param params: params from parent for son subprocess
    @param is_subprocess: whether is subprocess
    @param parent_context: parent context for activity of subprocess to
        resolving inputs
    @return: Pipeline object
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    if root_pipeline_params is None:
        root_pipeline_params = {}
    if params is None:
        params = {}

    pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
    # split inputs into act_outputs / scope_info / subprocess_params
    classification = classify_inputs(pipeline_inputs, params, is_subprocess,
                                     root_pipeline_params)

    output_keys = self.pipeline_tree[PE.data][PE.outputs]
    context = Context(classification['act_outputs'], output_keys)
    for key, info in classification['scope_info'].items():
        var = get_variable(key, info, context, root_pipeline_data)
        context.set_global_var(key, var)

    # every level (root included) gets its own copy of the root data
    pipeline_data = deepcopy(root_pipeline_data)
    if is_subprocess:
        if parent_context is None:
            raise exceptions.DataTypeErrorException(
                'parent context of subprocess cannot be none')
        # subprocess params are resolved against the PARENT context
        for key, info in classification['subprocess_params'].items():
            var = get_variable(key, info, parent_context, pipeline_data)
            pipeline_data.update({key: var})

    start = self.pipeline_tree[PE.start_event]
    start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type])
    start_event = start_cls(id=start[PE.id], name=start[PE.name])

    end = self.pipeline_tree[PE.end_event]
    end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type])
    end_event = end_cls(id=end[PE.id],
                        name=end[PE.name],
                        data=DataObject({}))

    acts = self.pipeline_tree[PE.activities]
    act_objs = []
    for act in acts.values():
        act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type])
        if act[PE.type] == PE.ServiceActivity:
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            # optional dotted path to a failure-handler callable
            handler_path = act.get('failure_handler')
            failure_handler = import_string(
                handler_path) if handler_path else None
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        # fall back to the legacy field names for old trees
                        skippable=act.get(PE.skippable)
                        or act.get(PE.skippable_old, True),
                        retryable=act.get(PE.retryable)
                        or act.get(PE.retryable_old, True),
                        timeout=act.get(PE.timeout),
                        failure_handler=failure_handler))
        elif act[PE.type] == PE.SubProcess:
            sub_tree = act[PE.pipeline]
            # NOTE: rebinds the outer `params` name; the value is consumed
            # immediately below
            params = act[PE.params]
            sub_parser = PipelineParser(pipeline_tree=sub_tree)
            act_objs.append(
                act_cls(id=act[PE.id],
                        pipeline=sub_parser._parse(
                            root_pipeline_data=root_pipeline_data,
                            root_pipeline_params=root_pipeline_params,
                            params=params,
                            is_subprocess=True,
                            parent_context=context),
                        name=act[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act[PE.type])

    gateways = self.pipeline_tree[PE.gateways]
    flows = self.pipeline_tree[PE.flows]
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type])
        if gw[PE.type] in {
                PE.ParallelGateway, PE.ConditionalParallelGateway
        }:
            gateway_objs.append(
                gw_cls(id=gw[PE.id],
                       converge_gateway_id=gw[PE.converge_gateway_id],
                       name=gw[PE.name]))
        elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}:
            gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw[PE.type])

    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl[PE.source] == start[PE.id]:
            source = start_event
        else:
            # NOTE(review): filter(...)[0] only works on Python 2, where
            # filter returns a list — confirm the target runtime
            source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
        if fl[PE.target] == end[PE.id]:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
        flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target)
    flow_objs = flow_objs_dict.values()

    # add incoming and outgoing flow to acts
    if not isinstance(start[PE.outgoing], list):
        start[PE.outgoing] = [start[PE.outgoing]]
    for outgoing_id in start[PE.outgoing]:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

    if not isinstance(end[PE.incoming], list):
        end[PE.incoming] = [end[PE.incoming]]
    for incoming_id in end[PE.incoming]:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])

    for act in act_objs:
        # incoming may be a single flow id or a list of them
        incoming = acts[act.id][PE.incoming]
        if isinstance(incoming, list):
            for s in incoming:
                act.incoming.add_flow(flow_objs_dict[s])
        else:
            act.incoming.add_flow(flow_objs_dict[incoming])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

    for gw in gateway_objs:
        if isinstance(gw, ExclusiveGateway) or isinstance(
                gw, ConditionalParallelGateway):
            # conditional gateways carry an evaluate expression per out-flow
            for flow_id, con in gateways[gw.id][PE.conditions].items():
                con_obj = Condition(con[PE.evaluate],
                                    flow_objs_dict[flow_id])
                gw.add_condition(con_obj)
            if isinstance(gateways[gw.id][PE.incoming], list):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
            else:
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, ParallelGateway):
            if isinstance(gateways[gw.id][PE.incoming], list):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
            else:
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, ConvergeGateway):
            for incoming_id in gateways[gw.id][PE.incoming]:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(
                flow_objs_dict[gateways[gw.id][PE.outgoing]])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))

    context.duplicate_variables()
    pipeline_data = DataObject(pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, pipeline_data, context)
    return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
def parser(self, root_pipeline_data=None):
    """
    Build a Pipeline object from ``self.pipeline_tree`` (legacy Python 2
    version), recursing into SubProcess activities.

    @param root_pipeline_data: execution data shared with every subprocess
    @return: Pipeline object
    @raise exceptions.FlowTypeError: unknown activity or gateway type
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    pipeline_inputs = self.pipeline_tree['data']['inputs']
    act_outputs = {}
    scope_info = {}
    for key, info in pipeline_inputs.iteritems():
        # inputs sourced from an activity's output are registered as
        # act_outputs: {source_act: {source_key: global key}}
        if info.get('source_act'):
            act_outputs.setdefault(info['source_act'],
                                   {}).update({info['source_key']: key})
        else:
            scope_info.update({key: info})
    output_keys = self.pipeline_tree['data']['outputs'].keys()
    context = Context(act_outputs, output_keys)
    for key, info in scope_info.iteritems():
        value = get_variable(key, info, context, root_pipeline_data)
        context.set_global_var(key, value)

    start = self.pipeline_tree['start_event']
    start_cls = getattr(event, start['type'])
    start_event = start_cls(id=start['id'], name=start['name'])

    end = self.pipeline_tree['end_event']
    end_cls = getattr(event, end['type'])
    end_event = end_cls(id=end['id'], name=end['name'])

    acts = self.pipeline_tree['activities']
    act_objs = []
    for act in acts.values():
        act_cls = getattr(activity, act['type'])
        if act['type'] == 'ServiceActivity':
            component = ComponentLibrary.get_component(
                act['component']['code'],
                act['component']['inputs']
            )
            service = component.service()
            data = component.data_for_execution(context, root_pipeline_data)
            act_objs.append(act_cls(id=act['id'],
                                    service=service,
                                    name=act['name'],
                                    data=data,
                                    error_ignorable=act.get(
                                        'error_ignorable', False)))
        elif act['type'] == 'SubProcess':
            pipeline_info = act['pipeline']
            sub_parser = PipelineParser(pipeline_info)
            act_objs.append(act_cls(id=act['id'],
                                    pipeline=sub_parser.parser(
                                        root_pipeline_data),
                                    name=act['name']))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act['type'])

    gateways = self.pipeline_tree['gateways']
    flows = self.pipeline_tree['flows']
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = getattr(gateway, gw['type'])
        if gw['type'] in ['ParallelGateway']:
            gateway_objs.append(
                gw_cls(id=gw['id'],
                       converge_gateway_id=gw['converge_gateway_id'],
                       name=gw['name']))
        elif gw['type'] in ['ExclusiveGateway', 'ConvergeGateway']:
            gateway_objs.append(gw_cls(id=gw['id'], name=gw['name']))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw['type'])

    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl['source'] == start['id']:
            source = start_event
        else:
            # Python 2: filter returns a list, so [0] is valid here
            source = filter(lambda x: x.id == fl['source'], flow_nodes)[0]
        if fl['target'] == end['id']:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl['target'], flow_nodes)[0]
        flow_objs_dict[fl['id']] = base.SequenceFlow(fl['id'], source,
                                                     target)
    flow_objs = flow_objs_dict.values()

    # add incoming and outgoing flow to acts
    if not isinstance(start['outgoing'], list):
        start['outgoing'] = [start['outgoing']]
    for outgoing_id in start['outgoing']:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

    if not isinstance(end['incoming'], list):
        end['incoming'] = [end['incoming']]
    for incoming_id in end['incoming']:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])

    # NOTE(review): unlike _parse, this version assumes a single incoming
    # flow per activity — confirm callers never pass a list here
    for act in act_objs:
        act.incoming.add_flow(flow_objs_dict[acts[act.id]['incoming']])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id]['outgoing']])

    for gw in gateway_objs:
        if isinstance(gw, gateway.ExclusiveGateway):
            for flow_id, con in gateways[gw.id]['conditions'].iteritems():
                con_obj = gateway.Condition(
                    con['evaluate'],
                    flow_objs_dict[flow_id],
                )
                gw.add_condition(con_obj)
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id]['incoming']]
            )
            for outgoing_id in gateways[gw.id]['outgoing']:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ParallelGateway):
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id]['incoming']]
            )
            for outgoing_id in gateways[gw.id]['outgoing']:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ConvergeGateway):
            for incoming_id in gateways[gw.id]['incoming']:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(
                flow_objs_dict[gateways[gw.id]['outgoing']]
            )
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))

    root_pipeline_data = DataObject(root_pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs,
                                 act_objs, gateway_objs, root_pipeline_data,
                                 context)
    return Pipeline(self.pipeline_tree['id'], pipeline_spec)
def format_web_data_to_pipeline(web_pipeline):
    """
    Convert a pipeline tree produced by the web frontend into the
    engine-consumable format (legacy Python 2 version).

    @summary: classify constants into the constant pool / data inputs /
        activity outputs, resolve constant references, then rewrite every
        activity's inputs and recurse into subprocess trees.
    @param web_pipeline: pipeline tree data from the web frontend (not
        mutated; a deep copy is transformed and returned)
    @return: the transformed pipeline tree dict
    @raise exceptions.FlowTypeError: on an unknown activity type
    """
    pipeline_tree = copy.deepcopy(web_pipeline)
    constants = pipeline_tree.pop('constants')
    constant_pool = {}
    data_inputs = {}
    acts_outputs = {}
    for key, info in constants.iteritems():
        if info['source_tag']:
            # the tag prefix (before the first '.') names the variable class
            var_cls = library.VariableLibrary.get_var_class(
                info['source_tag'].split('.')[0])
        if info['source_type'] == 'component_outputs':
            # constant bound to a single component output
            source_key = info['source_info'].values()[0][0]
            source_step = info['source_info'].keys()[0]
            data_inputs[key] = {
                'type': 'splice',
                'source_act': source_step,
                'source_key': source_key,
                'value': info['value'],
            }
            acts_outputs.setdefault(source_step, {}).update({
                source_key: key,
            })
        # custom Lazy-type variable
        elif info['source_tag'] and var_cls and issubclass(
                var_cls, var.LazyVariable):
            data_inputs[key] = {
                'type': 'lazy',
                'source_tag': info['source_tag'],
                'value': info['value'],
            }
        else:
            constant_pool[key] = info
    pool_obj = ConstantPool(constant_pool)
    resolved_constants = pool_obj.pool
    data_inputs = calculate_constants_type(resolved_constants, data_inputs)
    pipeline_tree['data'] = {
        'inputs': data_inputs,
        'outputs': {key: key for key in pipeline_tree.pop('outputs')},
    }

    for act_id, act in pipeline_tree['activities'].iteritems():
        if act['type'] == 'ServiceActivity':
            act_data = act['component'].pop('data')
            # resolve constant references inside each input value in place
            for key, info in act_data.iteritems():
                info['value'] = pool_obj.resolve_value(info['value'])
            all_inputs = calculate_constants_type(act_data, data_inputs)
            act['component']['inputs'] = {
                key: value
                for key, value in all_inputs.iteritems() if key in act_data
            }
            act['component']['global_outputs'] = acts_outputs.get(act_id, {})
        elif act['type'] == 'SubProcess':
            act_data = {}
            act_constants = {}
            for key, info in act['pipeline']['constants'].iteritems():
                # only shown constants are exposed to the parent scope
                if info['show_type'] == 'show':
                    info['value'] = pool_obj.resolve_value(info['value'])
                    act_constants[key] = info
                    act_data[key] = info
                else:
                    act_constants[key] = info
            act['pipeline']['constants'] = act_constants
            act['exposed_constants'] = act_data.keys()
            all_inputs = calculate_constants_type(act_data, data_inputs)
            act['pipeline'] = format_web_data_to_pipeline(act['pipeline'])
            # overwrite the subprocess inputs for every exposed constant
            # with the value computed in the parent scope
            for key in act['exposed_constants']:
                act['pipeline']['data']['inputs'][key] = all_inputs[key]
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act['type'])
    return pipeline_tree
def format_web_data_to_pipeline(web_pipeline, is_subprocess=False):
    """
    @summary: convert a frontend pipeline tree into the engine format
    @param web_pipeline: pipeline data from the web frontend
    @param is_subprocess: whether this tree is a subprocess
    @return: the transformed pipeline tree dict
    @raise exceptions.FlowTypeError: on an unknown activity type
    @raise exceptions.InvalidOperationException: on conflicting node config
    """
    pipeline_tree = copy.deepcopy(web_pipeline)
    constants = pipeline_tree.pop("constants")

    # classify inputs and outputs
    classification = classify_constants(constants, is_subprocess)

    pipeline_tree["data"] = {
        "inputs": classification["data_inputs"],
        "outputs": [key for key in pipeline_tree.pop("outputs")],
        "pre_render_keys": sorted(list(get_pre_render_mako_keys(constants))),
    }

    for act_id, act in list(pipeline_tree["activities"].items()):
        if act["type"] == "ServiceActivity":
            act_data = act["component"].pop("data")

            all_inputs = calculate_constants_type(
                act_data, classification["data_inputs"])
            # only keys declared in the component's own data become inputs
            act["component"]["inputs"] = {
                key: value
                for key, value in list(all_inputs.items()) if key in act_data
            }
            act["component"]["global_outputs"] = classification[
                "acts_outputs"].get(act_id, {})

            # old web field process: map legacy field names
            if "skippable" not in act:
                act["skippable"] = act.get("isSkipped", True)
            if "retryable" not in act:
                act["retryable"] = act.get("can_retry", True)

            # reject conflicting node configuration
            if act.get("timeout_config", {}).get("enable") and (
                    act["error_ignorable"]
                    or act.get("auto_retry", {}).get("enable")):
                raise exceptions.InvalidOperationException(
                    "timeout_config can not be enabled with error_ignorable or auto_retry at the same time"
                )
        elif act["type"] == "SubProcess":
            parent_params = {}
            for key, info in list(act["pipeline"]["constants"].items()):
                # build params for the subprocess so that outer-scope values
                # can be passed down into it
                if info["show_type"] == "show":
                    # lazy variable
                    var_cls = library.VariableLibrary.get_var_class(
                        info["custom_type"])
                    if var_cls and issubclass(var_cls, var.LazyVariable):
                        if (var_cls.type == "meta"
                                and hasattr(var_cls, "process_meta_avalue")
                                and callable(var_cls.process_meta_avalue)):
                            value = var_cls.process_meta_avalue(
                                info["meta"], info["value"])
                        else:
                            value = info["value"]
                        # if a lazy variable is hooked into the global
                        # variables, switch its type to splice so the lazy
                        # value is not resolved twice.
                        # constants is searched by value because the key may
                        # change when the variable is hooked
                        if isinstance(value, str) and key in set(
                                constants.get(value, {}).get(
                                    "source_info", {}).get(act["id"], [])):
                            parent_params[key] = {
                                "type": "splice",
                                "value": value,
                            }
                        else:
                            parent_params[key] = {
                                "type": "lazy",
                                "source_tag": info["source_tag"],
                                "custom_type": info["custom_type"],
                                "value": value,
                            }
                    else:
                        parent_params[key] = {
                            "type": "splice",
                            "value": info["value"]
                        }
                    # propagate the need_render flag
                    parent_params[key]["need_render"] = info.get(
                        "need_render", True)
            act["params"] = parent_params
            act["pipeline"] = format_web_data_to_pipeline(act["pipeline"],
                                                          is_subprocess=True)
        else:
            raise exceptions.FlowTypeError("Unknown Activity type: %s" %
                                           act["type"])

    # normalize incoming flow fields of every node to list form
    for act in list(pipeline_tree["activities"].values()):
        format_node_io_to_list(act, o=False)

    for gateway in list(pipeline_tree["gateways"].values()):
        format_node_io_to_list(gateway, o=False)

    format_node_io_to_list(pipeline_tree["end_event"], o=False)

    return pipeline_tree