def blend(source, target, custom_stream=None):
    """Blend the source node's stream set into the target node.

    :param source: node dict whose STREAM entry is a set of stream names
    :param target: node dict whose STREAM set receives the streams
    :param custom_stream: optional override — a single stream name or a set
        of names used INSTEAD of source's streams
    :return: None (mutates ``target[STREAM]`` in place)
    :raises exceptions.InvalidOperationException: if no custom stream is
        given and the source node's stream set is empty
    """
    if custom_stream:
        # use custom stream instead of source's stream;
        # set.update handles both a single name and a set of names uniformly
        if isinstance(custom_stream, set):
            target[STREAM].update(custom_stream)
        else:
            target[STREAM].add(custom_stream)
        return

    if not source[STREAM]:
        raise exceptions.InvalidOperationException(
            'stream validation error, node(%s) stream is empty' % source[PE.id])

    # blend: propagate every stream of the source node into the target
    target[STREAM].update(source[STREAM])
def get_node_for_sequence(sid, tree, node_type):
    """Resolve the node a sequence flow endpoint points at.

    :param sid: sequence flow id
    :param tree: pipeline tree holding activities, gateways and events
    :param node_type: which end of the flow to follow (e.g. source/target key)
    :return: the node dict (activity, gateway, start event or end event)
    :raises exceptions.InvalidOperationException: if the id matches nothing
    """
    target_id = tree[PE.flows][sid][node_type]

    # Activities and gateways are keyed by node id — direct lookup.
    for pool_key in (PE.activities, PE.gateways):
        pool = tree[pool_key]
        if target_id in pool:
            return pool[target_id]

    # Start/end events are single nodes — compare against their own id.
    for event_key in (PE.end_event, PE.start_event):
        event = tree[event_key]
        if event[PE.id] == target_id:
            return event

    raise exceptions.InvalidOperationException('node(%s) not in data' % target_id)
def format_web_data_to_pipeline(web_pipeline, is_subprocess=False):
    """
    @summary: Convert a front-end (web) pipeline description into the
        engine's pipeline tree format. Works on a deep copy, so the
        input ``web_pipeline`` is never mutated.
    @param web_pipeline: pipeline data as produced by the web front end
    @param is_subprocess: whether this tree is a subprocess of another pipeline
    @return: the converted pipeline tree dict
    """
    pipeline_tree = copy.deepcopy(web_pipeline)
    constants = pipeline_tree.pop("constants")
    # classify inputs and outputs
    classification = classify_constants(constants, is_subprocess)
    pipeline_tree["data"] = {
        "inputs": classification["data_inputs"],
        # materialize the popped "outputs" container into a plain list of keys
        "outputs": [key for key in pipeline_tree.pop("outputs")],
        "pre_render_keys": sorted(list(get_pre_render_mako_keys(constants))),
    }
    for act_id, act in list(pipeline_tree["activities"].items()):
        if act["type"] == "ServiceActivity":
            act_data = act["component"].pop("data")
            all_inputs = calculate_constants_type(
                act_data, classification["data_inputs"])
            # keep only the computed inputs that the component actually declared
            act["component"]["inputs"] = {
                key: value
                for key, value in list(all_inputs.items())
                if key in act_data
            }
            act["component"]["global_outputs"] = classification[
                "acts_outputs"].get(act_id, {})
            # old web field process: fall back to legacy field names
            # ("isSkipped" / "can_retry") when the new ones are absent
            if "skippable" not in act:
                act["skippable"] = act.get("isSkipped", True)
            if "retryable" not in act:
                act["retryable"] = act.get("can_retry", True)
            # reject conflicting node configuration: a timeout cannot be
            # combined with error-ignore or auto-retry on the same node
            if act.get("timeout_config", {}).get("enable") and (
                    act["error_ignorable"] or act.get("auto_retry", {}).get("enable")):
                raise exceptions.InvalidOperationException(
                    "timeout_config can not be enabled with error_ignorable or auto_retry at the same time"
                )
        elif act["type"] == "SubProcess":
            parent_params = {}
            for key, info in list(act["pipeline"]["constants"].items()):
                # build params for the subprocess so outer-scope parameters
                # can be passed down into it
                if info["show_type"] == "show":
                    # lazy variable handling
                    var_cls = library.VariableLibrary.get_var_class(
                        info["custom_type"])
                    if var_cls and issubclass(var_cls, var.LazyVariable):
                        if (var_cls.type == "meta"
                                and hasattr(var_cls, "process_meta_avalue")
                                and callable(var_cls.process_meta_avalue)):
                            value = var_cls.process_meta_avalue(
                                info["meta"], info["value"])
                        else:
                            value = info["value"]
                        # If a lazy-typed variable has been hooked into the
                        # global constants, downgrade it to "splice" so its
                        # lazy value is not resolved twice. The lookup into
                        # ``constants`` is keyed by ``value`` (not ``key``)
                        # because the key may change when the variable is
                        # hooked.
                        if isinstance(value, str) and key in set(
                                constants.get(value, {}).get(
                                    "source_info", {}).get(act["id"], [])):
                            parent_params[key] = {
                                "type": "splice",
                                "value": value,
                            }
                        else:
                            parent_params[key] = {
                                "type": "lazy",
                                "source_tag": info["source_tag"],
                                "custom_type": info["custom_type"],
                                "value": value,
                            }
                    else:
                        parent_params[key] = {
                            "type": "splice",
                            "value": info["value"]
                        }
                    # propagate the need_render flag (defaults to True)
                    parent_params[key]["need_render"] = info.get(
                        "need_render", True)
            act["params"] = parent_params
            # recurse: the nested pipeline goes through the same conversion
            act["pipeline"] = format_web_data_to_pipeline(act["pipeline"],
                                                          is_subprocess=True)
        else:
            raise exceptions.FlowTypeError("Unknown Activity type: %s" %
                                           act["type"])
    # normalize node incoming/outgoing fields to list form (o=False:
    # presumably skips the outgoing side here — confirm against helper)
    for act in list(pipeline_tree["activities"].values()):
        format_node_io_to_list(act, o=False)
    for gateway in list(pipeline_tree["gateways"].values()):
        format_node_io_to_list(gateway, o=False)
    format_node_io_to_list(pipeline_tree["end_event"], o=False)
    return pipeline_tree