def get_constant_values(constants, extra_data):
    constant_values = {}
    custom_constants = {}
    # collect user-defined (custom) variables
    for key, info in list(constants.items()):
        if info["source_type"] == "component_inputs":
            constant_values[key] = info["value"]
        elif info["source_type"] == "component_outputs":
            constant_values[key] = key
        elif info["custom_type"] and info.get("is_meta") is True:
            constant_values[key] = str(info["value"])
        else:
            custom_constants[key] = info

    # classify variables by type
    classified_constants = {}
    to_calculate_constants = {}
    # handle lazy variables first
    for key, info in custom_constants.items():
        var_cls = VariableLibrary.get_var_class(info["custom_type"])
        if var_cls and issubclass(var_cls, var.LazyVariable):
            classified_constants[key] = {
                "type": "lazy",
                "source_tag": info["source_tag"],
                "custom_type": info["custom_type"],
                "value": info["value"],
            }
        else:
            to_calculate_constants[key] = info
    classified_constants = calculate_constants_type(
        to_calculate_constants, classified_constants, change_calculated=True)

    # first-pass resolution: register every variable in the context
    context = Context({})
    for key, info in list(classified_constants.items()):
        variable = get_variable(key, info, context, extra_data)
        context.set_global_var(key, variable)

    # final resolution of variable values
    for key, info in list(classified_constants.items()):
        var_value = get_variable(key, info, context, extra_data).get()
        if var_value in ["", "[]", "{}"]:
            var_value = _("The preview value is empty; variables that depend on business data do not support preview")
        constant_values[key] = str(var_value)
    return constant_values
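# A minimal usage sketch for get_constant_values, not part of the module.
# The constant key '${file_count}' and the extra_data fields are made-up
# sample data; only the dict shape (source_type / custom_type / is_meta /
# source_tag / value) mirrors what the function reads above.
def _preview_constants_example():
    constants = {
        '${file_count}': {
            'source_type': 'custom',
            'custom_type': '',   # no custom type: treated as a plain/splice constant
            'is_meta': False,
            'source_tag': '',
            'value': '3',
        }
    }
    extra_data = {'executor': 'admin', 'biz_cc_id': 2, 'language': 'zh-cn'}
    # expected result for this input: {'${file_count}': '3'}
    return get_constant_values(constants, extra_data)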
def setUp(self):
    self.key = 'bk_timing'
    self.info = {'type': 'plain', 'value': '1'}
    self.context = Context({})
    self.pipeline_data = {
        'language': 'zh-cn',
        'task_id': 63,
        'biz_cc_name': u'UTC',
        'task_name': u'20180918165807',
        'executor': u'username',
        'operator': u'username',
        'biz_cc_id': 81
    }
def test_get_variable(self):
    variable = get_variable(self.key, self.info, self.context, self.pipeline_data)
    self.assertEqual(variable.name, 'bk_timing')
    self.assertEqual(variable.value, '1')

    self.info['type'] = 'splice'
    variable1 = get_variable(self.key, self.info, self.context, self.pipeline_data)
    self.assertEqual(variable1.name, 'bk_timing')
    self.assertEqual(variable1.value, '1')
    self.assertEqual(variable1._refs, {})

    self.key = '${ip}'
    self.info = {
        'custom_type': 'ip',
        'source_tag': u'var_ip_picker.ip_picker',
        'type': 'lazy',
        'value': {
            u'var_ip_custom_value': u'1.1.1.11.1',
            u'var_ip_method': u'custom',
            u'var_ip_tree': u''
        }
    }
    self.context = Context(self.key)
    self.pipeline_data = {
        'language': 'zh-cn',
        'task_id': 78,
        'biz_cc_name': u'UTC',
        'task_name': u'20180918175615',
        'executor': u'username',
        'operator': u'username',
        'biz_cc_id': 0
    }
    variable2 = get_variable(self.key, self.info, self.context, self.pipeline_data)
    self.assertEqual(variable2.name, '${ip}')
    self.assertEqual(variable2.value, self.info["value"])
    self.assertEqual(variable2._refs, {})
    self.assertEqual(variable2.code, 'ip')

    self.info["type"] = 'exception'
    self.assertRaises(exceptions.DataTypeErrorException,
                      get_variable,
                      self.key, self.info, self.context, self.pipeline_data)
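# A hedged companion test, illustrative and not part of the original suite:
# get_constant_values above reads resolved values via .get(), so a plain
# variable is assumed to round-trip its raw value through .get() unchanged.
def test_plain_variable_get(self):
    variable = get_variable('bk_timing', {'type': 'plain', 'value': '1'},
                            Context({}), {})
    self.assertEqual(variable.get(), '1')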
def setUp(self):
    self.name = '${ip}'
    self.info = {
        'source_tag': u'var_ip_picker.ip_picker',
        'type': 'lazy',
        'value': {
            u'var_ip_custom_value': u'1.1.1.11.1',
            u'var_ip_method': u'custom',
            u'var_ip_tree': u''
        }
    }
    self.context = Context(self.name)
    self.pipeline_data = {
        'language': 'zh-cn',
        'task_id': 78,
        'biz_cc_name': u'UTC',
        'task_name': u'20180918175615',
        'executor': u'username',
        'operator': u'username',
        'biz_cc_id': 0
    }
    self.code = 'var_ip_picker'
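# For orientation, a minimal sketch of the kind of variable this fixture
# exercises. It assumes custom variables subclass var.LazyVariable and
# implement get_value(), with `code` matching the custom_type recorded in the
# constant definitions (cf. variable2.code == 'ip' above); how VariableLibrary
# registers such classes is not shown in this section.
class ExampleLazyVariable(var.LazyVariable):
    code = 'example'
    name = 'Example lazy variable'

    def get_value(self):
        # self.value holds the raw saved form; resolution is deferred until
        # the variable is actually read
        return str(self.value)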
def _parse(self, root_pipeline_data=None, params=None, is_subprocess=False,
           parent_context=None):
    if root_pipeline_data is None:
        root_pipeline_data = {}
    if params is None:
        params = {}

    pipeline_data = deepcopy(root_pipeline_data) if is_subprocess else root_pipeline_data

    pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
    act_outputs = {}
    scope_info = {}
    process_params = {}
    for key, info in pipeline_inputs.items():
        if info.get(PE.source_act):
            act_outputs.setdefault(info[PE.source_act], {}).update({info[PE.source_key]: key})
            continue
        if info.get(PE.is_param, False):
            info = params.get(key, info)
            if is_subprocess:
                process_params.update({key: info})
                continue
        scope_info.update({key: info})

    output_keys = self.pipeline_tree[PE.data][PE.outputs].keys()
    context = Context(act_outputs, output_keys)
    for key, info in scope_info.items():
        var = get_variable(key, info, context, pipeline_data)
        context.set_global_var(key, var)

    if is_subprocess:
        if parent_context is None:
            raise exceptions.DataTypeErrorException(
                'parent context of subprocess cannot be none')
        for key, info in process_params.items():
            var = get_variable(key, info, parent_context, pipeline_data)
            pipeline_data.update({key: var})

    start = self.pipeline_tree[PE.start_event]
    start_cls = getattr(event, start[PE.type])
    start_event = start_cls(id=start[PE.id], name=start[PE.name])

    end = self.pipeline_tree[PE.end_event]
    end_cls = getattr(event, end[PE.type])
    end_event = end_cls(id=end[PE.id], name=end[PE.name])

    acts = self.pipeline_tree[PE.activities]
    act_objs = []
    for act in acts.values():
        act_cls = getattr(activity, act[PE.type])
        if act[PE.type] == PE.ServiceActivity:
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        skippable=act.get(PE.skippable, True),
                        can_retry=act.get(PE.can_retry, True),
                        timeout=act.get(PE.timeout)))
        elif act[PE.type] == PE.SubProcess:
            sub_tree = act[PE.pipeline]
            params = act[PE.params]
            sub_parser = PipelineParser(pipeline_tree=sub_tree)
            act_objs.append(
                act_cls(id=act[PE.id],
                        pipeline=sub_parser._parse(
                            root_pipeline_data=root_pipeline_data,
                            params=params,
                            is_subprocess=True,
                            parent_context=context),
                        name=act[PE.name]))
        elif act[PE.type] == PE.LoopServiceActivity:
            act_cls = getattr(activity, act[PE.type])
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        loop_times=act[PE.loop_times]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act[PE.type])

    gateways = self.pipeline_tree[PE.gateways]
    flows = self.pipeline_tree[PE.flows]
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = getattr(gateway, gw[PE.type])
        if gw[PE.type] in [PE.ParallelGateway]:
            gateway_objs.append(
                gw_cls(id=gw[PE.id],
                       converge_gateway_id=gw[PE.converge_gateway_id],
                       name=gw[PE.name]))
        elif gw[PE.type] in [PE.ExclusiveGateway, PE.ConvergeGateway]:
            gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw[PE.type])

    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl[PE.source] == start[PE.id]:
            source = start_event
        else:
            source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
        if fl[PE.target] == end[PE.id]:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
        flow_objs_dict[fl[PE.id]] = base.SequenceFlow(fl[PE.id], source, target)
    flow_objs = flow_objs_dict.values()

    # add incoming and outgoing flow to acts
    if not isinstance(start[PE.outgoing], list):
        start[PE.outgoing] = [start[PE.outgoing]]
    for outgoing_id in start[PE.outgoing]:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

    if not isinstance(end[PE.incoming], list):
        end[PE.incoming] = [end[PE.incoming]]
    for incoming_id in end[PE.incoming]:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])

    for act in act_objs:
        incoming = acts[act.id][PE.incoming]
        if isinstance(incoming, list):
            for s in incoming:
                act.incoming.add_flow(flow_objs_dict[s])
        else:
            act.incoming.add_flow(flow_objs_dict[incoming])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

    for gw in gateway_objs:
        if isinstance(gw, gateway.ExclusiveGateway):
            for flow_id, con in gateways[gw.id][PE.conditions].items():
                con_obj = gateway.Condition(con[PE.evaluate],
                                            flow_objs_dict[flow_id])
                gw.add_condition(con_obj)
            gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ParallelGateway):
            gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ConvergeGateway):
            for incoming_id in gateways[gw.id][PE.incoming]:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(flow_objs_dict[gateways[gw.id][PE.outgoing]])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))

    pipeline_data = DataObject(pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, pipeline_data, context)
    return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
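# Standalone worked example of the input-classification loop at the top of
# _parse, with the PE constants replaced by their literal key names (an
# assumption about the tree format) and made-up node ids.
def _classify_inputs_example():
    pipeline_inputs = {
        '${job_result}': {'source_act': 'act_1', 'source_key': 'result'},
        '${bk_timing}': {'type': 'plain', 'value': '1'},
    }
    act_outputs, scope_info = {}, {}
    for key, info in pipeline_inputs.items():
        if info.get('source_act'):
            # outputs of other activities are wired through the context,
            # keyed by the producing activity
            act_outputs.setdefault(info['source_act'], {})[info['source_key']] = key
        else:
            # everything else becomes a global variable definition
            scope_info[key] = info
    assert act_outputs == {'act_1': {'result': '${job_result}'}}
    assert scope_info == {'${bk_timing}': {'type': 'plain', 'value': '1'}}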
def _parse(self, root_pipeline_data=None, root_pipeline_params=None,
           params=None, is_subprocess=False, parent_context=None):
    """
    @summary: parse pipeline and subprocess recursively
    @param root_pipeline_data: data from parsing the root pipeline, which is
        passed down to subprocesses recursively
    @param root_pipeline_params: params from the root pipeline, shared by all
        subprocesses
    @param params: params passed from the parent pipeline to a child subprocess
    @param is_subprocess: whether the pipeline being parsed is a subprocess
    @param parent_context: parent context used to resolve the inputs of
        subprocess activities
    @return: Pipeline object
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    if root_pipeline_params is None:
        root_pipeline_params = {}
    if params is None:
        params = {}

    pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
    classification = classify_inputs(pipeline_inputs, params, is_subprocess,
                                     root_pipeline_params)

    output_keys = self.pipeline_tree[PE.data][PE.outputs]
    context = Context(classification['act_outputs'], output_keys)
    for key, info in classification['scope_info'].items():
        var = get_variable(key, info, context, root_pipeline_data)
        context.set_global_var(key, var)

    pipeline_data = deepcopy(root_pipeline_data)
    if is_subprocess:
        if parent_context is None:
            raise exceptions.DataTypeErrorException(
                'parent context of subprocess cannot be none')
        for key, info in classification['subprocess_params'].items():
            var = get_variable(key, info, parent_context, pipeline_data)
            pipeline_data.update({key: var})

    start = self.pipeline_tree[PE.start_event]
    start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type])
    start_event = start_cls(id=start[PE.id], name=start[PE.name])

    end = self.pipeline_tree[PE.end_event]
    end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type])
    end_event = end_cls(id=end[PE.id], name=end[PE.name], data=DataObject({}))

    acts = self.pipeline_tree[PE.activities]
    act_objs = []
    for act in acts.values():
        act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type])
        if act[PE.type] == PE.ServiceActivity:
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            handler_path = act.get('failure_handler')
            failure_handler = import_string(handler_path) if handler_path else None
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        skippable=act.get(PE.skippable) or act.get(PE.skippable_old, True),
                        retryable=act.get(PE.retryable) or act.get(PE.retryable_old, True),
                        timeout=act.get(PE.timeout),
                        failure_handler=failure_handler))
        elif act[PE.type] == PE.SubProcess:
            sub_tree = act[PE.pipeline]
            params = act[PE.params]
            sub_parser = PipelineParser(pipeline_tree=sub_tree)
            act_objs.append(
                act_cls(id=act[PE.id],
                        pipeline=sub_parser._parse(
                            root_pipeline_data=root_pipeline_data,
                            root_pipeline_params=root_pipeline_params,
                            params=params,
                            is_subprocess=True,
                            parent_context=context),
                        name=act[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act[PE.type])

    gateways = self.pipeline_tree[PE.gateways]
    flows = self.pipeline_tree[PE.flows]
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type])
        if gw[PE.type] in {PE.ParallelGateway, PE.ConditionalParallelGateway}:
            gateway_objs.append(
                gw_cls(id=gw[PE.id],
                       converge_gateway_id=gw[PE.converge_gateway_id],
                       name=gw[PE.name]))
        elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}:
            gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw[PE.type])

    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl[PE.source] == start[PE.id]:
            source = start_event
        else:
            source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
        if fl[PE.target] == end[PE.id]:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
        flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target)
    flow_objs = flow_objs_dict.values()

    # add incoming and outgoing flow to acts
    if not isinstance(start[PE.outgoing], list):
        start[PE.outgoing] = [start[PE.outgoing]]
    for outgoing_id in start[PE.outgoing]:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

    if not isinstance(end[PE.incoming], list):
        end[PE.incoming] = [end[PE.incoming]]
    for incoming_id in end[PE.incoming]:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])

    for act in act_objs:
        incoming = acts[act.id][PE.incoming]
        if isinstance(incoming, list):
            for s in incoming:
                act.incoming.add_flow(flow_objs_dict[s])
        else:
            act.incoming.add_flow(flow_objs_dict[incoming])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])

    for gw in gateway_objs:
        if isinstance(gw, ExclusiveGateway) or isinstance(gw, ConditionalParallelGateway):
            for flow_id, con in gateways[gw.id][PE.conditions].items():
                con_obj = Condition(con[PE.evaluate], flow_objs_dict[flow_id])
                gw.add_condition(con_obj)
            if isinstance(gateways[gw.id][PE.incoming], list):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
            else:
                gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, ParallelGateway):
            if isinstance(gateways[gw.id][PE.incoming], list):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
            else:
                gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, ConvergeGateway):
            for incoming_id in gateways[gw.id][PE.incoming]:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(flow_objs_dict[gateways[gw.id][PE.outgoing]])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))

    context.duplicate_variables()
    pipeline_data = DataObject(pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, pipeline_data, context)
    return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
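# classify_inputs is not shown in this section; below is a plausible sketch
# reconstructed from how its return value is consumed above (the
# 'act_outputs', 'scope_info' and 'subprocess_params' keys), mirroring the
# inline classification loop of the pre-refactor _parse. The real helper may
# differ; in particular, whatever it does with root_pipeline_params is
# omitted here.
def classify_inputs_sketch(pipeline_inputs, params, is_subprocess,
                           root_pipeline_params):
    act_outputs, scope_info, subprocess_params = {}, {}, {}
    for key, info in pipeline_inputs.items():
        if info.get(PE.source_act):
            # wired from another activity's output
            act_outputs.setdefault(info[PE.source_act], {})[info[PE.source_key]] = key
            continue
        if info.get(PE.is_param, False):
            # a subprocess parameter may be overridden by the parent
            info = params.get(key, info)
            if is_subprocess:
                subprocess_params[key] = info
                continue
        scope_info[key] = info
    return {
        'act_outputs': act_outputs,
        'scope_info': scope_info,
        'subprocess_params': subprocess_params,
    }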
def parser(self, root_pipeline_data=None):
    if root_pipeline_data is None:
        root_pipeline_data = {}

    pipeline_inputs = self.pipeline_tree['data']['inputs']
    act_outputs = {}
    scope_info = {}
    for key, info in pipeline_inputs.iteritems():
        if info.get('source_act'):
            act_outputs.setdefault(info['source_act'], {}).update({info['source_key']: key})
        else:
            scope_info.update({key: info})

    output_keys = self.pipeline_tree['data']['outputs'].keys()
    context = Context(act_outputs, output_keys)
    for key, info in scope_info.iteritems():
        value = get_variable(key, info, context, root_pipeline_data)
        context.set_global_var(key, value)

    start = self.pipeline_tree['start_event']
    start_cls = getattr(event, start['type'])
    start_event = start_cls(id=start['id'], name=start['name'])

    end = self.pipeline_tree['end_event']
    end_cls = getattr(event, end['type'])
    end_event = end_cls(id=end['id'], name=end['name'])

    acts = self.pipeline_tree['activities']
    act_objs = []
    for act in acts.values():
        act_cls = getattr(activity, act['type'])
        if act['type'] == 'ServiceActivity':
            component = ComponentLibrary.get_component(
                act['component']['code'], act['component']['inputs'])
            service = component.service()
            data = component.data_for_execution(context, root_pipeline_data)
            act_objs.append(act_cls(id=act['id'],
                                    service=service,
                                    name=act['name'],
                                    data=data,
                                    error_ignorable=act.get('error_ignorable', False)))
        elif act['type'] == 'SubProcess':
            pipeline_info = act['pipeline']
            sub_parser = PipelineParser(pipeline_info)
            act_objs.append(act_cls(id=act['id'],
                                    pipeline=sub_parser.parser(root_pipeline_data),
                                    name=act['name']))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act['type'])

    gateways = self.pipeline_tree['gateways']
    flows = self.pipeline_tree['flows']
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = getattr(gateway, gw['type'])
        if gw['type'] in ['ParallelGateway']:
            gateway_objs.append(
                gw_cls(id=gw['id'],
                       converge_gateway_id=gw['converge_gateway_id'],
                       name=gw['name']))
        elif gw['type'] in ['ExclusiveGateway', 'ConvergeGateway']:
            gateway_objs.append(gw_cls(id=gw['id'], name=gw['name']))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw['type'])

    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl['source'] == start['id']:
            source = start_event
        else:
            source = filter(lambda x: x.id == fl['source'], flow_nodes)[0]
        if fl['target'] == end['id']:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl['target'], flow_nodes)[0]
        flow_objs_dict[fl['id']] = base.SequenceFlow(fl['id'], source, target)
    flow_objs = flow_objs_dict.values()

    # add incoming and outgoing flow to acts
    if not isinstance(start['outgoing'], list):
        start['outgoing'] = [start['outgoing']]
    for outgoing_id in start['outgoing']:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])

    if not isinstance(end['incoming'], list):
        end['incoming'] = [end['incoming']]
    for incoming_id in end['incoming']:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])

    for act in act_objs:
        act.incoming.add_flow(flow_objs_dict[acts[act.id]['incoming']])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id]['outgoing']])

    for gw in gateway_objs:
        if isinstance(gw, gateway.ExclusiveGateway):
            for flow_id, con in gateways[gw.id]['conditions'].iteritems():
                con_obj = gateway.Condition(con['evaluate'],
                                            flow_objs_dict[flow_id])
                gw.add_condition(con_obj)
            gw.incoming.add_flow(flow_objs_dict[gateways[gw.id]['incoming']])
            for outgoing_id in gateways[gw.id]['outgoing']:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ParallelGateway):
            gw.incoming.add_flow(flow_objs_dict[gateways[gw.id]['incoming']])
            for outgoing_id in gateways[gw.id]['outgoing']:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ConvergeGateway):
            for incoming_id in gateways[gw.id]['incoming']:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(flow_objs_dict[gateways[gw.id]['outgoing']])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))

    root_pipeline_data = DataObject(root_pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, root_pipeline_data, context)
    return Pipeline(self.pipeline_tree['id'], pipeline_spec)
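# Usage sketch for the parser entry point above: construct a PipelineParser
# from a pipeline_tree dict and obtain a runnable Pipeline object. web_tree
# and the root data fields are hypothetical sample inputs; how the resulting
# Pipeline is handed to the engine is outside this section.
def _parse_pipeline_example(web_tree):
    parser = PipelineParser(pipeline_tree=web_tree)
    root_data = {'executor': 'admin', 'operator': 'admin', 'biz_cc_id': 2}
    return parser.parser(root_pipeline_data=root_data)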