def alter_list_data_to_serialize(self, request, data):
    """Enrich every serialized component bundle with its outputs format and form."""
    for item in data['objects']:
        comp_cls = ComponentLibrary.get_component_class(item.data['code'])
        item.data['output'] = comp_cls.outputs_format()
        item.data['form'] = comp_cls.form
    return data
def alter_detail_data_to_serialize(self, request, data):
    """Enrich the detail bundle with the component's outputs format and form."""
    comp_cls = ComponentLibrary.get_component_class(data.data['code'])
    data.data['output'] = comp_cls.outputs_format()
    data.data['form'] = comp_cls.form
    return data
def alter_list_data_to_serialize(self, request, data):
    """Post-process the serialized component list.

    Drops hidden or unloadable plugins, attaches per-component metadata
    (outputs, form, description, phase) and localizes group/plugin names.
    """
    component_phase_dict = DeprecatedPlugin.objects.get_components_phase_dict()
    altered_objects = []
    for bundle in data["objects"]:
        # The remote plugin must not show up in the list.
        if bundle.data["code"] == "remote_plugin":
            continue
        try:
            component = ComponentLibrary.get_component_class(bundle.data["code"], bundle.data["version"])
        except ComponentNotExistException:
            # This plugin is not loaded in memory; skip it so one missing
            # plugin does not break the whole API response.
            continue
        bundle.data["output"] = component.outputs_format()
        bundle.data["form"] = component.form
        bundle.data["output_form"] = component.output_form
        bundle.data["desc"] = component.desc
        bundle.data["form_is_embedded"] = component.form_is_embedded()
        # Internationalization: names follow the "<group>-<name>" convention.
        name = bundle.data["name"].split("-")
        bundle.data["group_name"] = _(name[0])
        bundle.data["group_icon"] = component.group_icon
        bundle.data["name"] = _(name[1])
        # Phase defaults to "available" when no deprecation record exists.
        bundle.data["phase"] = component_phase_dict.get(bundle.data["code"], {}).get(
            bundle.data["version"], DeprecatedPlugin.PLUGIN_PHASE_AVAILABLE
        )
        # English sort key derived from the group part; "#" when no match.
        group_name_en = group_en_pattern.findall(name[0] or "")
        bundle.data["sort_key_group_en"] = group_name_en[0] if group_name_en else "#"
        altered_objects.append(bundle)
    data["objects"] = altered_objects
    return data
def alter_detail_data_to_serialize(self, request, data):
    """Attach component metadata to the detail bundle and localize its names."""
    comp_cls = ComponentLibrary.get_component_class(data.data['code'])
    data.data['output'] = comp_cls.outputs_format()
    data.data['form'] = comp_cls.form
    # Names follow the "<group>-<name>" convention; translate both halves.
    parts = data.data['name'].split('-')
    data.data['group_name'] = _(parts[0])
    data.data['name'] = _(parts[1])
    return data
def _format_outputs(
    self,
    outputs: dict,
    component_code: str,
    pipeline_instance: PipelineInstance,
    subprocess_stack: Optional[list] = None,
) -> (bool, str, list):
    """Build the display table for a node's outputs.

    :param outputs: raw outputs payload; actual values live under outputs["outputs"]
    :param component_code: component code of the node; falsy for non-component nodes
    :param pipeline_instance: instance whose execution data locates the node
    :param subprocess_stack: subprocess path used to resolve the node, optional
    :return: (ok, error_message, outputs_table)
    """
    outputs_table = []
    if component_code:
        # Resolve the component version recorded on the node itself.
        version = (
            self._get_node_info(self.node_id, pipeline_instance.execution_data, subprocess_stack)
            .get("component", {})
            .get("version", None)
        )
        try:
            component = ComponentLibrary.get_component_class(component_code=component_code, version=version)
            outputs_format = component.outputs_format()
        except Exception:
            logger.exception(
                "_format_outputs(node_id: {}, outputs: {}, component_code: {}) fail".format(
                    self.node_id, outputs, component_code
                )
            )
            return False, "_format_outputs fail", []
        else:
            # for some special empty case e.g. ''
            outputs_data = outputs.get("outputs") or {}
            # Output parameters preset in the component definition come first,
            # flagged preset=True; missing values default to "".
            archived_keys = []
            for outputs_item in outputs_format:
                value = outputs_data.get(outputs_item["key"], "")
                outputs_table.append(
                    {"name": outputs_item["name"], "key": outputs_item["key"], "value": value, "preset": True}
                )
                archived_keys.append(outputs_item["key"])
            # Remaining (non-preset) output parameters.
            for out_key, out_value in list(outputs_data.items()):
                if out_key not in archived_keys:
                    outputs_table.append({"name": out_key, "key": out_key, "value": out_value, "preset": False})
    else:
        # Non-component node: every output is treated as non-preset.
        try:
            outputs_table = [
                {"key": key, "value": val, "preset": False}
                for key, val in list((outputs.get("outputs") or {}).items())
            ]
        except Exception:
            logger.exception(
                "_format_outputs(node_id: {}, outputs: {}, component_code: {}) fail".format(
                    self.node_id, outputs, component_code
                )
            )
            return False, "_format_outputs fail", []
    return True, "", outputs_table
def alter_list_data_to_serialize(self, request, data):
    """Attach outputs, form and description to each component and localize names."""
    for item in data['objects']:
        comp_cls = ComponentLibrary.get_component_class(item.data['code'])
        item.data['output'] = comp_cls.outputs_format()
        item.data['form'] = comp_cls.form
        item.data['desc'] = comp_cls.desc
        # Names follow the "<group>-<name>" convention; translate both halves.
        parts = item.data['name'].split('-')
        item.data['group_name'] = _(parts[0])
        item.data['name'] = _(parts[1])
    return data
def to_representation(self, instance):
    """Cache component metadata on the serializer, then delegate to the base class.

    Raises NotFound when the component is no longer registered in memory.
    """
    code, version = instance.code, instance.version
    try:
        self.component = ComponentLibrary.get_component_class(code, version)
        self.component_name = instance.name.split("-")
        self.component_phase_dict = DeprecatedPlugin.objects.get_components_phase_dict()
    except ComponentNotExistException:
        raise NotFound("Can not found {}({})".format(code, version))
    return super(ComponentModelSerializer, self).to_representation(instance)
def alter_detail_data_to_serialize(self, request, data):
    """Extend the base detail data with component metadata and localized names."""
    data = super(ComponentModelResource, self).alter_detail_data_to_serialize(request, data)
    comp_cls = ComponentLibrary.get_component_class(data.data['code'])
    data.data['output'] = comp_cls.outputs_format()
    data.data['form'] = comp_cls.form
    data.data['desc'] = comp_cls.desc
    data.data['form_is_embedded'] = comp_cls.form_is_embedded()
    # Names follow the "<group>-<name>" convention; translate both halves.
    parts = data.data['name'].split('-')
    data.data['group_name'] = _(parts[0])
    data.data['name'] = _(parts[1])
    return data
def schema_for_var(cls, var):
    """Resolve the input schema for a variable definition.

    :param var: variable dict carrying at least source_type / source_tag
    :return: schema as a plain dict, or None when the variable is not
             schema-describable (unsupported source type, tag decode
             failure, or a failed schema check)
    """
    source_type = var.get('source_type')
    if source_type not in cls.accept_var_type:
        return None
    source_tag = var.get('source_tag')
    try:
        code, tag = cls.decode_source_tag(source_tag)
    except Exception:
        logger.error(
            'error occurred when decode source_tag for {key}, var: {var}, error: {trace}'
            .format(key=var.get('key'), var=var, trace=traceback.format_exc()))
        return None
    if source_type == VAR_SOURCE_TYPE_INPUTS:
        component_cls = ComponentLibrary.get_component_class(
            component_code=code)
        # maybe custom var from subprocess
        # (no component for this code: fall through to the custom-var branch)
        if not component_cls:
            source_type = VAR_SOURCE_TYPE_CUSTOM
        else:
            schema = component_cls.get_input_schema(key=tag)
            ok, message = cls._schema_check(code=code, var=var, schema=schema, type='component')
            if not ok:
                logger.error(message)
                return None
            return schema.as_dict()
    if source_type == VAR_SOURCE_TYPE_CUSTOM:
        var_cls = VariableLibrary.get_var_class(code=code)
        schema = getattr(var_cls, 'schema', None)
        ok, message = cls._schema_check(code=code, var=var, schema=schema, type='variable')
        if not ok:
            logger.error(message)
            return None
        return schema.as_dict()
def get_plugin_list(request, project_id):
    """Return metadata for every enabled component as a JSON response."""
    enabled = ComponentModel.objects.filter(status=True)
    data = [
        {
            'inputs': comp.inputs_format(),
            'outputs': comp.outputs_format(),
            'desc': comp.desc,
            'code': comp.code,
            'name': comp.name,
            'group_name': comp.group_name,
        }
        for comp in (ComponentLibrary.get_component_class(m.code) for m in enabled)
    ]
    return JsonResponse({'result': True, 'data': data})
def get_plugin_detail(request, project_id):
    """Return the full metadata of one public component.

    Reads `code` (required) and `version` (default "legacy") from the query
    string; components bound to other projects are excluded.
    """
    project_id = request.project.id
    code = request.GET.get("code")
    version = request.GET.get("version", "legacy")
    if not code:
        return {
            "result": False,
            "message": "parameter code need to be provided.",
            "code": err_code.VALIDATION_ERROR.code,
        }
    # Exclude project-scoped plugins; only public components are supported here.
    exclude_component_codes = ProjectBasedComponent.objects.get_components_of_other_projects(
        project_id)
    try:
        component = ComponentModel.objects.exclude(
            code__in=exclude_component_codes).get(status=True, code=code, version=version)
    except ComponentModel.DoesNotExist:
        return {
            "result": False,
            "message": "can not find suitable component with code: {} and version: {}".
            format(code, version),
            "code": err_code.VALIDATION_ERROR.code,
        }
    component_info = ComponentLibrary.get_component_class(
        component.code, component.version)
    data = {
        "inputs": component_info.inputs_format(),
        "outputs": component_info.outputs_format(),
        "desc": component_info.desc,
        "code": component_info.code,
        "name": component_info.name,
        "group_name": component_info.group_name,
        "version": component_info.version,
        "form": component_info.form,
    }
    return {"result": True, "data": data, "code": err_code.SUCCESS.code}
def test_get_component(self):
    """get_component should return an instance of the registered component class."""
    class TestService(Service):
        pass

    class TestComponent(Component):
        name = 'name'
        code = 'code'
        bound_service = TestService

        def clean_execute_data(self, context):
            pass

        def outputs_format(self):
            pass

    # Registration presumably happens via Component's metaclass on class
    # creation — TODO confirm against the Component base class.
    self.assertEqual(
        ComponentLibrary.get_component('code', {}).__class__, TestComponent)
def test_ignore_component(self):
    """A component filtered out by the library must not be resolvable by code."""
    class IgnoreService(Service):
        def execute(self, data, parent_data):
            pass

    class IgnoreComponent(Component):
        name = u'ignore_service'
        bound_service = IgnoreService
        code = 'ignore_component'
        form = 'form path'

        def outputs_format(self):
            return {'result': bool, 'message': str}

        def clean_execute_data(self, context):
            return {}

    # NOTE(review): the ignore mechanism itself is configured elsewhere
    # (not visible in this block) — the assertion only checks the outcome.
    self.assertIsNone(
        ComponentLibrary.get_component_class('ignore_component'))
def alter_detail_data_to_serialize(self, request, data):
    """Extend base detail data with versioned component metadata and localized names.

    Raises NotFound when the component is not registered in memory.
    """
    bundle = super(ComponentModelResource, self).alter_detail_data_to_serialize(request, data)
    code, version = bundle.data["code"], bundle.data["version"]
    try:
        comp_cls = ComponentLibrary.get_component_class(code, version)
    except ComponentNotExistException:
        raise NotFound("Can not found {}({})".format(code, version))
    bundle.data["output"] = comp_cls.outputs_format()
    bundle.data["form"] = comp_cls.form
    bundle.data["output_form"] = comp_cls.output_form
    bundle.data["desc"] = comp_cls.desc
    bundle.data["form_is_embedded"] = comp_cls.form_is_embedded()
    # Names follow the "<group>-<name>" convention; translate both halves.
    parts = bundle.data["name"].split("-")
    bundle.data["group_name"] = _(parts[0])
    bundle.data["group_icon"] = comp_cls.group_icon
    bundle.data["name"] = _(parts[1])
    # JS base address inherited by front-end plugins.
    bundle.data["base"] = getattr(comp_cls, "base", None)
    return data
def get_plugin_list(request, project_id):
    """Return metadata for enabled components visible to the current project."""
    project_id = request.project.id
    hidden_codes = ProjectBasedComponent.objects.get_components_of_other_projects(project_id)
    visible = ComponentModel.objects.filter(status=True).exclude(code__in=hidden_codes)
    data = []
    for model in visible:
        comp = ComponentLibrary.get_component_class(model.code, model.version)
        entry = {
            "inputs": comp.inputs_format(),
            "outputs": comp.outputs_format(),
            "desc": comp.desc,
            "code": comp.code,
            "name": comp.name,
            "group_name": comp.group_name,
            "version": comp.version,
            "form": comp.form,
        }
        data.append(entry)
    return {"result": True, "data": data, "code": err_code.SUCCESS.code}
def test_kwargs_new(self):
    """Constructing ComponentLibrary with component_code yields the registered component."""
    fetched = ComponentLibrary(component_code=self.component.code)
    self.assertEqual(fetched, self.component)
def _parse(self, root_pipeline_data=None, params=None, is_subprocess=False, parent_context=None):
    """Recursively build a Pipeline object from self.pipeline_tree.

    :param root_pipeline_data: data inherited from the root pipeline
    :param params: parameter overrides passed by the parent to a subprocess
    :param is_subprocess: whether this tree is being parsed as a subprocess
    :param parent_context: parent pipeline's Context (required when is_subprocess)
    :return: Pipeline object
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    if params is None:
        params = {}
    # Subprocesses get their own copy so sibling subprocesses don't share state.
    pipeline_data = deepcopy(
        root_pipeline_data) if is_subprocess else root_pipeline_data
    pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
    # Classify declared inputs:
    #   act_outputs    - inputs wired from another activity's output
    #   process_params - subprocess params, resolved against the parent context
    #   scope_info     - plain variables resolved in this pipeline's own context
    act_outputs = {}
    scope_info = {}
    process_params = {}
    for key, info in pipeline_inputs.items():
        if info.get(PE.source_act):
            act_outputs.setdefault(info[PE.source_act],
                                   {}).update({info[PE.source_key]: key})
            continue
        if info.get(PE.is_param, False):
            # Parent-supplied value (if any) overrides the declared default.
            info = params.get(key, info)
            if is_subprocess:
                process_params.update({key: info})
                continue
        scope_info.update({key: info})
    output_keys = self.pipeline_tree[PE.data][PE.outputs].keys()
    context = Context(act_outputs, output_keys)
    for key, info in scope_info.items():
        var = get_variable(key, info, context, pipeline_data)
        context.set_global_var(key, var)
    if is_subprocess:
        if parent_context is None:
            raise exceptions.DataTypeErrorException(
                'parent context of subprocess cannot be none')
        # Subprocess params are resolved against the PARENT context.
        for key, info in process_params.items():
            var = get_variable(key, info, parent_context, pipeline_data)
            pipeline_data.update({key: var})
    # Instantiate start/end events from their declared types.
    start = self.pipeline_tree[PE.start_event]
    start_cls = getattr(event, start[PE.type])
    start_event = start_cls(id=start[PE.id], name=start[PE.name])
    end = self.pipeline_tree[PE.end_event]
    end_cls = getattr(event, end[PE.type])
    end_event = end_cls(id=end[PE.id], name=end[PE.name])
    # Instantiate activities: service activities, subprocesses, loop activities.
    acts = self.pipeline_tree[PE.activities]
    act_objs = []
    for act in acts.values():
        act_cls = getattr(activity, act[PE.type])
        if act[PE.type] == PE.ServiceActivity:
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        skippable=act.get(PE.skippable, True),
                        can_retry=act.get(PE.can_retry, True),
                        timeout=act.get(PE.timeout)))
        elif act[PE.type] == PE.SubProcess:
            sub_tree = act[PE.pipeline]
            params = act[PE.params]
            sub_parser = PipelineParser(pipeline_tree=sub_tree)
            # Recurse with this pipeline's context as the parent context.
            act_objs.append(
                act_cls(id=act[PE.id],
                        pipeline=sub_parser._parse(
                            root_pipeline_data=root_pipeline_data,
                            params=params,
                            is_subprocess=True,
                            parent_context=context),
                        name=act[PE.name]))
        elif act[PE.type] == PE.LoopServiceActivity:
            act_cls = getattr(activity, act[PE.type])
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        loop_times=act[PE.loop_times]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act[PE.type])
    # Instantiate gateways.
    gateways = self.pipeline_tree[PE.gateways]
    flows = self.pipeline_tree[PE.flows]
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = getattr(gateway, gw[PE.type])
        if gw[PE.type] in [PE.ParallelGateway]:
            gateway_objs.append(
                gw_cls(id=gw[PE.id],
                       converge_gateway_id=gw[PE.converge_gateway_id],
                       name=gw[PE.name]))
        elif gw[PE.type] in [PE.ExclusiveGateway, PE.ConvergeGateway]:
            gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw[PE.type])
    # Build sequence flows connecting events, activities and gateways.
    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl[PE.source] == start[PE.id]:
            source = start_event
        else:
            # NOTE(review): subscripting filter() only works on Python 2
            # (where it returns a list); on Python 3 this raises TypeError
            # — confirm the target runtime for this legacy parser.
            source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
        if fl[PE.target] == end[PE.id]:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
        flow_objs_dict[fl[PE.id]] = base.SequenceFlow(
            fl[PE.id], source, target)
    flow_objs = flow_objs_dict.values()
    # add incoming and outgoing flow to acts
    # Normalize single-flow fields to lists before wiring.
    if not isinstance(start[PE.outgoing], list):
        start[PE.outgoing] = [start[PE.outgoing]]
    for outgoing_id in start[PE.outgoing]:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])
    if not isinstance(end[PE.incoming], list):
        end[PE.incoming] = [end[PE.incoming]]
    for incoming_id in end[PE.incoming]:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])
    for act in act_objs:
        incoming = acts[act.id][PE.incoming]
        if isinstance(incoming, list):
            for s in incoming:
                act.incoming.add_flow(flow_objs_dict[s])
        else:
            act.incoming.add_flow(flow_objs_dict[incoming])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])
    # Wire gateway conditions, incoming and outgoing flows.
    for gw in gateway_objs:
        if isinstance(gw, gateway.ExclusiveGateway):
            for flow_id, con in gateways[gw.id][PE.conditions].items():
                con_obj = gateway.Condition(
                    con[PE.evaluate],
                    flow_objs_dict[flow_id],
                )
                gw.add_condition(con_obj)
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ParallelGateway):
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ConvergeGateway):
            for incoming_id in gateways[gw.id][PE.incoming]:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(
                flow_objs_dict[gateways[gw.id][PE.outgoing]])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))
    # Assemble the final spec and pipeline object.
    pipeline_data = DataObject(pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, pipeline_data, context)
    return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
def group_icon(self):
    """Icon of the group this component belongs to, looked up from the registry."""
    component_cls = ComponentLibrary.get_component_class(self.code)
    return component_cls.group_icon
def _parse(self,
           root_pipeline_data=None,
           root_pipeline_params=None,
           params=None,
           is_subprocess=False,
           parent_context=None):
    """
    @summary: parse pipeline and subprocess recursively
    @param root_pipeline_data: root data from root pipeline parsing, which will be passed to subprocess recursively
    @param root_pipeline_params: params from root pipeline for all subprocess
    @param params: params from parent for son subprocess
    @param is_subprocess: whether the tree being parsed is a subprocess
    @param parent_context: parent context for activity of subprocess to resolve inputs
    @return: Pipeline object
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    if root_pipeline_params is None:
        root_pipeline_params = {}
    if params is None:
        params = {}
    pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs]
    # Split declared inputs into act_outputs / scope_info / subprocess_params.
    classification = classify_inputs(pipeline_inputs, params, is_subprocess,
                                     root_pipeline_params)
    output_keys = self.pipeline_tree[PE.data][PE.outputs]
    context = Context(classification['act_outputs'], output_keys)
    for key, info in classification['scope_info'].items():
        var = get_variable(key, info, context, root_pipeline_data)
        context.set_global_var(key, var)
    # Each (sub)pipeline works on its own copy of the root data.
    pipeline_data = deepcopy(root_pipeline_data)
    if is_subprocess:
        if parent_context is None:
            raise exceptions.DataTypeErrorException(
                'parent context of subprocess cannot be none')
        # Subprocess params are resolved against the PARENT context.
        for key, info in classification['subprocess_params'].items():
            var = get_variable(key, info, parent_context, pipeline_data)
            pipeline_data.update({key: var})
    # Instantiate start/end events via the node class factory.
    start = self.pipeline_tree[PE.start_event]
    start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type])
    start_event = start_cls(id=start[PE.id], name=start[PE.name])
    end = self.pipeline_tree[PE.end_event]
    end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type])
    end_event = end_cls(id=end[PE.id], name=end[PE.name], data=DataObject({}))
    # Instantiate activities: service activities and subprocesses.
    acts = self.pipeline_tree[PE.activities]
    act_objs = []
    for act in acts.values():
        act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type])
        if act[PE.type] == PE.ServiceActivity:
            component = ComponentLibrary.get_component(
                act[PE.component][PE.code], act[PE.component][PE.inputs])
            service = component.service()
            data = component.data_for_execution(context, pipeline_data)
            handler_path = act.get('failure_handler')
            failure_handler = import_string(
                handler_path) if handler_path else None
            # skippable/retryable fall back to their legacy field names.
            act_objs.append(
                act_cls(id=act[PE.id],
                        service=service,
                        name=act[PE.name],
                        data=data,
                        error_ignorable=act.get(PE.error_ignorable, False),
                        skippable=act.get(PE.skippable)
                        or act.get(PE.skippable_old, True),
                        retryable=act.get(PE.retryable)
                        or act.get(PE.retryable_old, True),
                        timeout=act.get(PE.timeout),
                        failure_handler=failure_handler))
        elif act[PE.type] == PE.SubProcess:
            sub_tree = act[PE.pipeline]
            params = act[PE.params]
            sub_parser = PipelineParser(pipeline_tree=sub_tree)
            # Recurse with this pipeline's context as the parent context.
            act_objs.append(
                act_cls(id=act[PE.id],
                        pipeline=sub_parser._parse(
                            root_pipeline_data=root_pipeline_data,
                            root_pipeline_params=root_pipeline_params,
                            params=params,
                            is_subprocess=True,
                            parent_context=context),
                        name=act[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act[PE.type])
    # Instantiate gateways.
    gateways = self.pipeline_tree[PE.gateways]
    flows = self.pipeline_tree[PE.flows]
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type])
        if gw[PE.type] in {
                PE.ParallelGateway, PE.ConditionalParallelGateway
        }:
            gateway_objs.append(
                gw_cls(id=gw[PE.id],
                       converge_gateway_id=gw[PE.converge_gateway_id],
                       name=gw[PE.name]))
        elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}:
            gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name]))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           gw[PE.type])
    # Build sequence flows connecting events, activities and gateways.
    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl[PE.source] == start[PE.id]:
            source = start_event
        else:
            # NOTE(review): subscripting filter() assumes Python 2 list
            # semantics; on Python 3 filter returns an iterator — confirm
            # the target runtime.
            source = filter(lambda x: x.id == fl[PE.source], flow_nodes)[0]
        if fl[PE.target] == end[PE.id]:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl[PE.target], flow_nodes)[0]
        flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target)
    flow_objs = flow_objs_dict.values()
    # add incoming and outgoing flow to acts
    # Normalize single-flow fields to lists before wiring.
    if not isinstance(start[PE.outgoing], list):
        start[PE.outgoing] = [start[PE.outgoing]]
    for outgoing_id in start[PE.outgoing]:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])
    if not isinstance(end[PE.incoming], list):
        end[PE.incoming] = [end[PE.incoming]]
    for incoming_id in end[PE.incoming]:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])
    for act in act_objs:
        incoming = acts[act.id][PE.incoming]
        if isinstance(incoming, list):
            for s in incoming:
                act.incoming.add_flow(flow_objs_dict[s])
        else:
            act.incoming.add_flow(flow_objs_dict[incoming])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]])
    # Wire gateway conditions, incoming and outgoing flows.
    for gw in gateway_objs:
        if isinstance(gw, ExclusiveGateway) or isinstance(
                gw, ConditionalParallelGateway):
            for flow_id, con in gateways[gw.id][PE.conditions].items():
                con_obj = Condition(con[PE.evaluate],
                                    flow_objs_dict[flow_id])
                gw.add_condition(con_obj)
            if isinstance(gateways[gw.id][PE.incoming], list):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
            else:
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, ParallelGateway):
            if isinstance(gateways[gw.id][PE.incoming], list):
                for incoming_id in gateways[gw.id][PE.incoming]:
                    gw.incoming.add_flow(flow_objs_dict[incoming_id])
            else:
                gw.incoming.add_flow(
                    flow_objs_dict[gateways[gw.id][PE.incoming]])
            for outgoing_id in gateways[gw.id][PE.outgoing]:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, ConvergeGateway):
            for incoming_id in gateways[gw.id][PE.incoming]:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(
                flow_objs_dict[gateways[gw.id][PE.outgoing]])
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" %
                                           type(gw))
    # Duplicate context variables before freezing the spec — presumably so
    # parsed pipelines don't share variable instances; see
    # Context.duplicate_variables for the exact semantics.
    context.duplicate_variables()
    pipeline_data = DataObject(pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, pipeline_data, context)
    return Pipeline(self.pipeline_tree[PE.id], pipeline_spec)
def parser(self, root_pipeline_data=None):
    """Build a Pipeline object from self.pipeline_tree.

    NOTE(review): this variant uses dict.iteritems and subscripts the
    result of filter(), so it only runs on Python 2.

    :param root_pipeline_data: data shared by this pipeline and its subprocesses
    :return: Pipeline object
    """
    if root_pipeline_data is None:
        root_pipeline_data = {}
    pipeline_inputs = self.pipeline_tree['data']['inputs']
    # Classify inputs: wired from another activity's output vs. plain variables.
    act_outputs = {}
    scope_info = {}
    for key, info in pipeline_inputs.iteritems():
        if info.get('source_act'):
            act_outputs.setdefault(info['source_act'],
                                   {}).update({info['source_key']: key})
        else:
            scope_info.update({key: info})
    output_keys = self.pipeline_tree['data']['outputs'].keys()
    context = Context(act_outputs, output_keys)
    for key, info in scope_info.iteritems():
        value = get_variable(key, info, context, root_pipeline_data)
        context.set_global_var(key, value)
    # Instantiate start/end events from their declared types.
    start = self.pipeline_tree['start_event']
    start_cls = getattr(event, start['type'])
    start_event = start_cls(id=start['id'], name=start['name'])
    end = self.pipeline_tree['end_event']
    end_cls = getattr(event, end['type'])
    end_event = end_cls(id=end['id'], name=end['name'])
    # Instantiate activities: service activities and subprocesses.
    acts = self.pipeline_tree['activities']
    act_objs = []
    for act in acts.values():
        act_cls = getattr(activity, act['type'])
        if act['type'] == 'ServiceActivity':
            component = ComponentLibrary.get_component(
                act['component']['code'],
                act['component']['inputs']
            )
            service = component.service()
            data = component.data_for_execution(context, root_pipeline_data)
            act_objs.append(act_cls(id=act['id'],
                                    service=service,
                                    name=act['name'],
                                    data=data,
                                    error_ignorable=act.get('error_ignorable', False)))
        elif act['type'] == 'SubProcess':
            pipeline_info = act['pipeline']
            sub_parser = PipelineParser(pipeline_info)
            act_objs.append(act_cls(id=act['id'],
                                    pipeline=sub_parser.parser(root_pipeline_data),
                                    name=act['name']))
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" % act['type'])
    # Instantiate gateways.
    gateways = self.pipeline_tree['gateways']
    flows = self.pipeline_tree['flows']
    gateway_objs = []
    for gw in gateways.values():
        gw_cls = getattr(gateway, gw['type'])
        if gw['type'] in ['ParallelGateway']:
            gateway_objs.append(
                gw_cls(id=gw['id'],
                       converge_gateway_id=gw['converge_gateway_id'],
                       name=gw['name']))
        elif gw['type'] in ['ExclusiveGateway', 'ConvergeGateway']:
            gateway_objs.append(gw_cls(id=gw['id'], name=gw['name']))
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" % gw['type'])
    # Build sequence flows connecting events, activities and gateways.
    flow_objs_dict = {}
    for fl in flows.values():
        flow_nodes = act_objs + gateway_objs
        if fl['source'] == start['id']:
            source = start_event
        else:
            source = filter(lambda x: x.id == fl['source'], flow_nodes)[0]
        if fl['target'] == end['id']:
            target = end_event
        else:
            target = filter(lambda x: x.id == fl['target'], flow_nodes)[0]
        flow_objs_dict[fl['id']] = base.SequenceFlow(fl['id'], source, target)
    flow_objs = flow_objs_dict.values()
    # add incoming and outgoing flow to acts
    # Normalize single-flow fields to lists before wiring.
    if not isinstance(start['outgoing'], list):
        start['outgoing'] = [start['outgoing']]
    for outgoing_id in start['outgoing']:
        start_event.outgoing.add_flow(flow_objs_dict[outgoing_id])
    if not isinstance(end['incoming'], list):
        end['incoming'] = [end['incoming']]
    for incoming_id in end['incoming']:
        end_event.incoming.add_flow(flow_objs_dict[incoming_id])
    # NOTE(review): unlike the newer parsers, activity incoming here is
    # assumed to be a single flow id, not a list — confirm tree schema.
    for act in act_objs:
        act.incoming.add_flow(flow_objs_dict[acts[act.id]['incoming']])
        act.outgoing.add_flow(flow_objs_dict[acts[act.id]['outgoing']])
    # Wire gateway conditions, incoming and outgoing flows.
    for gw in gateway_objs:
        if isinstance(gw, gateway.ExclusiveGateway):
            for flow_id, con in gateways[gw.id]['conditions'].iteritems():
                con_obj = gateway.Condition(
                    con['evaluate'],
                    flow_objs_dict[flow_id],
                )
                gw.add_condition(con_obj)
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id]['incoming']]
            )
            for outgoing_id in gateways[gw.id]['outgoing']:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ParallelGateway):
            gw.incoming.add_flow(
                flow_objs_dict[gateways[gw.id]['incoming']]
            )
            for outgoing_id in gateways[gw.id]['outgoing']:
                gw.outgoing.add_flow(flow_objs_dict[outgoing_id])
        elif isinstance(gw, gateway.ConvergeGateway):
            for incoming_id in gateways[gw.id]['incoming']:
                gw.incoming.add_flow(flow_objs_dict[incoming_id])
            gw.outgoing.add_flow(
                flow_objs_dict[gateways[gw.id]['outgoing']]
            )
        else:
            raise exceptions.FlowTypeError(u"Unknown Gateway type: %s" % type(gw))
    # Assemble the final spec and pipeline object.
    root_pipeline_data = DataObject(root_pipeline_data)
    pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs,
                                 gateway_objs, root_pipeline_data, context)
    return Pipeline(self.pipeline_tree['id'], pipeline_spec)