def test_retry_node__with_node_can_not_retry(self):
    # with service activity
    top_pipeline = PipelineObject(
        nodes={
            self.node_id: ServiceActivity(id=self.node_id, service=None, can_retry=False)
        })
    process = MockPipelineProcess(top_pipeline=top_pipeline)

    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
        self.assertEqual(
            act_result.message,
            'the node is set to not be retried, try skip it please.')

    # with parallel gateway
    pg = ParallelGateway(id=self.node_id, converge_gateway_id=uniqid())
    setattr(pg, 'can_retry', False)
    top_pipeline = PipelineObject(nodes={self.node_id: pg})
    process = MockPipelineProcess(top_pipeline=top_pipeline)

    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
        self.assertEqual(
            act_result.message,
            'the node is set to not be retried, try skip it please.')
def test_next(self):
    cvg_gateway = ConvergeGateway('1')
    parallel_gateway = ParallelGateway('2', 'cvg')
    out_flow = SequenceFlow('flow', cvg_gateway, parallel_gateway)
    cvg_gateway.outgoing.add_flow(out_flow)
    parallel_gateway.incoming.add_flow(out_flow)
    self.assertEqual(parallel_gateway, cvg_gateway.next())
def setUp(self):
    self.empty_start_event = EmptyStartEvent(id='1')
    self.empty_end_event = EmptyEndEvent(id='2')
    self.service_activity = ServiceActivity(id='3', service=None)
    self.subprocess = SubProcess(id='4', pipeline=MagicMock())
    self.exclusive_gateway = ExclusiveGateway(id='5')
    self.parallel_gateway = ParallelGateway(id='6', converge_gateway_id=None)
    self.conditional_parallel_gateway = ConditionalParallelGateway(id='7', converge_gateway_id=None)
    self.converge_gateway = ConvergeGateway(id='8')
    self.executable_end_event_1 = CustomEndEventOne(id='9')
    self.executable_end_event_2 = CustomEndEventTwo(id='10')
def setUp(self):
    ex_gateway1 = ExclusiveGateway(id='1')
    next_node1 = ParallelGateway(id='1', converge_gateway_id='cvg')
    next_node2 = ParallelGateway(id='2', converge_gateway_id='cvg')
    flow1 = SequenceFlow('flow1', ex_gateway1, next_node1)
    flow2 = SequenceFlow('flow2', ex_gateway1, next_node2)
    condition1 = Condition('a == 1', flow1)
    condition2 = Condition('a != 1', flow2)
    ex_gateway1.add_condition(condition1)
    ex_gateway1.add_condition(condition2)
    ex_gateway1.outgoing.add_flow(flow1)
    ex_gateway1.outgoing.add_flow(flow2)
    next_node1.incoming.add_flow(flow1)
    next_node2.incoming.add_flow(flow2)
    self.gateway_for_test_determine = ex_gateway1

    ex_gateway2 = ExclusiveGateway(id='2')
    next_node3 = ParallelGateway(id='3', converge_gateway_id='cvg')
    next_node4 = ParallelGateway(id='4', converge_gateway_id='cvg')
    next_node5 = ParallelGateway(id='5', converge_gateway_id='cvg')
    flow3 = SequenceFlow('flow3', ex_gateway2, next_node3)
    flow4 = SequenceFlow('flow4', ex_gateway2, next_node4)
    flow5 = SequenceFlow('flow5', ex_gateway2, next_node5, is_default=True)
    condition3 = Condition('a == 1', flow3)
    condition4 = Condition('a != 1', flow4)
    ex_gateway2.add_condition(condition3)
    ex_gateway2.add_condition(condition4)
    ex_gateway2.outgoing.add_flow(flow3)
    ex_gateway2.outgoing.add_flow(flow4)
    ex_gateway2.outgoing.add_flow(flow5)
    next_node3.incoming.add_flow(flow3)
    next_node4.incoming.add_flow(flow4)
    next_node5.incoming.add_flow(flow5)
    self.gateway_for_test_next = ex_gateway2
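# Topology of the fixture built below (derived from the flows it wires up):
# the parallel gateway b fans out three empty branches that converge
# immediately at h, with no activities in between.
#
#            +------------+
#   a --> b -+------------+--> h --> i
#            +------------+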
def get_empty_parallel_gateway_test_pipeline():
    start_event_a_id = node_uniqid()
    gateway_b_id = node_uniqid()
    gateway_h_id = node_uniqid()
    end_event_i_id = node_uniqid()

    start_event_a = EmptyStartEvent(start_event_a_id)
    gateway_b = ParallelGateway(gateway_b_id, gateway_h_id)
    gateway_h = ConvergeGateway(gateway_h_id)
    end_event_i = EmptyEndEvent(end_event_i_id)

    flow_ab = SequenceFlow('ab', start_event_a, gateway_b)
    flow_bc = SequenceFlow('bc', gateway_b, gateway_h)
    flow_bd = SequenceFlow('bd', gateway_b, gateway_h)
    flow_be = SequenceFlow('be', gateway_b, gateway_h)
    flow_hi = SequenceFlow('hi', gateway_h, end_event_i)

    start_event_a.outgoing.add_flow(flow_ab)
    gateway_b.incoming.add_flow(flow_ab)
    gateway_b.outgoing.add_flow(flow_bc)
    gateway_b.outgoing.add_flow(flow_bd)
    gateway_b.outgoing.add_flow(flow_be)
    gateway_h.incoming.add_flow(flow_bc)
    gateway_h.incoming.add_flow(flow_bd)
    gateway_h.incoming.add_flow(flow_be)
    gateway_h.outgoing.add_flow(flow_hi)
    end_event_i.incoming.add_flow(flow_hi)

    spec = PipelineSpec(start_event_a, end_event_i,
                        [flow_ab, flow_bc, flow_bd, flow_be],
                        [],
                        [gateway_b, gateway_h],
                        data=DataObject({}),
                        context=context.Context(act_outputs={}, output_key=[]))
    return Pipeline(node_uniqid(), spec)
def test_retry_node__with_node_can_not_retry(self):
    # with service activity
    top_pipeline = PipelineObject(
        nodes={
            self.node_id: ServiceActivity(id=self.node_id, service=None, retryable=False)
        })
    process = MockPipelineProcess(top_pipeline=top_pipeline)

    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)

    # with parallel gateway
    pg = ParallelGateway(id=self.node_id, converge_gateway_id=uniqid())
    setattr(pg, 'retryable', False)
    top_pipeline = PipelineObject(nodes={self.node_id: pg})
    process = MockPipelineProcess(top_pipeline=top_pipeline)

    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
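# Note: the two retry-rejection variants above differ only in the flag they
# clear on the node ('can_retry' vs. 'retryable'); both expect retry_node to
# refuse the call and, per the message asserted in the first variant, direct
# the caller to skip the node instead.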
def test_parallel_gateway(self):
    gw_id = '1'
    pl_gateway = ParallelGateway(gw_id, 'cvg')
    self.assertTrue(isinstance(pl_gateway, FlowNode))
    self.assertTrue(isinstance(pl_gateway, Gateway))
def test_next(self):
    gw_id = '1'
    pl_gateway = ParallelGateway(gw_id, None, None)
    self.assertRaises(InvalidOperationException, pl_gateway.next)
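# Topology of the fixture built below (derived from the flows it wires up):
#
#                  +--> c --> f --+
#   a --> b (PG) --+--> d --> g --+--> h (CG) --> i
#                  +--> e --------+
#
# b is the ParallelGateway under test and h the ConvergeGateway it points at.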
def get_parallel_gateway_test_pipeline():
    start_event_a_id = node_uniqid()
    gateway_b_id = node_uniqid()
    act_c_id = node_uniqid()
    act_d_id = node_uniqid()
    act_e_id = node_uniqid()
    act_f_id = node_uniqid()
    act_g_id = node_uniqid()
    gateway_h_id = node_uniqid()
    end_event_i_id = node_uniqid()

    start_event_a = EmptyStartEvent(start_event_a_id)
    gateway_b = ParallelGateway(gateway_b_id, gateway_h_id)
    act_c = ServiceActivity(act_c_id, service=SleepService(), data=DataObject({'data': '1'}))
    act_d = ServiceActivity(act_d_id, service=SleepService(), data=DataObject({'data': '1'}))
    act_e = ServiceActivity(act_e_id, service=SleepService(), data=DataObject({'data': '1'}))
    act_f = ServiceActivity(act_f_id, service=SleepService(), data=DataObject({'node_1': 'd'}))
    act_g = ServiceActivity(act_g_id, service=SleepService(), data=DataObject({'node_2': 'd'}))
    gateway_h = ConvergeGateway(gateway_h_id)
    end_event_i = EmptyEndEvent(end_event_i_id)

    flow_ab = SequenceFlow('ab', start_event_a, gateway_b)
    flow_bc = SequenceFlow('bc', gateway_b, act_c)
    flow_bd = SequenceFlow('bd', gateway_b, act_d)
    flow_be = SequenceFlow('be', gateway_b, act_e)
    flow_cf = SequenceFlow('cf', act_c, act_f)
    flow_dg = SequenceFlow('dg', act_d, act_g)
    flow_fh = SequenceFlow('fh', act_f, gateway_h)
    flow_gh = SequenceFlow('gh', act_g, gateway_h)
    flow_eh = SequenceFlow('eh', act_e, gateway_h)
    flow_hi = SequenceFlow('hi', gateway_h, end_event_i)

    start_event_a.outgoing.add_flow(flow_ab)
    gateway_b.incoming.add_flow(flow_ab)
    gateway_b.outgoing.add_flow(flow_bc)
    gateway_b.outgoing.add_flow(flow_bd)
    gateway_b.outgoing.add_flow(flow_be)
    act_c.incoming.add_flow(flow_bc)
    act_d.incoming.add_flow(flow_bd)
    act_e.incoming.add_flow(flow_be)
    act_c.outgoing.add_flow(flow_cf)
    act_d.outgoing.add_flow(flow_dg)
    act_e.outgoing.add_flow(flow_eh)
    act_f.incoming.add_flow(flow_cf)
    act_g.incoming.add_flow(flow_dg)
    act_f.outgoing.add_flow(flow_fh)
    act_g.outgoing.add_flow(flow_gh)
    gateway_h.incoming.add_flow(flow_fh)
    gateway_h.incoming.add_flow(flow_gh)
    gateway_h.incoming.add_flow(flow_eh)
    gateway_h.outgoing.add_flow(flow_hi)
    end_event_i.incoming.add_flow(flow_hi)

    spec = PipelineSpec(
        start_event_a, end_event_i,
        [flow_ab, flow_bc, flow_bd, flow_be, flow_cf, flow_dg, flow_fh, flow_gh, flow_eh],
        [act_c, act_d, act_e, act_f, act_g],
        [gateway_b, gateway_h],
        data=DataObject({}),
        context=context.Context(
            act_outputs={
                act_f_id: {'node_1': 'node_1_heihei'},
                act_d_id: {'data': 'data_haha'}
            },
            output_key=['node_1_heihei', 'data_haha']))
    return Pipeline(node_uniqid(), spec)
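# The Context wired into the spec above registers cross-branch outputs:
# assuming Context.act_outputs maps an activity's output name to a global
# context key, act_f's 'node_1' output is published as 'node_1_heihei' and
# act_d's 'data' output as 'data_haha', matching the output_key list.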