def test_sequence_flow(self):
    """A SequenceFlow keeps its id, source and target, and is non-default."""
    flow_id = '1'
    src_node = ServiceActivity(id='1', service=None, data=DataObject({}))
    dst_node = ServiceActivity(id='2', service=None, data=DataObject({}))
    flow = SequenceFlow(flow_id, src_node, dst_node)
    # A sequence flow is itself a flow element carrying its two endpoints.
    self.assertIsInstance(flow, FlowElement)
    self.assertEqual(flow.id, flow_id)
    self.assertEqual(flow.source, src_node)
    self.assertEqual(flow.target, dst_node)
    self.assertEqual(flow.is_default, False)
def test_forced_fail__success(self):
    """Successful forced_fail: handler invoked, schedule deleted, data marked
    failed, celery task revoked, process slept and status version bumped."""
    node = ServiceActivity(id=self.node_id, service=None)
    setattr(node, 'failure_handler', MagicMock())
    pipeline = PipelineObject(nodes={self.node_id: node})
    process = MockPipelineProcess(top_pipeline=pipeline)
    status = MockStatus()
    version_before = status.version
    kill = True
    ex_data = 'ex_data'
    with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)), \
            patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.forced_fail(self.node_id, kill, ex_data)
        self.assertTrue(act_result.result)
        node.failure_handler.assert_called_once_with(process.root_pipeline.data)
        ScheduleService.objects.delete_schedule.assert_called_once_with(
            status.id, version_before)
        Data.objects.forced_fail.assert_called_once_with(self.node_id, ex_data)
        ProcessCeleryTask.objects.revoke.assert_called_once_with(process.id, kill)
        process.sleep.assert_called_once_with(adjust_status=True)
        # Forcing a fail must rotate the status version and persist it.
        self.assertNotEqual(version_before, status.version)
        status.save.assert_called_once()
def subprocess():
    """Build a minimal start -> RetryTestService activity -> end pipeline."""
    start_id = node_uniqid()
    act_id = node_uniqid()
    end_id = node_uniqid()
    start_event = EmptyStartEvent(start_id)
    act = ServiceActivity(act_id, service=RetryTestService(),
                          data=DataObject({'data': '0'}))
    end_event = EmptyEndEvent(end_id)
    # Wire start -> act -> end with two sequence flows.
    flow_in = SequenceFlow('ab', start_event, act)
    flow_out = SequenceFlow('bc', act, end_event)
    start_event.outgoing.add_flow(flow_in)
    act.incoming.add_flow(flow_in)
    act.outgoing.add_flow(flow_out)
    end_event.incoming.add_flow(flow_out)
    spec = PipelineSpec(start_event, end_event, [flow_in, flow_out], [act], [],
                        data=DataObject({}), context=context.Context({}))
    return Pipeline(node_uniqid(), spec)
def test_retry_node__with_node_can_not_retry(self):
    """retry_node refuses nodes whose can_retry flag is False."""
    expected_msg = 'the node is set to not be retried, try skip it please.'
    # Case 1: a service activity constructed with can_retry=False.
    act = ServiceActivity(id=self.node_id, service=None, can_retry=False)
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: act}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
        self.assertEqual(act_result.message, expected_msg)
    # Case 2: a parallel gateway with can_retry forced off after creation.
    gateway = ParallelGateway(id=self.node_id, converge_gateway_id=uniqid())
    gateway.can_retry = False
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: gateway}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
        self.assertEqual(act_result.message, expected_msg)
def test_simple_schedule_pipeline():
    """Run a minimal start -> ScheduleService activity -> end pipeline."""
    start_id = node_uniqid()
    act_id = node_uniqid()
    end_id = node_uniqid()
    start_event = EmptyStartEvent(start_id)
    act = ServiceActivity(act_id, service=ScheduleService(), data=DataObject({}))
    end_event = EmptyEndEvent(end_id)
    # Wire start -> act -> end with two sequence flows.
    flow_in = SequenceFlow('ab', start_event, act)
    flow_out = SequenceFlow('bc', act, end_event)
    start_event.outgoing.add_flow(flow_in)
    act.incoming.add_flow(flow_in)
    act.outgoing.add_flow(flow_out)
    end_event.incoming.add_flow(flow_out)
    spec = PipelineSpec(start_event, end_event, [flow_in, flow_out], [act], [],
                        data=DataObject({}), context=context.Context({}))
    run_pipeline(Pipeline(node_uniqid(), spec))
def get_simple_pipeline():
    """Build (without running) a start -> RetryTestService activity -> end pipeline."""
    start_id = node_uniqid()
    act_id = node_uniqid()
    end_id = node_uniqid()
    act_inputs = {
        'data': '0',
        'value2': '2',
        'value3': '3',
        'timing': 1000,
    }
    start_event = EmptyStartEvent(start_id)
    act = ServiceActivity(id=act_id, service=RetryTestService(),
                          data=DataObject(act_inputs))
    end_event = EmptyEndEvent(end_id)
    # Wire start -> act -> end with two sequence flows.
    flow_in = SequenceFlow('ab', start_event, act)
    flow_out = SequenceFlow('bc', act, end_event)
    start_event.outgoing.add_flow(flow_in)
    act.incoming.add_flow(flow_in)
    act.outgoing.add_flow(flow_out)
    end_event.incoming.add_flow(flow_out)
    spec = PipelineSpec(start_event, end_event, [flow_in, flow_out], [act], [],
                        data=DataObject({}), context=context.Context({}))
    return Pipeline(node_uniqid(), spec)
def test_forced_fail__fail_with_transit_fail(self):
    """forced_fail reports failure when the state transition is rejected."""
    node = ServiceActivity(id=self.node_id, service=None)
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: node}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.forced_fail(self.node_id)
        self.assertFalse(act_result.result)
        self.assertEqual(act_result.message, 'transit fail')
def test_skip_node__fail_with_skip_fail(self):
    """skip_node fails when Status.objects.skip reports failure."""
    node = ServiceActivity(id=self.node_id, service=None)
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: node}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.skip_node(self.node_id)
        # The skip was attempted against this process/node pair but failed.
        Status.objects.skip.assert_called_once_with(process, node)
        self.assertFalse(act_result.result)
def test_skip_node__fail_with_node_can_not_skip(self):
    """skip_node refuses activities created with skippable=False."""
    node = ServiceActivity(id=self.node_id, service=None, skippable=False)
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: node}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.skip_node(self.node_id)
        self.assertFalse(act_result.result)
        self.assertEqual(act_result.message, 'this node can not be skipped')
def setUp(self):
    """Create one instance of every flow-node type used by the tests.

    Every fixture node gets a distinct sequential string id so tests can
    tell the instances apart.
    """
    self.empty_start_event = EmptyStartEvent(id='1')
    self.empty_end_event = EmptyEndEvent(id='2')
    self.service_activity = ServiceActivity(id='3', service=None)
    self.subprocess = SubProcess(id='4', pipeline=MagicMock())
    self.exclusive_gateway = ExclusiveGateway(id='5')
    self.parallel_gateway = ParallelGateway(id='6', converge_gateway_id=None)
    self.conditional_parallel_gateway = ConditionalParallelGateway(id='7', converge_gateway_id=None)
    self.converge_gateway = ConvergeGateway(id='8')
    self.executable_end_event_1 = CustomEndEventOne(id='9')
    # Fixed: this previously reused id '9', colliding with
    # executable_end_event_1; follow the sequential-id pattern instead.
    self.executable_end_event_2 = CustomEndEventTwo(id='10')
def test_retry_node__success(self):
    """retry_node retries the node with given inputs and readies the process."""
    node = ServiceActivity(id=self.node_id, service=None)
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: node}))
    retry_inputs = {'id': self.node_id}
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id, inputs=retry_inputs)
        self.assertTrue(act_result.result)
        Status.objects.retry.assert_called_once_with(process, node, retry_inputs)
        PipelineProcess.objects.process_ready.assert_called_once_with(
            process_id=process.id)
def test_node(self):
    """Pipeline.node(id) resolves a node registered in the pipeline spec."""
    start_event = EmptyStartEvent(id='a')
    activity = ServiceActivity(id='b', service=None)
    end_event = EmptyEndEvent(id='c')
    # Wire start -> activity -> end with two sequence flows.
    in_flow = SequenceFlow('ab', start_event, activity)
    out_flow = SequenceFlow('bc', activity, end_event)
    start_event.outgoing.add_flow(in_flow)
    activity.incoming.add_flow(in_flow)
    activity.outgoing.add_flow(out_flow)
    end_event.incoming.add_flow(out_flow)
    spec = PipelineSpec(start_event, end_event, [in_flow, out_flow],
                        [activity], [], None, None)
    pipeline = Pipeline('pipeline', spec)
    self.assertEqual(pipeline.node('b'), activity)
def test_skip_node__success(self):
    """Successful skip_node: output extracted, process saved and readied
    at the skipped node's successor."""
    node = ServiceActivity(id=self.node_id, service=None)
    mock_next = IdentifyObject()

    def _next():
        return mock_next

    setattr(node, 'next', _next)
    top_pipeline = PipelineObject(nodes={self.node_id: node}, context=MockContext())
    process = MockPipelineProcess(top_pipeline=top_pipeline)
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.skip_node(self.node_id)
        self.assertTrue(act_result.result)
        process.top_pipeline.context.extract_output.assert_called_once_with(node)
        process.save.assert_called_once()
        # Fixed: the original *called* the mock here instead of asserting on
        # it, so the expectation was never checked (and the call record was
        # polluted). Assert the process was readied at the successor node.
        PipelineProcess.objects.process_ready.assert_called_once_with(
            process_id=process.id, current_node_id=mock_next.id)
def test_retry_node__with_node_can_not_retry(self):
    """retry_node refuses nodes whose retryable flag is False."""
    # Case 1: a service activity constructed with retryable=False.
    act = ServiceActivity(id=self.node_id, service=None, retryable=False)
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: act}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
    # Case 2: a parallel gateway with retryable forced off after creation.
    gateway = ParallelGateway(id=self.node_id, converge_gateway_id=uniqid())
    gateway.retryable = False
    process = MockPipelineProcess(
        top_pipeline=PipelineObject(nodes={self.node_id: gateway}))
    with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
        act_result = api.retry_node(self.node_id)
        self.assertFalse(act_result.result)
def get_parallel_gateway_test_pipeline():
    """Build a pipeline with a three-branch parallel gateway.

    Topology: start a -> parallel gateway b -> [c -> f, d -> g, e]
    -> converge gateway h -> end i. The context maps selected activity
    outputs into renamed global keys.
    """
    # Unique ids for every node in the graph.
    start_event_a_id = node_uniqid()
    gateway_b_id = node_uniqid()
    act_c_id = node_uniqid()
    act_d_id = node_uniqid()
    act_e_id = node_uniqid()
    act_f_id = node_uniqid()
    act_g_id = node_uniqid()
    gateway_h_id = node_uniqid()
    end_event_i_id = node_uniqid()
    # Nodes.
    start_event_a = EmptyStartEvent(start_event_a_id)
    gateway_b = ParallelGateway(gateway_b_id, gateway_h_id)
    act_c = ServiceActivity(act_c_id, service=SleepService(), data=DataObject({'data': '1'}))
    act_d = ServiceActivity(act_d_id, service=SleepService(), data=DataObject({'data': '1'}))
    act_e = ServiceActivity(act_e_id, service=SleepService(), data=DataObject({'data': '1'}))
    act_f = ServiceActivity(act_f_id, service=SleepService(), data=DataObject({'node_1': 'd'}))
    act_g = ServiceActivity(act_g_id, service=SleepService(), data=DataObject({'node_2': 'd'}))
    gateway_h = ConvergeGateway(gateway_h_id)
    end_event_i = EmptyEndEvent(end_event_i_id)
    # Sequence flows.
    flow_ab = SequenceFlow('ab', start_event_a, gateway_b)
    flow_bc = SequenceFlow('bc', gateway_b, act_c)
    flow_bd = SequenceFlow('bd', gateway_b, act_d)
    flow_be = SequenceFlow('be', gateway_b, act_e)
    flow_cf = SequenceFlow('cf', act_c, act_f)
    flow_dg = SequenceFlow('dg', act_d, act_g)
    flow_fh = SequenceFlow('fh', act_f, gateway_h)
    flow_gh = SequenceFlow('gh', act_g, gateway_h)
    flow_eh = SequenceFlow('eh', act_e, gateway_h)
    flow_hi = SequenceFlow('hi', gateway_h, end_event_i)
    # Register each flow on its endpoints' incoming/outgoing sets.
    start_event_a.outgoing.add_flow(flow_ab)
    gateway_b.incoming.add_flow(flow_ab)
    gateway_b.outgoing.add_flow(flow_bc)
    gateway_b.outgoing.add_flow(flow_bd)
    gateway_b.outgoing.add_flow(flow_be)
    act_c.incoming.add_flow(flow_bc)
    act_d.incoming.add_flow(flow_bd)
    act_e.incoming.add_flow(flow_be)
    act_c.outgoing.add_flow(flow_cf)
    act_d.outgoing.add_flow(flow_dg)
    act_e.outgoing.add_flow(flow_eh)
    act_f.incoming.add_flow(flow_cf)
    act_g.incoming.add_flow(flow_dg)
    act_f.outgoing.add_flow(flow_fh)
    act_g.outgoing.add_flow(flow_gh)
    gateway_h.incoming.add_flow(flow_fh)
    gateway_h.incoming.add_flow(flow_gh)
    gateway_h.incoming.add_flow(flow_eh)
    gateway_h.outgoing.add_flow(flow_hi)
    end_event_i.incoming.add_flow(flow_hi)
    spec = PipelineSpec(
        start_event_a, end_event_i,
        # Fixed: flow_hi was missing from the spec's flow list even though it
        # is wired between the converge gateway and the end event above.
        [
            flow_ab, flow_bc, flow_bd, flow_be, flow_cf, flow_dg, flow_fh,
            flow_gh, flow_eh, flow_hi
        ],
        [act_c, act_d, act_e, act_f, act_g],
        [gateway_b, gateway_h],
        data=DataObject({}),
        context=context.Context(act_outputs={
            act_f_id: {
                'node_1': 'node_1_heihei'
            },
            act_d_id: {
                'data': 'data_haha'
            }
        }, output_key=['node_1_heihei', 'data_haha']))
    return Pipeline(node_uniqid(), spec)
def get_exclusive_gateway_pipeline(result):
    """Build a pipeline routed through a three-way exclusive gateway.

    The gateway evaluates ``result == 1/2/3`` against the context scope
    (seeded from the ``result`` argument) and routes the token to activity
    d, e or f respectively; all branches converge before the end event.
    """
    # Unique ids for every node in the graph.
    start_event_a_id = node_uniqid()
    act_b_id = node_uniqid()
    gateway_c_id = node_uniqid()
    act_d_id = node_uniqid()
    act_e_id = node_uniqid()
    act_f_id = node_uniqid()
    gateway_g_id = node_uniqid()
    end_event_h_id = node_uniqid()
    # Nodes: start a -> b -> exclusive gateway c -> (d | e | f) -> converge g -> end h.
    start_event_a = EmptyStartEvent(start_event_a_id)
    b_act = ServiceActivity(act_b_id, service=EchoService(), data=DataObject({'a': 3}))
    c_gateway = ExclusiveGateway(gateway_c_id, gateway_g_id)
    d_act = ServiceActivity(act_d_id, service=LogService(), data=DataObject({'node': 'd'}))
    e_act = ServiceActivity(act_e_id, service=EchoService(), data=DataObject({'node': 'e'}))
    f_act = ServiceActivity(act_f_id, service=TestService(), data=DataObject({'node': 'f'}))
    g_gateway = ConvergeGateway(gateway_g_id)
    end_event_h = EmptyEndEvent(end_event_h_id)
    # Sequence flows between the nodes above.
    ab_flow = SequenceFlow('ab', start_event_a, b_act)
    bc_flow = SequenceFlow('bc', b_act, c_gateway)
    cd_flow = SequenceFlow('cd', c_gateway, d_act)
    ce_flow = SequenceFlow('ce', c_gateway, e_act)
    cf_flow = SequenceFlow('cf', c_gateway, f_act)
    dg_flow = SequenceFlow('dg', d_act, g_gateway)
    eg_flow = SequenceFlow('eg', e_act, g_gateway)
    fg_flow = SequenceFlow('fg', f_act, g_gateway)
    gh_flow = SequenceFlow('gh', g_gateway, end_event_h)
    # Register each flow on its endpoints' incoming/outgoing sets.
    start_event_a.outgoing.add_flow(ab_flow)
    b_act.incoming.add_flow(ab_flow)
    b_act.outgoing.add_flow(bc_flow)
    c_gateway.incoming.add_flow(bc_flow)
    c_gateway.outgoing.add_flow(cd_flow)
    c_gateway.outgoing.add_flow(ce_flow)
    c_gateway.outgoing.add_flow(cf_flow)
    d_act.incoming.add_flow(cd_flow)
    d_act.outgoing.add_flow(dg_flow)
    e_act.incoming.add_flow(ce_flow)
    e_act.outgoing.add_flow(eg_flow)
    f_act.incoming.add_flow(cf_flow)
    f_act.outgoing.add_flow(fg_flow)
    g_gateway.incoming.add_flow(dg_flow)
    g_gateway.incoming.add_flow(eg_flow)
    g_gateway.incoming.add_flow(fg_flow)
    g_gateway.outgoing.add_flow(gh_flow)
    end_event_h.incoming.add_flow(gh_flow)
    # One condition per outgoing branch of the exclusive gateway.
    c_gateway.add_condition(Condition('result == 1', cd_flow))
    c_gateway.add_condition(Condition('result == 2', ce_flow))
    c_gateway.add_condition(Condition('result == 3', cf_flow))
    spec = PipelineSpec(start_event_a, end_event_h, [
        ab_flow, bc_flow, cd_flow, ce_flow, cf_flow, dg_flow, eg_flow,
        fg_flow, gh_flow
    ], [b_act, d_act, e_act, f_act], [c_gateway, g_gateway],
        data=DataObject({}),
        context=context.Context(act_outputs={}, scope={'result': result}))
    return Pipeline(node_uniqid(), spec)