def workflow_processor(self, workflow_dict):
    """Advance a serialized workflow by a single tick.

    Rebuilds the ``Workflow`` from its dict form (stamping it with the
    current Celery task id), ticks it once, and — when work remains —
    re-queues this same task with the updated workflow state.
    """
    workflow_dict['id'] = self.request.id
    workflow = Workflow.from_dict(workflow_dict)
    if not workflow.tick():
        # Workflow finished; nothing left to schedule.
        return
    # we can retry infinitely due to processing limit timestamp
    self.request.retries -= 1
    self.retry(
        kwargs=dict(workflow_dict=workflow.to_dict()),
        countdown=workflow.get_retry_countdown(),
    )
def test_from_dict_with_custom_payload(self):
    """A serialized workflow carrying ``custom_payload`` restores it verbatim."""
    wf_dict = {
        'finished': {'1': False},
        'running': {'2': True},
        'nodes': {'some': 'data', 'some2': 'data2'},
        'processing_limit_ts': 5000,
        'version': 1,
        'retry_policy': ['random', 10, 30],
        'stats': {'last_apply_async_tick': 0, 'ticks': 25},
        'id': None,
        'state': 'RUNNING',
        'custom_payload': {'foo': 'bar'},
    }
    with mock.patch.object(WorkflowNode, 'from_dict') as node_from_dict:
        node_from_dict.return_value = 'some_result'

        wf = Workflow.from_dict(wf_dict)

        assert wf.state == 'RUNNING'
        assert wf.running == {'2': True}
        assert wf.finished == {'1': False}
        assert wf.version == 1
        assert wf.nodes == {
            'some': 'some_result',
            'some2': 'some_result',
        }
        # The missing counter is backfilled with a zero default.
        assert wf.stats == {
            'last_apply_async_tick': 0,
            'ticks': 25,
            'consecutive_celery_error_ticks': 0,
        }
        assert wf.processing_limit_ts == 5000
        assert wf.custom_payload == {'foo': 'bar'}
        # order of calls may not be preserved in older versions of Python
        # as dict order was non-deterministic < 3.7
        node_from_dict.assert_has_calls([
            mock.call('data'),
            mock.call('data2'),
        ], any_order=True)
def test_from_dict(self):
    """Deserialization rebuilds every workflow attribute from its dict form."""
    wf_dict = {
        'finished': {'1': False},
        'running': {'2': True},
        'nodes': {'some': 'data', 'some2': 'data2'},
        'processing_limit_ts': 5000,
        'version': 1,
        'retry_policy': ['random', 10, 30],
        'stats': {'last_apply_async_tick': 0, 'ticks': 25},
        'id': None,
        'state': 'RUNNING',
    }
    with mock.patch.object(WorkflowNode, 'from_dict') as node_from_dict:
        node_from_dict.return_value = 'some_result'

        wf = Workflow.from_dict(wf_dict)

        assert wf.state == 'RUNNING'
        assert wf.running == {'2': True}
        assert wf.finished == {'1': False}
        assert wf.version == 1
        # Every node dict is routed through WorkflowNode.from_dict.
        assert wf.nodes == {
            'some': 'some_result',
            'some2': 'some_result',
        }
        # The missing counter is backfilled with a zero default.
        assert wf.stats == {
            'last_apply_async_tick': 0,
            'ticks': 25,
            'consecutive_celery_error_ticks': 0,
        }
        assert wf.processing_limit_ts == 5000
        # No payload in the input dict means an empty payload after load.
        assert wf.custom_payload == {}
        node_from_dict.assert_has_calls([
            mock.call('data'),
            mock.call('data2'),
        ])
def test_to_from_dict(self, some_sigs):
    """A workflow survives a to_dict/from_dict round trip and still executes."""
    wf = Workflow()
    wf.add_celery_canvas(celery_canvas.chain(some_sigs[:3]))

    restored = Workflow.from_dict(wf.to_dict())
    assert wf.nodes.keys() == restored.nodes.keys()

    # use build_exec_asserts helper to rebuild asserts
    # build_exec_asserts(wf)
    for expected_task in ('task-0', 'task-1', 'task-2'):
        assert wf.simulate_tick()
        assert wf.running == {expected_task: True}  # noqa
    assert not wf.simulate_tick()