def test_combined_store(self):
    """Run a conductor job whose flow + job stores combine into SUCCESS."""
    parts = self.make_components()
    parts.conductor.connect()
    removed = threading.Event()

    def note_removed(state, details):
        removed.set()

    flow_store = {'x': True, 'y': False}
    job_store = {'z': None}
    parts.board.notifier.register(base.REMOVAL, note_removed)
    with close_many(parts.conductor, parts.client):
        worker = threading_utils.daemon_thread(parts.conductor.run)
        worker.start()
        lb, fd = pu.temporary_flow_detail(parts.persistence,
                                          meta={'store': flow_store})
        engines.save_factory_details(fd, test_store_factory, [], {},
                                     backend=parts.persistence)
        parts.board.post('poke', lb,
                         details={'flow_uuid': fd.uuid, 'store': job_store})
        self.assertTrue(removed.wait(test_utils.WAIT_TIMEOUT))
        parts.conductor.stop()
        self.assertTrue(parts.conductor.wait(test_utils.WAIT_TIMEOUT))
        self.assertFalse(parts.conductor.dispatching)
    # Re-read the flow detail from persistence to verify the final state.
    with contextlib.closing(parts.persistence.get_connection()) as conn:
        lb = conn.get_logbook(lb.uuid)
        fd = lb.find(fd.uuid)
        self.assertIsNotNone(fd)
        self.assertEqual(st.SUCCESS, fd.state)
def test_get_flow_state(self):
    """A flow state persisted to the backend is read back by storage."""
    _lb, fd = p_utils.temporary_flow_detail(backend=self.backend)
    fd.state = states.FAILURE
    with contextlib.closing(self.backend.get_connection()) as conn:
        fd.update(conn.update_flow_details(fd))
    s = storage.Storage(flow_detail=fd, backend=self.backend)
    # Fix: assertEquals() is a deprecated alias for assertEqual(); also use
    # the conventional (expected, actual) argument order.
    self.assertEqual(states.FAILURE, s.get_flow_state())
def test_get_flow_state(self):
    """A failure state saved into the backend is visible via storage."""
    _book, detail = p_utils.temporary_flow_detail(backend=self.backend)
    detail.state = states.FAILURE
    with contextlib.closing(self.backend.get_connection()) as conn:
        detail.update(conn.update_flow_details(detail))
    store = self._get_storage(detail)
    self.assertEqual(states.FAILURE, store.get_flow_state())
def _get_storage(self, flow_detail=None, threaded=False):
    """Build a storage object (multi-threaded on request) for tests.

    When no flow detail is supplied a temporary one is created against
    the test backend.
    """
    if flow_detail is None:
        _lb, flow_detail = p_utils.temporary_flow_detail(self.backend)
    cls = (storage.MultiThreadedStorage if threaded
           else storage.SingleThreadedStorage)
    return cls(flow_detail=flow_detail, backend=self.backend)
def test_run_max_dispatches(self):
    """The conductor stops consuming once max_dispatches is reached."""
    parts = self.make_components()
    parts.conductor.connect()
    removed = threading.Event()

    def note_removed(state, details):
        removed.set()

    parts.board.notifier.register(base.REMOVAL, note_removed)
    with close_many(parts.client, parts.conductor):
        worker = threading_utils.daemon_thread(
            lambda: parts.conductor.run(max_dispatches=5))
        worker.start()
        lb, fd = pu.temporary_flow_detail(parts.persistence)
        engines.save_factory_details(fd, test_factory, [False], {},
                                     backend=parts.persistence)
        for _ in range(5):
            parts.board.post('poke', lb, details={'flow_uuid': fd.uuid})
            self.assertTrue(removed.wait(test_utils.WAIT_TIMEOUT))
        # A sixth posting should not be dispatched anymore.
        parts.board.post('poke', lb, details={'flow_uuid': fd.uuid})
        parts.conductor.stop()
        self.assertTrue(parts.conductor.wait(test_utils.WAIT_TIMEOUT))
        self.assertFalse(parts.conductor.dispatching)
def test_no_meta(self):
    """Flow details without factory metadata cannot be reloaded."""
    _book, detail = p_utils.temporary_flow_detail()
    self.assertEqual({}, detail.meta)
    with self.assertRaisesRegexp(
            ValueError, '^Cannot .* no factory information saved.$'):
        taskflow.engines.flow_from_detail(detail)
def test_posting_with_book(self):
    """Posting a job with a logbook exposes that book through the job."""
    backend = impl_dir.DirBackend(conf={'path': self.makeTmpDir()})
    backend.get_connection().upgrade()
    book, detail = p_utils.temporary_flow_detail(backend)
    self.assertEqual(1, len(book))
    client, board = create_board(persistence=backend)
    self.addCleanup(board.close)
    with connect_close(board):
        board.post('test', book)
        client.flush()
        unclaimed = list(board.iterjobs(only_unclaimed=True))
        self.assertEqual(1, len(unclaimed))
        job = unclaimed[0]
        self.assertEqual(1, len(job.book))
        self.assertEqual(book.name, job.book.name)
        self.assertEqual(book.uuid, job.book.uuid)
        job_details = list(job.book)
        self.assertEqual(detail.uuid, job_details[0].uuid)
        self.assertEqual(detail.name, job_details[0].name)
def test_posting_with_book(self):
    """A posted job carries its logbook name, uuid and flow details."""
    backend = impl_dir.DirBackend(conf={'path': self.makeTmpDir()})
    backend.get_connection().upgrade()
    book, detail = p_utils.temporary_flow_detail(backend)
    self.assertEqual(1, len(book))
    client, board = self.create_board(persistence=backend)
    with connect_close(board):
        with self.flush(client):
            board.post('test', book)
        unclaimed = list(board.iterjobs(only_unclaimed=True))
        self.assertEqual(1, len(unclaimed))
        job = unclaimed[0]
        self.assertEqual(1, len(job.book))
        self.assertEqual(book.name, job.book.name)
        self.assertEqual(book.uuid, job.book.uuid)
        self.assertEqual(book.name, job.book_name)
        self.assertEqual(book.uuid, job.book_uuid)
        job_details = list(job.book)
        self.assertEqual(detail.uuid, job_details[0].uuid)
        self.assertEqual(detail.name, job_details[0].name)
def test_get_without_save(self):
    """A task detail already in the flow detail is visible without saving."""
    _lb, flow_detail = p_utils.temporary_flow_detail(self.backend)
    td = logbook.TaskDetail(name='my_task', uuid='42')
    flow_detail.add(td)
    s = storage.Storage(backend=self.backend, flow_detail=flow_detail)
    # Fix: assertEquals() is a deprecated alias for assertEqual().
    self.assertEqual('42', s.get_task_uuid('my_task'))
def test_ensure_existing_task(self):
    """ensure_atom() reuses the uuid of an already present task detail."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    existing = models.TaskDetail(name='my_task', uuid='42')
    detail.add(existing)
    store = self._get_storage(detail)
    store.ensure_atom(test_utils.NoopTask('my_task'))
    self.assertEqual('42', store.get_atom_uuid('my_task'))
def test_sequential_flow_iter_suspend_resume(self):
    """Suspending run_iter() stops after task1; resuming runs task2."""
    flow = lf.Flow('flow-2').add(utils.SaveOrderTask(name='task1'),
                                 utils.SaveOrderTask(name='task2'))
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    engine = self._make_engine(flow, flow_detail=detail)
    machine = engine.run_iter()
    seen = []
    request = None
    while True:
        try:
            state = machine.send(request)
        except StopIteration:
            break
        seen.append(state)
        if state == states.WAITING:
            # Stop it before task2 runs/starts.
            request = True
    self.assertTrue(len(seen) > 0)
    self.assertEqual(self.values, ['task1'])
    self.assertEqual(states.SUSPENDED, engine.storage.get_flow_state())
    # Attempt to resume it and see what runs now...
    #
    # NOTE(harlowja): Clear all the values, but don't reset the reference.
    del self.values[:]
    seen = list(engine.run_iter())
    self.assertTrue(len(seen) > 0)
    self.assertEqual(self.values, ['task2'])
    self.assertEqual(states.SUCCESS, engine.storage.get_flow_state())
def test_run_max_dispatches(self):
    """After five dispatches the conductor refuses further jobs."""
    parts = self.make_components()
    parts.conductor.connect()
    consumed = threading.Event()

    def on_removal(state, details):
        consumed.set()

    parts.board.notifier.register(base.REMOVAL, on_removal)
    with close_many(parts.client, parts.conductor):
        runner = threading_utils.daemon_thread(
            lambda: parts.conductor.run(max_dispatches=5))
        runner.start()
        lb, fd = pu.temporary_flow_detail(parts.persistence)
        engines.save_factory_details(fd, test_factory, [False], {},
                                     backend=parts.persistence)
        for _ in range(5):
            parts.board.post('poke', lb, details={'flow_uuid': fd.uuid})
            self.assertTrue(consumed.wait(test_utils.WAIT_TIMEOUT))
        # One more than the dispatch budget; should stay unconsumed.
        parts.board.post('poke', lb, details={'flow_uuid': fd.uuid})
        parts.conductor.stop()
        self.assertTrue(parts.conductor.wait(test_utils.WAIT_TIMEOUT))
        self.assertFalse(parts.conductor.dispatching)
def test_sequential_flow_iter_suspend_resume(self):
    """Suspend between tasks via run_iter(), then resume to completion."""
    flow = lf.Flow('flow-2').add(
        utils.ProgressingTask(name='task1'),
        utils.ProgressingTask(name='task2')
    )
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    engine = self._make_engine(flow, flow_detail=detail)
    with utils.CaptureListener(engine, capture_flow=False) as capturer:
        machine = engine.run_iter()
        seen = []
        request = None
        while True:
            try:
                state = machine.send(request)
            except StopIteration:
                break
            seen.append(state)
            if state == states.WAITING:
                # Stop it before task2 runs/starts.
                request = True
    self.assertTrue(len(seen) > 0)
    self.assertEqual(['task1.t RUNNING', 'task1.t SUCCESS(5)'],
                     capturer.values)
    self.assertEqual(states.SUSPENDED, engine.storage.get_flow_state())
    # Attempt to resume it and see what runs now...
    with utils.CaptureListener(engine, capture_flow=False) as capturer:
        seen = list(engine.run_iter())
    self.assertTrue(len(seen) > 0)
    self.assertEqual(['task2.t RUNNING', 'task2.t SUCCESS(5)'],
                     capturer.values)
    self.assertEqual(states.SUCCESS, engine.storage.get_flow_state())
def test_ensure_existing_task(self):
    """An existing task detail's uuid survives ensure_atom()."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    detail.add(logbook.TaskDetail(name="my_task", uuid="42"))
    store = self._get_storage(detail)
    store.ensure_atom(test_utils.NoopTask("my_task"))
    self.assertEqual("42", store.get_atom_uuid("my_task"))
def test_run(self):
    """A posted job is run by the conductor and ends in SUCCESS."""
    parts = self.make_components()
    parts.conductor.connect()
    removed = threading.Event()

    def note_removed(state, details):
        removed.set()

    parts.board.notifier.register(base.REMOVAL, note_removed)
    with close_many(parts.conductor, parts.client):
        worker = threading_utils.daemon_thread(parts.conductor.run)
        worker.start()
        lb, fd = pu.temporary_flow_detail(parts.persistence)
        engines.save_factory_details(fd, test_factory, [False], {},
                                     backend=parts.persistence)
        parts.board.post('poke', lb, details={'flow_uuid': fd.uuid})
        self.assertTrue(removed.wait(test_utils.WAIT_TIMEOUT))
        parts.conductor.stop()
        self.assertTrue(parts.conductor.wait(test_utils.WAIT_TIMEOUT))
        self.assertFalse(parts.conductor.dispatching)
    # Confirm the persisted flow detail ended up successful.
    with contextlib.closing(parts.persistence.get_connection()) as conn:
        lb = conn.get_logbook(lb.uuid)
        fd = lb.find(fd.uuid)
        self.assertIsNotNone(fd)
        self.assertEqual(st.SUCCESS, fd.state)
def test_sequential_flow_iter_suspend_resume(self):
    """run_iter() can be suspended after task1 and resumed to run task2."""
    flow = lf.Flow('flow-2').add(
        utils.SaveOrderTask(name='task1'),
        utils.SaveOrderTask(name='task2')
    )
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    engine = self._make_engine(flow, flow_detail=detail)
    stepper = engine.run_iter()
    observed = []
    reply = None
    while True:
        try:
            current = stepper.send(reply)
        except StopIteration:
            break
        observed.append(current)
        if current == states.WAITING:
            # Stop it before task2 runs/starts.
            reply = True
    self.assertTrue(len(observed) > 0)
    self.assertEqual(self.values, ['task1'])
    self.assertEqual(states.SUSPENDED, engine.storage.get_flow_state())
    # Attempt to resume it and see what runs now...
    #
    # NOTE(harlowja): Clear all the values, but don't reset the reference.
    del self.values[:]
    observed = list(engine.run_iter())
    self.assertTrue(len(observed) > 0)
    self.assertEqual(self.values, ['task2'])
    self.assertEqual(states.SUCCESS, engine.storage.get_flow_state())
def test_fail_run(self):
    """A failing flow is reverted when executed through the conductor."""
    parts = self.make_components()
    parts.conductor.connect()
    removed = threading_utils.Event()

    def note_removed(state, details):
        removed.set()

    parts.board.notifier.register(jobboard.REMOVAL, note_removed)
    with close_many(parts.conductor, parts.client):
        worker = threading_utils.daemon_thread(parts.conductor.run)
        worker.start()
        lb, fd = pu.temporary_flow_detail(parts.persistence)
        # [True] makes the test factory build a flow that fails.
        engines.save_factory_details(fd, test_factory, [True], {},
                                     backend=parts.persistence)
        parts.board.post('poke', lb, details={'flow_uuid': fd.uuid})
        self.assertTrue(removed.wait(test_utils.WAIT_TIMEOUT))
        self.assertTrue(parts.conductor.stop(test_utils.WAIT_TIMEOUT))
        self.assertFalse(parts.conductor.dispatching)
    with contextlib.closing(parts.persistence.get_connection()) as conn:
        lb = conn.get_logbook(lb.uuid)
        fd = lb.find(fd.uuid)
        self.assertIsNotNone(fd)
        self.assertEqual(st.REVERTED, fd.state)
def test_no_importable_function(self):
    """A factory name that cannot be imported raises ImportError."""
    _book, detail = p_utils.temporary_flow_detail()
    detail.meta = dict(factory=dict(
        name='you can not import me, i contain spaces'
    ))
    with self.assertRaisesRegexp(ImportError, '^Could not import factory'):
        taskflow.engines.flow_from_detail(detail)
def test_ensure_existing_task(self):
    """ensure_task() keeps the uuid of a pre-existing task detail."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    detail.add(logbook.TaskDetail(name='my_task', uuid='42'))
    store = storage.Storage(backend=self.backend, flow_detail=detail)
    store.ensure_task('my_task')
    self.assertEqual('42', store.get_task_uuid('my_task'))
def _get_storage(self, threaded=False):
    """Create a storage (single or multi threaded) over a temp flow detail."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    cls = (storage.MultiThreadedStorage if threaded
           else storage.SingleThreadedStorage)
    return cls(backend=self.backend, flow_detail=detail)
def test_transient_storage_restore(self):
    """Transient injections are not persisted across storage instances."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    first = self._get_storage(flow_detail=detail)
    first.inject([("a", "b")], transient=True)
    first.inject([("b", "c")])
    # A second storage over the same flow detail only sees persisted data.
    second = self._get_storage(flow_detail=detail)
    self.assertEqual({"b": "c"}, second.fetch_all())
def test_ensure_task_fd(self):
    """ensure_task() creates a PENDING task detail in the flow detail."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    store = storage.Storage(backend=self.backend, flow_detail=detail)
    store.ensure_task('my task', '3.11')
    td = detail.find(store.get_task_uuid('my task'))
    self.assertIsNotNone(td)
    self.assertEqual('my task', td.name)
    self.assertEqual('3.11', td.version)
    self.assertEqual(states.PENDING, td.state)
def test_ensure_task_flow_detail(self):
    """Ensuring a task records its name, version and PENDING state."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    store = self._get_storage(detail)
    store.ensure_task("my task", "3.11")
    td = detail.find(store.get_task_uuid("my task"))
    self.assertIsNotNone(td)
    self.assertEqual("my task", td.name)
    self.assertEqual("3.11", td.version)
    self.assertEqual(states.PENDING, td.state)
def test_ensure_task_flow_detail(self):
    """A freshly ensured task detail starts out PENDING."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    store = self._get_storage(detail)
    store.ensure_task('my task', '3.11')
    found = detail.find(store.get_task_uuid('my task'))
    self.assertIsNotNone(found)
    self.assertEqual('my task', found.name)
    self.assertEqual('3.11', found.version)
    self.assertEqual(states.PENDING, found.state)
def test_no_arg_factory(self):
    """A zero-argument factory is imported and invoked to build the flow."""
    name = 'some.test.factory'
    _lb, flow_detail = p_utils.temporary_flow_detail()
    flow_detail.meta = dict(factory=dict(name=name))
    with mock.patch('taskflow.openstack.common.importutils.import_class',
                    return_value=lambda: 'RESULT') as mock_import:
        result = taskflow.engines.flow_from_detail(flow_detail)
    # Fix: 'assert_called_onec_with' was a typo -- a Mock silently
    # auto-creates unknown attributes, so the old line asserted nothing.
    mock_import.assert_called_once_with(name)
    self.assertEqual('RESULT', result)
def test_no_arg_factory(self):
    """An argument-less factory builds the flow after being imported."""
    factory_name = 'some.test.factory'
    _book, detail = p_utils.temporary_flow_detail()
    detail.meta = dict(factory=dict(name=factory_name))
    with mock.patch('oslo_utils.importutils.import_class',
                    return_value=lambda: 'RESULT') as mock_import:
        result = taskflow.engines.flow_from_detail(detail)
    mock_import.assert_called_once_with(factory_name)
    self.assertEqual('RESULT', result)
def test_deregister(self):
    """Verify that register and deregister don't blow up."""
    with contextlib.closing(impl_memory.MemoryBackend()) as be:
        flow = lf.Flow("test")
        flow.add(SleepyTask("test-1", sleep_for=0.1))
        lb, fd = persistence_utils.temporary_flow_detail(be)
        engine = self._make_engine(flow, fd, be)
        listener = timing.DurationListener(engine)
        listener.register()
        listener.deregister()
def test_factory_with_arg(self):
    """A single-argument factory receives its saved argument."""
    factory_name = 'some.test.factory'
    _book, detail = p_utils.temporary_flow_detail()
    detail.meta = dict(factory=dict(name=factory_name, args=['foo']))
    with mock.patch('oslo_utils.importutils.import_class',
                    return_value=lambda x: 'RESULT %s' % x) as mock_import:
        result = taskflow.engines.flow_from_detail(detail)
    mock_import.assert_called_once_with(factory_name)
    self.assertEqual('RESULT foo', result)
def test_add_task_fd(self):
    """add_task() records a PENDING task detail into the flow detail."""
    _lb, flow_detail = p_utils.temporary_flow_detail(self.backend)
    s = storage.Storage(backend=self.backend, flow_detail=flow_detail)
    s.add_task('42', 'my task', '3.11')
    td = flow_detail.find('42')
    # Fix: assertIsNotNone states the intent better than assertIsNot(td,
    # None), and assertEquals() is a deprecated alias for assertEqual().
    self.assertIsNotNone(td)
    self.assertEqual('42', td.uuid)
    self.assertEqual('my task', td.name)
    self.assertEqual('3.11', td.version)
    self.assertEqual(states.PENDING, td.state)
def test_factory_with_arg(self):
    """A factory taking one argument is called with its stored args."""
    name = 'some.test.factory'
    _lb, flow_detail = p_utils.temporary_flow_detail()
    flow_detail.meta = dict(factory=dict(name=name, args=['foo']))
    with mock.patch('taskflow.openstack.common.importutils.import_class',
                    return_value=lambda x: 'RESULT %s' % x) as mock_import:
        result = taskflow.engines.flow_from_detail(flow_detail)
    # Fix: 'assert_called_onec_with' was a typo that silently passed
    # (Mock auto-creates unknown attributes); assert properly now.
    mock_import.assert_called_once_with(name)
    self.assertEqual('RESULT foo', result)
def test_ensure_task_flow_detail(self):
    """ensure_atom() stores name, stringified version and PENDING state."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    store = self._get_storage(detail)
    noop = test_utils.NoopTask('my task')
    noop.version = (3, 11)
    store.ensure_atom(noop)
    td = detail.find(store.get_atom_uuid('my task'))
    self.assertIsNotNone(td)
    # The (3, 11) tuple is persisted in its string form.
    self.assertEqual('my task', td.name)
    self.assertEqual('3.11', td.version)
    self.assertEqual(states.PENDING, td.state)
def test_ensure_task_flow_detail(self):
    """Ensuring a versioned task records its metadata in the flow detail."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    store = self._get_storage(detail)
    task = test_utils.NoopTask('my task')
    task.version = (3, 11)
    store.ensure_atom(task)
    found = detail.find(store.get_atom_uuid('my task'))
    self.assertIsNotNone(found)
    self.assertEqual('my task', found.name)
    self.assertEqual('3.11', found.version)
    self.assertEqual(states.PENDING, found.state)
def test_record_ending_exception(self, mocked_warn):
    """Storage failures while recording duration are warned, not raised."""
    with contextlib.closing(impl_memory.MemoryBackend()) as be:
        flow = lf.Flow("test")
        flow.add(test_utils.TaskNoRequiresNoReturns("test-1"))
        lb, fd = persistence_utils.temporary_flow_detail(be)
        engine = self._make_engine(flow, fd, be)
        listener = timing.DurationListener(engine)
        with mock.patch.object(listener._engine.storage,
                               "update_atom_metadata") as mocked_uam:
            mocked_uam.side_effect = exc.StorageFailure("Woot!")
            with listener:
                engine.run()
            mocked_warn.assert_called_once_with(mock.ANY, mock.ANY,
                                                "test-1", exc_info=True)
def test_flow_duration(self):
    """The duration listener records at least the task's sleep time."""
    with contextlib.closing(impl_memory.MemoryBackend()) as be:
        flow = lf.Flow("test")
        flow.add(SleepyTask("test-1", sleep_for=0.1))
        (lb, fd) = persistence_utils.temporary_flow_detail(be)
        e = self._make_engine(flow, fd, be)
        with timing.DurationListener(e):
            e.run()
        self.assertIsNotNone(fd)
        self.assertIsNotNone(fd.meta)
        self.assertIn('duration', fd.meta)
        # Fix: argument order was inverted (it asserted duration <= 0.1);
        # the flow contains a task sleeping 0.1s, so the recorded duration
        # must be at least 0.1.
        self.assertGreaterEqual(fd.meta['duration'], 0.1)
def test_storage_progress(self):
    """Task progress ends at 1.0 and is persisted into the task detail."""
    with contextlib.closing(impl_memory.MemoryBackend({})) as be:
        flow = lf.Flow("test")
        flow.add(ProgressTask("test", 3))
        _book, detail = p_utils.temporary_flow_detail(be)
        engine = self._make_engine(flow, flow_detail=detail, backend=be)
        engine.run()
        self.assertEqual(1.0, engine.storage.get_task_progress("test"))
        td = detail.find(engine.storage.get_task_uuid("test"))
        self.assertEqual(1.0, td.meta['progress'])
        self.assertFalse(td.meta['progress_details'])
def test_creation_default(self):
    """Default values are forwarded to the executor with an empty config."""
    flow = lf.Flow('test-flow').add(utils.DummyTask())
    _book, detail = pu.temporary_flow_detail()
    engine.WorkerBasedActionEngine(flow, detail, None, {}).compile()
    expected = [
        mock.call.executor_class(uuid=detail.uuid,
                                 url=None,
                                 exchange='default',
                                 workers_info={},
                                 transport=None,
                                 transport_options=None)
    ]
    self.assertEqual(self.master_mock.mock_calls, expected)
def test_duration(self):
    """A task sleeping 0.1s must record a duration of at least 0.1s."""
    with contextlib.closing(impl_memory.MemoryBackend({})) as be:
        flo = lf.Flow("test")
        flo.add(SleepyTask("test-1", sleep_for=0.1))
        (lb, fd) = p_utils.temporary_flow_detail(be)
        e = self.make_engine(flo, fd, be)
        with timing.TimingListener(e):
            e.run()
        t_uuid = e.storage.get_task_uuid("test-1")
        td = fd.find(t_uuid)
        self.assertIsNotNone(td)
        self.assertIsNotNone(td.meta)
        self.assertIn('duration', td.meta)
        # Fix: arguments were inverted (asserted duration <= 0.1); a task
        # sleeping 0.1s must take at least that long.
        self.assertGreaterEqual(td.meta['duration'], 0.1)
def test_duration(self):
    """A 0.1s sleepy task's recorded duration must be >= 0.1s."""
    with contextlib.closing(impl_memory.MemoryBackend({})) as be:
        flo = lf.Flow("test")
        flo.add(SleepyTask("test-1", sleep_for=0.1))
        (lb, fd) = p_utils.temporary_flow_detail(be)
        e = self.make_engine(flo, fd, be)
        with timing.TimingListener(e):
            e.run()
        t_uuid = e.storage.get_uuid_by_name("test-1")
        td = fd.find(t_uuid)
        self.assertIsNotNone(td)
        self.assertIsNotNone(td.meta)
        self.assertIn('duration', td.meta)
        # Fix: arguments were inverted (asserted duration <= 0.1); a task
        # sleeping 0.1s must take at least that long.
        self.assertGreaterEqual(td.meta['duration'], 0.1)
def test_record_ending_exception(self, mocked_warn):
    """Failures saving timing metadata are warned about, not raised."""
    with contextlib.closing(impl_memory.MemoryBackend()) as be:
        flow = lf.Flow("test")
        flow.add(test_utils.TaskNoRequiresNoReturns("test-1"))
        lb, fd = persistence_utils.temporary_flow_detail(be)
        engine = self._make_engine(flow, fd, be)
        listener = timing.TimingListener(engine)
        with mock.patch.object(listener._engine.storage,
                               'update_atom_metadata') as mocked_uam:
            mocked_uam.side_effect = exc.StorageFailure('Woot!')
            with listener:
                engine.run()
            mocked_warn.assert_called_once_with(mock.ANY, mock.ANY, 'test-1',
                                                exc_info=True)
def test_creation_default(self):
    """With an empty config the executor receives all default values."""
    flow = lf.Flow('test-flow').add(utils.DummyTask())
    _book, detail = pu.temporary_flow_detail()
    engine.WorkerBasedActionEngine(flow, detail, None, {}).compile()
    expected = [
        mock.call.executor_class(uuid=detail.uuid,
                                 url=None,
                                 exchange='default',
                                 topics=[],
                                 transport=None,
                                 transport_options=None)
    ]
    self.assertEqual(self.master_mock.mock_calls, expected)
def test_duration(self):
    """The duration listener records >= 0.1s for a 0.1s sleepy task."""
    with contextlib.closing(impl_memory.MemoryBackend()) as be:
        flow = lf.Flow("test")
        flow.add(SleepyTask("test-1", sleep_for=0.1))
        (lb, fd) = persistence_utils.temporary_flow_detail(be)
        e = self._make_engine(flow, fd, be)
        with timing.DurationListener(e):
            e.run()
        t_uuid = e.storage.get_atom_uuid("test-1")
        td = fd.find(t_uuid)
        self.assertIsNotNone(td)
        self.assertIsNotNone(td.meta)
        self.assertIn("duration", td.meta)
        # Fix: arguments were inverted (asserted duration <= 0.1); a task
        # sleeping 0.1s must take at least that long.
        self.assertGreaterEqual(td.meta["duration"], 0.1)
def test_creation_custom(self):
    """Every custom config value is forwarded to the executor."""
    flow = lf.Flow('test-flow').add(utils.DummyTask())
    _book, detail = pu.temporary_flow_detail()
    config = {'url': self.broker_url,
              'exchange': self.exchange,
              'topics': self.topics,
              'transport': 'memory',
              'transport_options': {},
              'transition_timeout': 200}
    engine.WorkerBasedActionEngine(flow, detail, None, config).compile()
    expected = [
        mock.call.executor_class(uuid=detail.uuid,
                                 url=self.broker_url,
                                 exchange=self.exchange,
                                 topics=self.topics,
                                 transport='memory',
                                 transport_options={},
                                 transition_timeout=200)
    ]
    self.assertEqual(self.master_mock.mock_calls, expected)
def test_sequential_flow_two_tasks_with_resumption(self):
    """With task1 already SUCCESS in storage, only task2 runs on resume."""
    flow = lf.Flow('lf-2-r').add(
        utils.SaveOrderTask(name='task1', provides='x1'),
        utils.SaveOrderTask(name='task2', provides='x2'))
    # Create FlowDetail as if we already ran task1.
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    finished = logbook.TaskDetail(name='task1', uuid='42')
    finished.state = states.SUCCESS
    finished.results = 17
    detail.add(finished)
    with contextlib.closing(self.backend.get_connection()) as conn:
        detail.update(conn.update_flow_details(detail))
        finished.update(conn.update_task_details(finished))
    engine = self._make_engine(flow, detail)
    engine.run()
    self.assertEqual(['task2'], self.values)
    self.assertEqual({'x1': 17, 'x2': 5}, engine.storage.fetch_all())
def test_sequential_flow_two_tasks_with_resumption(self):
    """Resuming with task1 pre-recorded as SUCCESS only executes task2."""
    flow = lf.Flow('lf-2-r').add(
        utils.ProgressingTask(name='task1', provides='x1'),
        utils.ProgressingTask(name='task2', provides='x2'))
    # Create FlowDetail as if we already ran task1.
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    finished = logbook.TaskDetail(name='task1', uuid='42')
    finished.state = states.SUCCESS
    finished.results = 17
    detail.add(finished)
    with contextlib.closing(self.backend.get_connection()) as conn:
        detail.update(conn.update_flow_details(detail))
        finished.update(conn.update_atom_details(finished))
    engine = self._make_engine(flow, detail)
    with utils.CaptureListener(engine, capture_flow=False) as capturer:
        engine.run()
    self.assertEqual(['task2.t RUNNING', 'task2.t SUCCESS(5)'],
                     capturer.values)
    self.assertEqual(engine.storage.fetch_all(), {'x1': 17, 'x2': 5})
def test_dual_storage_progress(self):
    """Progress both fires bound callbacks and lands in storage/meta."""
    fired = []

    def notify_me(task, event_data, progress):
        fired.append(progress)

    with contextlib.closing(impl_memory.MemoryBackend({})) as be:
        t = ProgressTask("test", 5)
        t.bind('update_progress', notify_me)
        flow = lf.Flow("test")
        flow.add(t)
        _book, detail = p_utils.temporary_flow_detail(be)
        engine = self._make_engine(flow, flow_detail=detail, backend=be)
        engine.run()
        self.assertEqual(1.0, engine.storage.get_task_progress("test"))
        td = detail.find(engine.storage.get_task_uuid("test"))
        self.assertEqual(1.0, td.meta['progress'])
        self.assertFalse(td.meta['progress_details'])
        # 5 increments plus the final 1.0 update.
        self.assertEqual(6, len(fired))
def test_dual_storage_progress(self):
    """Progress notifications fire and the final value is persisted."""
    fired = []

    def notify_me(event_type, details):
        fired.append(details.pop('progress'))

    with contextlib.closing(impl_memory.MemoryBackend({})) as be:
        t = ProgressTask("test", 5)
        t.notifier.register(task.EVENT_UPDATE_PROGRESS, notify_me)
        flow = lf.Flow("test")
        flow.add(t)
        _book, detail = p_utils.temporary_flow_detail(be)
        engine = self._make_engine(flow, flow_detail=detail, backend=be)
        engine.run()
        self.assertEqual(1.0, engine.storage.get_task_progress("test"))
        td = detail.find(engine.storage.get_atom_uuid("test"))
        self.assertEqual(1.0, td.meta['progress'])
        self.assertFalse(td.meta['progress_details'])
        # 5 increments plus the final 1.0 update.
        self.assertEqual(6, len(fired))
def test_non_saving_storage(self):
    """Storage still works without a backend (nothing gets persisted)."""
    _lb, flow_detail = p_utils.temporary_flow_detail(self.backend)
    s = storage.Storage(flow_detail=flow_detail)  # no backend
    s.add_task('42', 'my task')
    # Fix: assertEquals() is a deprecated alias for assertEqual(); use the
    # conventional (expected, actual) ordering as well.
    self.assertEqual('42', s.get_uuid_by_name('my task'))
def _get_storage(self):
    """Return a storage object wrapping a fresh temporary flow detail."""
    _book, detail = p_utils.temporary_flow_detail(self.backend)
    return storage.Storage(backend=self.backend, flow_detail=detail)