def test_flatten_checks_for_dups_globally(self):
    """Flattening must reject duplicate task names across nested flows."""
    # Two sibling sub-flows each contain a task named "a"; the duplicate
    # only becomes visible once the whole graph is flattened.
    flo = gf.Flow("test").add(
        gf.Flow("int1").add(t_utils.DummyTask(name="a")),
        gf.Flow("int2").add(t_utils.DummyTask(name="a")))
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    self.assertRaisesRegex(exc.InvariantViolation,
                           '^Tasks with duplicate names',
                           f_utils.flatten, flo)
def test_checks_for_dups_globally(self):
    """Compilation fails when nested flows hold identically named atoms."""
    inner_one = gf.Flow("int1").add(test_utils.DummyTask(name="a"))
    inner_two = gf.Flow("int2").add(test_utils.DummyTask(name="a"))
    flo = gf.Flow("test").add(inner_one, inner_two)
    e = engines.load(flo)
    self.assertRaisesRegex(exc.Duplicate,
                           '^Atoms with duplicate names',
                           e.compile)
def test_request_invalid_action(self):
    """Validation rejects a request dict carrying an unknown action."""
    original = pr.Request(utils.DummyTask("hi"),
                          uuidutils.generate_uuid(),
                          pr.EXECUTE, {}, 1.0)
    # Corrupt the serialized form with an action that does not exist.
    request = original.to_dict()
    request['action'] = 'NOTHING'
    self.assertRaises(excp.InvalidFormat, pr.Request.validate, request)
def setUp(self):
    """Build the dummy task and fixed metadata shared by every test."""
    super(TestRemoteTask, self).setUp()
    # Deterministic identifiers/arguments reused across the test cases.
    self.timeout = 60
    self.task_args = {'context': 'context'}
    self.task_action = 'execute'
    self.task_uuid = 'task-uuid'
    self.task = utils.DummyTask()
def setUp(self):
    """Build the dummy task and fixed metadata shared by every test."""
    super(TestProtocol, self).setUp()
    # Deterministic identifiers/arguments reused across the test cases.
    self.timeout = 60
    self.task_args = {'a': 'a'}
    self.task_action = 'execute'
    self.task_uuid = 'task-uuid'
    self.task = utils.DummyTask()
def _create_engine(**kwargs):
    """Build a worker-based engine around a single dummy-task flow."""
    test_flow = lf.Flow('test-flow').add(utils.DummyTask())
    storage_backend = backends.fetch({'connection': 'memory'})
    detail = pu.create_flow_detail(test_flow, backend=storage_backend)
    # Copy the options so callers' kwargs are never mutated downstream.
    return engine.WorkerBasedActionEngine(test_flow, detail,
                                          storage_backend, dict(kwargs))
def setUp(self):
    """Set up the executor under test with all collaborators mocked out.

    Patches the proxy and request classes, wires fake start/stop
    side-effects so tests can synchronize on proxy startup/shutdown,
    and builds a canned RESPONSE message keyed to the task uuid.
    """
    super(TestWorkerTaskExecutor, self).setUp()
    # Canned task plus fixed metadata reused by the individual tests.
    self.task = test_utils.DummyTask()
    self.task_uuid = 'task-uuid'
    self.task_args = {'a': 'a'}
    self.task_result = 'task-result'
    self.task_failures = {}
    self.timeout = 60
    self.broker_url = 'broker-url'
    self.executor_uuid = 'executor-uuid'
    self.executor_exchange = 'executor-exchange'
    self.executor_topic = 'test-topic1'
    # Set/cleared by the fake proxy start/stop side-effects below so
    # tests can wait until the (mocked) proxy is "running".
    self.proxy_started_event = threading_utils.Event()
    # patch classes
    self.proxy_mock, self.proxy_inst_mock = self.patchClass(
        executor.proxy, 'Proxy')
    # autospec=False because request attributes (uuid, expired, ...) are
    # assigned directly on the mock instance just below.
    self.request_mock, self.request_inst_mock = self.patchClass(
        executor.pr, 'Request', autospec=False)
    # other mocking
    self.proxy_inst_mock.start.side_effect = self._fake_proxy_start
    self.proxy_inst_mock.stop.side_effect = self._fake_proxy_stop
    self.request_inst_mock.uuid = self.task_uuid
    self.request_inst_mock.expired = False
    self.request_inst_mock.task_cls = self.task.name
    self.wait_for_any_mock = self.patch(
        'taskflow.engines.worker_based.executor.async_utils.wait_for_any')
    # A fake incoming message shaped like a RESPONSE for our task, so
    # tests can feed it straight into the executor's message handler.
    self.message_mock = mock.MagicMock(name='message')
    self.message_mock.properties = {
        'correlation_id': self.task_uuid,
        'type': pr.RESPONSE
    }
def setUp(self):
    """Set up the executor under test with its proxy mocked out.

    Patches the proxy class, wires fake start/stop side-effects so
    tests can synchronize on proxy startup/shutdown, and builds mock
    message/task objects keyed to the task uuid.
    """
    super(TestWorkerTaskExecutor, self).setUp()
    # Canned task plus fixed metadata reused by the individual tests.
    self.task = utils.DummyTask()
    self.task_uuid = 'task-uuid'
    self.task_args = {'context': 'context'}
    self.task_result = 'task-result'
    self.task_failures = {}
    self.timeout = 60
    self.broker_url = 'test-url'
    self.executor_uuid = 'executor-uuid'
    self.executor_exchange = 'executor-exchange'
    self.executor_topic = 'executor-topic'
    # Maps the executor's topic to the task names it can run.
    self.executor_workers_info = {self.executor_topic: [self.task.name]}
    # Set/cleared by the fake proxy start/stop side-effects below so
    # tests can wait until the (mocked) proxy is "running".
    self.proxy_started_event = threading.Event()
    # patch classes
    self.proxy_mock, self.proxy_inst_mock = self._patch_class(
        executor.proxy, 'Proxy')
    # other mocking
    self.proxy_inst_mock.start.side_effect = self._fake_proxy_start
    self.proxy_inst_mock.stop.side_effect = self._fake_proxy_stop
    self.wait_for_any_mock = self._patch(
        'taskflow.engines.worker_based.executor.async_utils.wait_for_any')
    # A fake incoming message correlated with our task's uuid.
    self.message_mock = mock.MagicMock(name='message')
    self.message_mock.properties = {'correlation_id': self.task_uuid}
    self.remote_task_mock = mock.MagicMock(uuid=self.task_uuid)
def setUp(self):
    """Reset stopwatch overrides and build shared request metadata."""
    super(TestRequestCache, self).setUp()
    # Any stopwatch time overrides a test installs are undone afterwards.
    self.addCleanup(timing.StopWatch.clear_overrides)
    self.timeout = 60
    self.task_args = {'a': 'a'}
    self.task_action = 'execute'
    self.task_uuid = 'task-uuid'
    self.task = utils.DummyTask()
def test_send_and_dispatch(self):
    """Progress events sent over a channel reach the registered task."""
    captured = []
    receiver = test_utils.DummyTask("rcver")
    receiver.notifier.register(
        task.EVENT_UPDATE_PROGRESS,
        lambda _event_type, details: captured.append(details))
    dispatcher = pu.Dispatcher({}, b'secret', b'server-josh')
    dispatcher.setup()
    dispatcher.targets[b'child-josh'] = receiver
    worker = threading.Thread(target=asyncore.loop,
                              kwargs={'map': dispatcher.map})
    worker.start()
    self.addCleanup(worker.join)
    channel = pu.Channel(dispatcher.port, b'child-josh', b'secret')
    self.addCleanup(channel.close)
    # Nine distinct progress updates: 0.1, 0.2, ..., 0.9.
    expected = [{'progress': round(0.1 * i, 1)} for i in range(1, 10)]
    sender = pu.EventSender(channel)
    for details in expected:
        sender(task.EVENT_UPDATE_PROGRESS, details)
    # This forces the thread to shutdown (since the asyncore loop
    # will exit when no more sockets exist to process...)
    dispatcher.close()
    self.assertEqual(len(expected), len(captured))
    self.assertEqual(expected, captured)
def setUp(self):
    """Freeze the clock for each test and build shared metadata."""
    super(TestProtocol, self).setUp()
    # Pin time so timeout/expiry calculations are deterministic; the
    # real clock is restored when the test finishes.
    timeutils.set_time_override()
    self.addCleanup(timeutils.clear_time_override)
    self.timeout = 60
    self.task_args = {'a': 'a'}
    self.task_action = 'execute'
    self.task_uuid = 'task-uuid'
    self.task = utils.DummyTask()
def setUpClass(cls):
    """Create one fully populated dummy task shared by this class."""
    # Create a task for taskdetails to be made from
    uuid = uuidutils.generate_uuid()
    tsk = utils.DummyTask('task-%s' % uuid, uuid)
    # Give the task one requirement, one optional input and one output.
    tsk.requires.update('r')
    tsk.optional.update('o')
    tsk.provides.update('p')
    cls.tsks.append(tsk)
def test_creation_default(self):
    """With empty options the executor gets all-default configuration."""
    flow = lf.Flow('test-flow').add(utils.DummyTask())
    _, flow_detail = pu.temporary_flow_detail()
    engine.WorkerBasedActionEngine(flow, flow_detail, None, {}).compile()
    # The executor must have been constructed exactly once, with every
    # option at its default value.
    self.assertEqual(self.master_mock.mock_calls, [
        mock.call.executor_class(uuid=flow_detail.uuid,
                                 url=None,
                                 exchange='default',
                                 workers_info={},
                                 transport=None,
                                 transport_options=None),
    ])
def setUpClass(cls):
    """Create the workflow and task fixtures shared by this class."""
    # Create a workflow for flowdetails to use
    flow_uuid = uuidutils.generate_uuid()
    cls.wfs.append(flow.Flow('wf-%s' % flow_uuid, None, flow_uuid))
    # Create a task for taskdetails to use
    task_uuid = uuidutils.generate_uuid()
    cls.tsks.append(utils.DummyTask('task-%s' % task_uuid, task_uuid))
def test_creation_custom(self):
    """Custom engine options are forwarded verbatim to the executor."""
    flow = lf.Flow('test-flow').add(utils.DummyTask())
    _, flow_detail = pu.temporary_flow_detail()
    config = {
        'url': self.broker_url,
        'exchange': self.exchange,
        'topics': self.topics,
        'transport': 'memory',
        'transport_options': {},
        'transition_timeout': 200,
    }
    engine.WorkerBasedActionEngine(flow, flow_detail, None,
                                   config).compile()
    # Every configuration key must appear unchanged in the constructor
    # call made on the (mocked) executor class.
    self.assertEqual(self.master_mock.mock_calls, [
        mock.call.executor_class(uuid=flow_detail.uuid,
                                 url=self.broker_url,
                                 exchange=self.exchange,
                                 topics=self.topics,
                                 transport='memory',
                                 transport_options={},
                                 transition_timeout=200),
    ])
def test_task(self):
    """Compiling a lone task yields a single-node, edge-free graph."""
    lone = test_utils.DummyTask(name='a')
    compilation = compiler.PatternCompiler(lone).compile()
    g = _replicate_graph_with_names(compilation)
    self.assertEqual(['a'], list(g.nodes()))
    self.assertEqual([], list(g.edges()))
def my_flow_factory(task_name):
    """Factory producing a single dummy task carrying the given name."""
    produced = test_utils.DummyTask(name=task_name)
    return produced
def _make_many(amount):
    """Return ``amount`` dummy tasks named 'a', 'b', 'c', ...

    :param amount: number of tasks to build; capped at 26 since every
        task is named after a distinct lowercase ascii letter.
    """
    assert amount <= len(string.ascii_lowercase), 'Not enough letters'
    # A comprehension over the letters themselves replaces the manual
    # index loop + append (and the redundant range() start argument).
    return [t_utils.DummyTask(name=letter)
            for letter in string.ascii_lowercase[:amount]]
def test_flatten_checks_for_dups(self):
    """Flattening a flow holding duplicate task names must fail."""
    flo = gf.Flow("test").add(t_utils.DummyTask(name="a"),
                              t_utils.DummyTask(name="a"))
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    with self.assertRaisesRegex(exc.InvariantViolationException,
                                '^Tasks with duplicate names'):
        f_utils.flatten(flo)
def test_request(self):
    """A freshly built request round-trips through dict validation."""
    request = pr.Request(utils.DummyTask("hi"),
                         uuidutils.generate_uuid(),
                         pr.EXECUTE, {}, None, 1.0)
    # Serializing and validating a well-formed request must not raise.
    pr.Request.validate(request.to_dict())
def test_checks_for_dups_globally(self):
    """Compilation rejects duplicate atom names across nested flows."""
    flo = gf.Flow("test").add(
        gf.Flow("int1").add(test_utils.DummyTask(name="a")),
        gf.Flow("int2").add(test_utils.DummyTask(name="a")))
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    self.assertRaisesRegex(exc.Duplicate, '^Atoms with duplicate names',
                           compiler.PatternCompiler(flo).compile)
def test_multi_message(self):
    """All three message kinds reach their registered proxy handlers."""
    message_count = 30
    barrier = latch.Latch(message_count)

    # PEP 8 (E731): use a def, not a lambda assigned to a name, for the
    # shared countdown callback every handler funnels through.
    def countdown(data, message):
        barrier.countdown()

    on_notify = mock.MagicMock()
    on_notify.side_effect = countdown
    on_response = mock.MagicMock()
    on_response.side_effect = countdown
    on_request = mock.MagicMock()
    on_request.side_effect = countdown
    handlers = {
        pr.NOTIFY: on_notify,
        pr.RESPONSE: on_response,
        pr.REQUEST: on_request,
    }
    p = proxy.Proxy(TEST_TOPIC, TEST_EXCHANGE, handlers,
                    transport='memory',
                    transport_options={
                        'polling_interval': POLLING_INTERVAL,
                    })
    t = threading_utils.daemon_thread(p.start)
    t.start()
    p.wait()
    # Round-robin the three message kinds so each handler sees 10.
    for i in range(message_count):
        j = i % 3
        if j == 0:
            p.publish(pr.Notify(), TEST_TOPIC)
        elif j == 1:
            p.publish(pr.Response(pr.RUNNING), TEST_TOPIC)
        else:
            p.publish(
                pr.Request(test_utils.DummyTask("dummy_%s" % i),
                           uuidutils.generate_uuid(),
                           pr.EXECUTE, [], None),
                TEST_TOPIC)
    # Wait for every message to be dispatched before shutting down.
    self.assertTrue(barrier.wait(test_utils.WAIT_TIMEOUT))
    self.assertEqual(0, barrier.needed)
    p.stop()
    t.join()
    self.assertTrue(on_notify.called)
    self.assertTrue(on_response.called)
    self.assertTrue(on_request.called)
    self.assertEqual(10, on_notify.call_count)
    self.assertEqual(10, on_response.call_count)
    self.assertEqual(10, on_request.call_count)
    call_count = sum([
        on_notify.call_count,
        on_response.call_count,
        on_request.call_count,
    ])
    self.assertEqual(message_count, call_count)
def test_task(self):
    """A single task compiles to a one-node graph with no edges."""
    lone_task = test_utils.DummyTask(name='a')
    graph = compiler.PatternCompiler(lone_task).compile().execution_graph
    self.assertEqual([lone_task], list(graph.nodes()))
    self.assertEqual([], list(graph.edges()))