def setUp(self) -> None:
    """Populate the data store, then wire up a publisher/subscriber pair."""
    super(TestWorkflowSubscriber, self).setUp()
    self.scheduler.ws_data_mgr = DataStoreMgr(self.scheduler)

    # Insert every configured task into the pool.
    for task_name in self.scheduler.config.taskdefs:
        proxy = create_task_proxy(
            task_name=task_name,
            suite_config=self.suite_config,
            is_startup=True
        )
        num_warnings = self.task_pool.insert_tasks(
            items=[proxy.identity],
            stopcp=None,
            no_check=False
        )
        assert num_warnings == 0
    self.task_pool.release_runahead_tasks()

    self.scheduler.ws_data_mgr.initiate_data_model()
    self.workflow_id = self.scheduler.ws_data_mgr.workflow_id

    # Start publishing, then attach a subscriber to the chosen port.
    self.publisher = WorkflowPublisher(
        self.suite_name, threaded=False, daemon=True)
    self.publisher.start(*PORT_RANGE)
    self.subscriber = WorkflowSubscriber(
        self.suite_name,
        host=self.scheduler.host,
        port=self.publisher.port,
        topics=[b'workflow'])
    # Give the subscriber a moment to finish connecting,
    # otherwise it misses the first message.
    sleep(1.0)

    self.topic = None
    self.data = None
def main(_, options, *args):
    """CLI entry point: subscribe to a suite's published deltas.

    Args:
        options: Parsed CLI options (host, port, owner, topics, once).
        args[0]: Suite name.
        args[1] (optional): Contact info in the form "user@host:port".

    Exits with status 1 on a malformed USER_AT_HOST argument.
    """
    suite = args[0]
    if len(args) > 1:
        try:
            user_at_host, options.port = args[1].split(':')
            options.owner, options.host = user_at_host.split('@')
        except ValueError:
            print(('USER_AT_HOST must take the form '
                   '"user@host:port"'), file=sys.stderr)
            sys.exit(1)
    elif options.host is None or options.port is None:
        try:
            while True:
                try:
                    options.host, _, options.port = get_location(
                        suite, options.owner, options.host)
                except (ClientError, IOError, TypeError, ValueError):
                    # Suite may not be contactable yet; retry until it is.
                    time.sleep(3)
                    continue
                break
        except KeyboardInterrupt:
            # was exit(): prefer sys.exit() — the bare exit() builtin is
            # injected by the site module and not guaranteed to exist.
            sys.exit()
    print(f'Connecting to tcp://{options.host}:{options.port}')
    topic_set = set()
    topic_set.add(b'shutdown')
    for topic in options.topics.split(','):
        topic_set.add(topic.encode('utf-8'))

    subscriber = WorkflowSubscriber(
        suite,
        host=options.host,
        port=options.port,
        topics=topic_set)

    subscriber.loop.create_task(
        subscriber.subscribe(
            process_delta_msg,
            func=print_message,
            subscriber=subscriber,
            once=options.once))

    # Run the event loop until interrupted.
    try:
        subscriber.loop.run_forever()
    except (KeyboardInterrupt, SystemExit):
        print('\nDisconnecting')
        subscriber.stop()
        sys.exit()
def test_publish(self):
    """Publishing a delta delivers it to a connected subscriber."""
    self.publisher.start(*PORT_RANGE)
    subscriber = WorkflowSubscriber(
        self.suite_name,
        host=self.scheduler.host,
        port=self.publisher.port,
        topics=[b'workflow'])
    # Pause so the subscriber can finish connecting; a publish
    # before the connection completes would be missed.
    sleep(1.0)

    self.publisher.publish(self.pub_data)
    raw_topic, raw_msg = subscriber.loop.run_until_complete(
        subscriber.socket.recv_multipart())
    topic = raw_topic.decode('utf-8')
    delta = DELTAS_MAP[topic]()
    delta.ParseFromString(raw_msg)
    self.assertEqual(delta.id, self.workflow_id)
    subscriber.stop()

    # Publishing bad data should be logged as an error, not raised.
    with self.assertLogs(LOG, level='ERROR') as cm:
        self.publisher.publish(None)
    self.assertIn('publish: ', cm.output[0])
def main(_, options, *args):
    """Connect to a suite's publisher and print received deltas."""
    suite = args[0]
    try:
        # Poll until the suite's contact information can be resolved.
        while True:
            try:
                host, _, port = get_location(suite)
            except (ClientError, IOError, TypeError, ValueError) as exc:
                print(exc)
                time.sleep(3)
            else:
                break
    except KeyboardInterrupt:
        sys.exit()
    print(f'Connecting to tcp://{host}:{port}')

    # Always watch the shutdown topic alongside the requested ones.
    topic_set = {b'shutdown'}
    topic_set.update(
        topic.encode('utf-8') for topic in options.topics.split(','))

    subscriber = WorkflowSubscriber(
        suite, host=host, port=port, topics=topic_set)
    subscriber.loop.create_task(
        subscriber.subscribe(
            process_delta_msg,
            func=print_message,
            subscriber=subscriber,
            once=options.once))

    # Spin the event loop until interrupted.
    try:
        subscriber.loop.run_forever()
    except (KeyboardInterrupt, SystemExit):
        print('\nDisconnecting')
        subscriber.stop()
        sys.exit()
async def test_publisher(flow, scheduler, run, one_conf, port_range):
    """It should publish deltas when the flow starts."""
    reg = flow(one_conf)
    schd = scheduler(reg, hold_start=False)
    async with run(schd):
        # Attach a subscriber to the scheduler's publisher port.
        subscriber = WorkflowSubscriber(
            schd.suite,
            host=schd.host,
            port=schd.publisher.port,
            topics=[b'workflow'])

        async with timeout(2):
            # Block until the first delta arrives from the workflow.
            topic, payload = await subscriber.socket.recv_multipart()
            _, delta = process_delta_msg(topic, payload, None)

        assert schd.id == delta.added.id
def start_subscription(self, w_id, reg, host, port):
    """Instantiate and run subscriber data-store sync.

    Args:
        w_id (str): Workflow external ID.
        reg (str): Registered workflow name.
        host (str): Hostname of target workflow.
        port (int): Port of target workflow.

    """
    subscriber = WorkflowSubscriber(
        reg,
        host=host,
        port=port,
        context=self.workflows_mgr.context,
        topics=self.topics)
    self.w_subs[w_id] = subscriber
    # Drive the subscription loop, routing each delta message
    # into the workflow data updater.
    subscriber.loop.run_until_complete(
        subscriber.subscribe(
            process_delta_msg,
            func=self.update_workflow_data,
            w_id=w_id))
async def test_publisher(flow, scheduler, run, one_conf, port_range):
    """It should publish deltas when the flow starts."""
    reg = flow(one_conf)
    schd = scheduler(reg, paused_start=False)
    async with run(schd):
        # Hook a subscriber up to the scheduler's publisher.
        subscriber = WorkflowSubscriber(
            schd.workflow,
            host=schd.host,
            port=schd.publisher.port,
            topics=[b'workflow'])

        async with timeout(2):
            # Wait for the first delta published by the workflow,
            # then check it carries this scheduler's workflow ID.
            topic, payload = await subscriber.socket.recv_multipart()
            _, delta = process_delta_msg(topic, payload, None)
            matched = False
            for field in ('added', 'updated'):
                if getattr(getattr(delta, field), 'id', None):
                    assert schd.id == getattr(delta, field).id
                    matched = True
                    break
            if not matched:
                raise Exception("Delta wasn't added or updated")
class TestWorkflowSubscriber(CylcWorkflowTestCase):
    """Test the subscriber class components."""

    # Registered name of the test suite fixture.
    suite_name = "five"
    # Suite definition used to build the scheduler under test.
    suiterc = """
[meta]
    title = "Inter-cycle dependence + a cold-start task"
[cylc]
    UTC mode = True
[scheduling]
    #runahead limit = 120
    initial cycle point = 20130808T00
    final cycle point = 20130812T00
    [[graph]]
        R1 = "prep => foo"
        PT12H = "foo[-PT12H] => foo => bar"
[visualization]
    initial cycle point = 20130808T00
    final cycle point = 20130808T12
    [[node attributes]]
        foo = "color=red"
        bar = "color=blue"
"""

    def setUp(self) -> None:
        """Populate the data store, then start a publisher/subscriber pair."""
        super(TestWorkflowSubscriber, self).setUp()
        self.scheduler.ws_data_mgr = DataStoreMgr(self.scheduler)
        # Insert every configured task into the pool.
        for name in self.scheduler.config.taskdefs:
            task_proxy = create_task_proxy(
                task_name=name,
                suite_config=self.suite_config,
                is_startup=True
            )
            warnings = self.task_pool.insert_tasks(
                items=[task_proxy.identity],
                stopcp=None,
                no_check=False
            )
            assert warnings == 0
        self.task_pool.release_runahead_tasks()
        self.scheduler.ws_data_mgr.initiate_data_model()
        self.workflow_id = self.scheduler.ws_data_mgr.workflow_id
        self.publisher = WorkflowPublisher(
            self.suite_name, threaded=False, daemon=True)
        self.publisher.start(*PORT_RANGE)
        self.subscriber = WorkflowSubscriber(
            self.suite_name,
            host=self.scheduler.host,
            port=self.publisher.port,
            topics=[b'workflow'])
        # delay to allow subscriber to connection,
        # otherwise it misses the first message
        sleep(1.0)
        # Filled in by msg_process() during test_subscribe.
        self.topic = None
        self.data = None

    def tearDown(self):
        """Shut down the subscriber and publisher started by setUp."""
        self.subscriber.stop()
        self.publisher.stop()

    def test_constructor(self):
        """Test class constructor result."""
        self.assertIsNotNone(self.subscriber.context)
        self.assertFalse(self.subscriber.socket.closed)

    def test_subscribe(self):
        """Test publishing data."""
        pub_data = self.scheduler.ws_data_mgr.get_publish_deltas()
        self.publisher.publish(pub_data)

        def msg_process(btopic, msg):
            # Stop after the first message and record what was received.
            self.subscriber.stopping = True
            self.topic, self.data = process_delta_msg(btopic, msg, None)
        self.subscriber.loop.run_until_complete(
            self.subscriber.subscribe(msg_process))
        self.assertEqual(self.data.id, self.workflow_id)