Code Example #1
def setUp(self) -> None:
    super(TestDataStoreMgr, self).setUp()
    self.ws_data_mgr = DataStoreMgr(self.scheduler)
    for name in self.scheduler.config.taskdefs:
        task_proxy = create_task_proxy(task_name=name,
                                       suite_config=self.suite_config,
                                       is_startup=True)
        warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                               stopcp=None,
                                               no_check=False)
        assert 0 == warnings
    self.task_pool.release_runahead_tasks()
    self.data = self.ws_data_mgr.data[self.ws_data_mgr.workflow_id]
Code Example #2
def setUp(self) -> None:
    super(TestWorkflowSubscriber, self).setUp()
    self.scheduler.ws_data_mgr = DataStoreMgr(self.scheduler)
    for name in self.scheduler.config.taskdefs:
        task_proxy = create_task_proxy(
            task_name=name,
            suite_config=self.suite_config,
            is_startup=True
        )
        warnings = self.task_pool.insert_tasks(
            items=[task_proxy.identity],
            stopcp=None,
            no_check=False
        )
        assert warnings == 0
    self.task_pool.release_runahead_tasks()
    self.scheduler.ws_data_mgr.initiate_data_model()
    self.workflow_id = self.scheduler.ws_data_mgr.workflow_id
    self.publisher = WorkflowPublisher(
        self.suite_name, threaded=False, daemon=True)
    self.publisher.start(*PORT_RANGE)
    self.subscriber = WorkflowSubscriber(
        self.suite_name,
        host=self.scheduler.host,
        port=self.publisher.port,
        topics=[b'workflow'])
    # delay to allow the subscriber to connect,
    # otherwise it misses the first message
    sleep(1.0)
    self.topic = None
    self.data = None
Code Example #3
def setUp(self) -> None:
    super(TestSuiteRuntimeServer, self).setUp()
    self.scheduler.ws_data_mgr = DataStoreMgr(self.scheduler)
    for name in self.scheduler.config.taskdefs:
        task_proxy = create_task_proxy(task_name=name,
                                       suite_config=self.suite_config,
                                       is_startup=True)
        warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                               stopcp=None,
                                               no_check=False)
        assert 0 == warnings
    self.task_pool.release_runahead_tasks()
    self.scheduler.ws_data_mgr.initiate_data_model()
    self.workflow_id = self.scheduler.ws_data_mgr.workflow_id
    create_auth_files(self.suite_name)  # auth keys are required for comms
    barrier = Barrier(2, timeout=10)
    self.server = SuiteRuntimeServer(self.scheduler,
                                     context=SERVER_CONTEXT,
                                     threaded=True,
                                     barrier=barrier,
                                     daemon=True)
    self.server.public_priv = Priv.CONTROL
    self.server.start(*PORT_RANGE)
    # barrier.wait() doesn't seem to work properly here
    # so this workaround will do
    while barrier.n_waiting < 1:
        sleep(0.2)
    barrier.wait()
    sleep(0.5)
Code Example #4
def xtrigger_mgr() -> XtriggerManager:
    """A fixture to build an XtriggerManager which uses a mocked proc_pool,
    and uses a mocked broadcast_mgr."""
    return XtriggerManager(
        suite="sample_suite",
        user="******",
        proc_pool=Mock(put_command=lambda *a, **k: True),
        broadcast_mgr=Mock(put_broadcast=lambda *a, **k: True),
        data_store_mgr=DataStoreMgr(create_autospec(Scheduler)))
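A minimal usage sketch, not taken from the project: assuming the function above is registered as a pytest fixture (via @pytest.fixture) and that XtriggerManager keeps the injected objects on proc_pool and broadcast_mgr attributes, a test can receive the prepared manager simply by naming the fixture as a parameter. The test name below is hypothetical.

def test_xtrigger_mgr_has_mocked_collaborators(xtrigger_mgr):
    # The mocked methods are lambdas that return True for any arguments.
    assert xtrigger_mgr.proc_pool.put_command('echo hello')
    assert xtrigger_mgr.broadcast_mgr.put_broadcast()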
Code Example #5
def setUp(self) -> None:
    super(TestWorkflowPublisher, self).setUp()
    self.scheduler.ws_data_mgr = DataStoreMgr(self.scheduler)
    for name in self.scheduler.config.taskdefs:
        task_proxy = create_task_proxy(task_name=name,
                                       suite_config=self.suite_config,
                                       is_startup=True)
        warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                               stopcp=None,
                                               no_check=False)
        assert 0 == warnings
    self.task_pool.release_runahead_tasks()
    self.scheduler.ws_data_mgr.initiate_data_model()
    self.workflow_id = self.scheduler.ws_data_mgr.workflow_id
    self.publisher = WorkflowPublisher(self.suite_name,
                                       threaded=False,
                                       daemon=True)
    self.pub_data = self.scheduler.ws_data_mgr.get_publish_deltas()
Code Example #6
File: test_resolvers.py  Project: sllopis/cylc-flow
def setUp(self) -> None:
    super(TestResolvers, self).setUp()
    self.scheduler.data_store_mgr = DataStoreMgr(self.scheduler)
    for name in self.scheduler.config.taskdefs:
        task_proxy = create_task_proxy(task_name=name,
                                       suite_config=self.suite_config,
                                       is_startup=True)
        warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                               stopcp=None,
                                               check_point=True)
        assert 0 == warnings
    self.task_pool.release_runahead_tasks()
    self.scheduler.data_store_mgr.initiate_data_model()
    self.workflow_id = self.scheduler.data_store_mgr.workflow_id
    self.data = self.scheduler.data_store_mgr.data[self.workflow_id]
    self.node_ids = [node.id for node in self.data[TASK_PROXIES].values()]
    self.edge_ids = [edge.id for edge in self.data[EDGES].values()]
    self.resolvers = Resolvers(self.scheduler.data_store_mgr.data,
                               schd=self.scheduler)
Code Example #7
def setUp(self) -> None:
    super(TestSuiteRuntimeClient, self).setUp()
    self.scheduler.data_store_mgr = DataStoreMgr(self.scheduler)
    for name in self.scheduler.config.taskdefs:
        task_proxy = create_task_proxy(
            task_name=name,
            suite_config=self.suite_config,
            is_startup=True
        )
        warnings = self.task_pool.insert_tasks(
            items=[task_proxy.identity],
            stopcp=None,
            check_point=True
        )
        assert warnings == 0
    self.task_pool.release_runahead_tasks()
    self.scheduler.data_store_mgr.initiate_data_model()
    self.workflow_id = self.scheduler.data_store_mgr.workflow_id
    create_auth_files(self.suite_name)  # auth keys are required for comms
    barrier = Barrier(2, timeout=20)
    self.server = SuiteRuntimeServer(
        self.scheduler,
        context=SERVER_CONTEXT,
        threaded=True,
        barrier=barrier,
        daemon=True)
    port_range = glbl_cfg().get(['suite servers', 'run ports'])
    self.server.start(port_range[0], port_range[-1])
    # barrier.wait() doesn't seem to work properly here
    # so this workaround will do
    while barrier.n_waiting < 1:
        sleep(0.2)
    barrier.wait()
    sleep(0.5)
    self.client = SuiteRuntimeClient(
        self.scheduler.suite,
        host=self.scheduler.host,
        port=self.server.port)
    sleep(0.5)
Code Example #8
class TestDataStoreMgr(CylcWorkflowTestCase):

    suite_name = "five"
    suiterc = """
[meta]
    title = "Inter-cycle dependence + a cold-start task"
[cylc]
    UTC mode = True
[scheduling]
    #runahead limit = 120
    initial cycle point = 20130808T00
    final cycle point = 20130812T00
    [[graph]]
        R1 = "prep => foo"
        PT12H = "foo[-PT12H] => foo => bar"
[visualization]
    initial cycle point = 20130808T00
    final cycle point = 20130808T12
    [[node attributes]]
        foo = "color=red"
        bar = "color=blue"

    """

    def setUp(self) -> None:
        super(TestDataStoreMgr, self).setUp()
        self.ws_data_mgr = DataStoreMgr(self.scheduler)
        for name in self.scheduler.config.taskdefs:
            task_proxy = create_task_proxy(task_name=name,
                                           suite_config=self.suite_config,
                                           is_startup=True)
            warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                                   stopcp=None,
                                                   no_check=False)
            assert 0 == warnings
        self.task_pool.release_runahead_tasks()
        self.data = self.ws_data_mgr.data[self.ws_data_mgr.workflow_id]

    def test_constructor(self):
        self.assertEqual(f'{self.owner}{ID_DELIM}{self.suite_name}',
                         self.ws_data_mgr.workflow_id)
        self.assertFalse(self.ws_data_mgr.pool_points)

    def test_generate_definition_elements(self):
        """Test method that generates all definition elements."""
        task_defs = self.scheduler.config.taskdefs.keys()
        self.assertEqual(0, len(self.data[TASKS]))
        self.ws_data_mgr.generate_definition_elements()
        self.ws_data_mgr.apply_deltas()
        self.assertEqual(len(task_defs), len(self.data[TASKS]))

    def test_generate_graph_elements(self):
        """Test method that generates edge and ghost node elements
        by cycle point."""
        self.ws_data_mgr.generate_definition_elements()
        self.ws_data_mgr.apply_deltas()
        self.ws_data_mgr.pool_points = set(list(self.scheduler.pool.pool))
        tasks_proxies_generated = self.data[TASK_PROXIES]
        self.assertEqual(0, len(tasks_proxies_generated))
        self.ws_data_mgr.clear_deltas()
        self.ws_data_mgr.generate_graph_elements()
        self.ws_data_mgr.apply_deltas()
        self.assertEqual(3, len(tasks_proxies_generated))

    def test_get_data_elements(self):
        """Test method that returns data elements by specified type."""
        flow_msg = self.ws_data_mgr.get_data_elements(TASK_PROXIES)
        self.assertEqual(0, len(flow_msg.deltas))
        self.ws_data_mgr.initiate_data_model()
        flow_msg = self.ws_data_mgr.get_data_elements(TASK_PROXIES)
        self.assertEqual(len(flow_msg.deltas), len(self.data[TASK_PROXIES]))
        flow_msg = self.ws_data_mgr.get_data_elements(WORKFLOW)
        self.assertEqual(flow_msg.last_updated,
                         self.data[WORKFLOW].last_updated)
        none_msg = self.ws_data_mgr.get_data_elements('fraggle')
        self.assertEqual(0, len(none_msg.ListFields()))

    def test_get_entire_workflow(self):
        """Test method that populates the entire workflow protobuf message."""
        flow_msg = self.ws_data_mgr.get_entire_workflow()
        self.assertEqual(0, len(flow_msg.task_proxies))
        self.ws_data_mgr.initiate_data_model()
        flow_msg = self.ws_data_mgr.get_entire_workflow()
        self.assertEqual(len(flow_msg.task_proxies),
                         len(self.data[TASK_PROXIES]))

    def test_increment_graph_elements(self):
        """Test method that adds and removes elements by cycle point."""
        self.assertFalse(self.ws_data_mgr.pool_points)
        self.assertEqual(0, len(self.data[TASK_PROXIES]))
        self.ws_data_mgr.generate_definition_elements()
        self.ws_data_mgr.increment_graph_elements()
        self.ws_data_mgr.apply_deltas()
        self.assertTrue(self.ws_data_mgr.pool_points)
        self.assertEqual(3, len(self.data[TASK_PROXIES]))

    def test_initiate_data_model(self):
        """Test method that generates all data elements in order."""
        self.assertEqual(0, len(self.data[WORKFLOW].task_proxies))
        self.ws_data_mgr.initiate_data_model()
        self.assertEqual(3, len(self.data[WORKFLOW].task_proxies))
        self.ws_data_mgr.initiate_data_model(reloaded=True)
        self.assertEqual(3, len(self.data[WORKFLOW].task_proxies))

    def test_prune_points(self):
        """Test method that removes data elements by cycle point."""
        self.ws_data_mgr.initiate_data_model()
        points = self.ws_data_mgr.cycle_states.keys()
        point = next(iter(points))
        self.assertTrue(point in points)
        self.ws_data_mgr.clear_deltas()
        self.ws_data_mgr.prune_points([point])
        self.ws_data_mgr.apply_deltas()
        self.assertTrue(point not in points)

    def test_update_data_structure(self):
        """Test update_data_structure. This method will generate and
        apply deltas/updates given."""
        self.ws_data_mgr.initiate_data_model()
        self.assertEqual(0, len(self._collect_states(TASK_PROXIES)))
        update_tasks = self.task_pool.get_all_tasks()
        self.ws_data_mgr.update_data_structure(update_tasks)
        self.assertTrue(len(update_tasks) > 0)
        self.assertEqual(len(update_tasks),
                         len(self._collect_states(TASK_PROXIES)))

    def test_update_family_proxies(self):
        """Test update_family_proxies. This method will update all
        DataStoreMgr task_proxies of given cycle point strings."""
        self.ws_data_mgr.initiate_data_model()
        self.assertEqual(0, len(self._collect_states(FAMILY_PROXIES)))
        update_tasks = self.task_pool.get_all_tasks()
        update_points = set((str(t.point) for t in update_tasks))
        self.ws_data_mgr.clear_deltas()
        self.ws_data_mgr.update_task_proxies(update_tasks)
        self.ws_data_mgr.update_family_proxies(update_points)
        self.ws_data_mgr.apply_deltas()
        # Find families in updated cycle points
        point_fams = [
            f.id for f in self.data[FAMILY_PROXIES].values()
            if f.cycle_point in update_points
        ]
        self.assertTrue(len(point_fams) > 0)
        self.assertEqual(len(point_fams),
                         len(self._collect_states(FAMILY_PROXIES)))

    def test_update_task_proxies(self):
        """Test update_task_proxies. This method will iterate over given
        task instances (TaskProxy), and update any corresponding
        DataStoreMgr task_proxies."""
        self.ws_data_mgr.initiate_data_model()
        self.assertEqual(0, len(self._collect_states(TASK_PROXIES)))
        update_tasks = self.task_pool.get_all_tasks()
        self.ws_data_mgr.clear_deltas()
        self.ws_data_mgr.update_task_proxies(update_tasks)
        self.ws_data_mgr.apply_deltas()
        self.assertTrue(len(update_tasks) > 0)
        self.assertEqual(len(update_tasks),
                         len(self._collect_states(TASK_PROXIES)))

    def test_update_workflow(self):
        """Test method that updates the dynamic fields of the workflow msg."""
        self.ws_data_mgr.generate_definition_elements()
        self.ws_data_mgr.apply_deltas()
        old_time = self.data[WORKFLOW].last_updated
        self.ws_data_mgr.clear_deltas()
        self.ws_data_mgr.update_workflow()
        self.ws_data_mgr.apply_deltas()
        new_time = self.data[WORKFLOW].last_updated
        self.assertTrue(new_time > old_time)

    def _collect_states(self, node_type):
        return [
            t.state for t in self.data[node_type].values() if t.state != ''
        ]