Example #1
async def mock_flow(
    mod_flow: Callable[..., str],
    mod_scheduler: Callable[..., Scheduler],
    mod_start,
) -> Scheduler:
    ret = Mock()
    ret.reg = mod_flow({
        'scheduler': {
            'allow implicit tasks': True
        },
        'scheduling': {
            'initial cycle point': '2000',
            'dependencies': {
                'R1': 'prep => foo',
                'PT12H': 'foo[-PT12H] => foo => bar'
            }
        }
    })

    ret.schd = mod_scheduler(ret.reg, paused_start=True)
    async with mod_start(ret.schd):
        ret.schd.pool.release_runahead_tasks()
        ret.schd.data_store_mgr.initiate_data_model()

        ret.owner = ret.schd.owner
        ret.name = ret.schd.workflow
        ret.id = list(ret.schd.data_store_mgr.data.keys())[0]
        ret.resolvers = Resolvers(ret.schd.data_store_mgr, schd=ret.schd)
        ret.data = ret.schd.data_store_mgr.data[ret.id]
        ret.node_ids = [node.id for node in ret.data[TASK_PROXIES].values()]
        ret.edge_ids = [edge.id for edge in ret.data[EDGES].values()]

        yield ret
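The fixture above is consumed by module-scoped integration tests. Below is a minimal sketch of such a test, assuming the same FLOW_ARGS filter dict used by the unittest-style example later in this listing; the pytest.mark.asyncio marker is illustrative and the real test suite may configure async tests differently.

import pytest
from copy import deepcopy

@pytest.mark.asyncio
async def test_get_workflows(mock_flow):
    # Restrict the query to the workflow owned by the fixture's scheduler.
    args = deepcopy(FLOW_ARGS)
    args['workflows'].append((mock_flow.owner, mock_flow.name, None))
    # Resolvers.get_workflows is a coroutine returning matching workflow messages.
    flow_msgs = await mock_flow.resolvers.get_workflows(args)
    assert len(flow_msgs) == 1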
Example #2
async def flow(mod_flow, mod_scheduler, mod_run):
    ret = Mock()
    ret.reg = mod_flow({
        'scheduler': {
            'allow implicit tasks': True
        },
        'scheduling': {
            'initial cycle point': '2000',
            'dependencies': {
                'R1': 'prep => foo',
                'PT12H': 'foo[-PT12H] => foo => bar'
            }
        }
    })

    ret.schd = mod_scheduler(ret.reg, paused_start=True)
    await ret.schd.install()
    await ret.schd.initialise()
    await ret.schd.configure()
    ret.schd.release_runahead_tasks()
    ret.schd.data_store_mgr.initiate_data_model()

    ret.owner = ret.schd.owner
    ret.name = ret.schd.suite
    ret.id = list(ret.schd.data_store_mgr.data.keys())[0]
    ret.resolvers = Resolvers(ret.schd.data_store_mgr, schd=ret.schd)
    ret.data = ret.schd.data_store_mgr.data[ret.id]
    ret.node_ids = [node.id for node in ret.data[TASK_PROXIES].values()]
    ret.edge_ids = [edge.id for edge in ret.data[EDGES].values()]

    yield ret
Example #3
 def setUp(self) -> None:
     super(TestResolvers, self).setUp()
     self.scheduler.ws_data_mgr = WsDataMgr(self.scheduler)
     for name in self.scheduler.config.taskdefs:
         task_proxy = create_task_proxy(task_name=name,
                                        suite_config=self.suite_config,
                                        is_startup=True)
         warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                                stopcp=None,
                                                no_check=False)
         assert 0 == warnings
     self.task_pool.release_runahead_tasks()
     self.scheduler.ws_data_mgr.initiate_data_model()
     self.workflow_id = self.scheduler.ws_data_mgr.workflow_id
     self.data = self.scheduler.ws_data_mgr.data[self.workflow_id]
     self.node_ids = [node.id for node in self.data[TASK_PROXIES].values()]
     self.edge_ids = [edge.id for edge in self.data[EDGES].values()]
     self.resolvers = Resolvers(self.scheduler.ws_data_mgr.data,
                                schd=self.scheduler)
Example #4
 def __init__(self, schd):
     ZMQServer.__init__(
         self,
         encrypt,
         decrypt,
         partial(get_secret, schd.suite)
     )
     self.schd = schd
     self.public_priv = None  # update in get_public_priv()
     self.resolvers = Resolvers(self.schd)
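Note: in this older example the Resolvers constructor takes only the scheduler, whereas the later examples below pass the data store (or the data-store manager) as the first argument and supply the scheduler via the schd keyword.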
Example #5
 def __init__(self,
              schd,
              context=None,
              barrier=None,
              threaded=True,
              daemon=False):
     super().__init__(zmq.REP,
                      bind=True,
                      context=context,
                      barrier=barrier,
                      threaded=threaded,
                      daemon=daemon)
     self.schd = schd
     self.suite = schd.suite
     self.public_priv = None  # update in get_public_priv()
     self.endpoints = None
     self.queue = None
     self.resolvers = Resolvers(self.schd.ws_data_mgr.data, schd=self.schd)
Example #6
File: server.py Project: lparkes/cylc
 def __init__(self,
              schd,
              context=None,
              barrier=None,
              threaded=True,
              daemon=False):
     super().__init__(zmq.REP,
                      bind=True,
                      context=context,
                      barrier=barrier,
                      threaded=threaded,
                      daemon=daemon)
     self.schd = schd
     self.workflow = schd.workflow
     self.public_priv = None  # update in get_public_priv()
     self.endpoints = None
     self.queue = None
     self.resolvers = Resolvers(self.schd.data_store_mgr, schd=self.schd)
     self.middleware = [
         IgnoreFieldMiddleware,
     ]
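With the resolvers attached, the server can answer data-store queries on behalf of clients. A rough, hypothetical sketch of such a handler on the server class is shown below; the method name and the request_args parameter are illustrative, not part of the cylc API.

async def _get_workflow(self, request_args):
    # Copy the caller's filter arguments and restrict them to this workflow,
    # mirroring how the tests build their 'workflows' filter entries.
    args = dict(request_args)
    args.setdefault('workflows', []).append(
        (self.schd.owner, self.schd.workflow, None))
    return await self.resolvers.get_workflows(args)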
Example #7
class TestResolvers(CylcWorkflowTestCase):

    suite_name = "five"
    suiterc = """
[meta]
    title = "Inter-cycle dependence + a cold-start task"
[cylc]
    UTC mode = True
[scheduling]
    #runahead limit = 120
    initial cycle point = 20130808T00
    final cycle point = 20130812T00
    [[graph]]
        R1 = "prep => foo"
        PT12H = "foo[-PT12H] => foo => bar"
[visualization]
    initial cycle point = 20130808T00
    final cycle point = 20130808T12
    [[node attributes]]
        foo = "color=red"
        bar = "color=blue"

    """

    def setUp(self) -> None:
        super(TestResolvers, self).setUp()
        self.scheduler.ws_data_mgr = WsDataMgr(self.scheduler)
        for name in self.scheduler.config.taskdefs:
            task_proxy = create_task_proxy(task_name=name,
                                           suite_config=self.suite_config,
                                           is_startup=True)
            warnings = self.task_pool.insert_tasks(items=[task_proxy.identity],
                                                   stopcp=None,
                                                   no_check=False)
            assert 0 == warnings
        self.task_pool.release_runahead_tasks()
        self.scheduler.ws_data_mgr.initiate_data_model()
        self.workflow_id = self.scheduler.ws_data_mgr.workflow_id
        self.data = self.scheduler.ws_data_mgr.data[self.workflow_id]
        self.node_ids = [node.id for node in self.data[TASK_PROXIES].values()]
        self.edge_ids = [edge.id for edge in self.data[EDGES].values()]
        self.resolvers = Resolvers(self.scheduler.ws_data_mgr.data,
                                   schd=self.scheduler)

    def test_constructor(self):
        self.assertIsNotNone(self.resolvers.schd)

    def test_get_workflows(self):
        """Test method returning workflow messages satisfying filter args."""
        args = deepcopy(FLOW_ARGS)
        args['workflows'].append((self.owner, self.suite_name, None))
        flow_msgs = _run_coroutine(self.resolvers.get_workflows(args))
        self.assertEqual(1, len(flow_msgs))

    def test_get_nodes_all(self):
        """Test method returning workflow(s) node messages
        satisfying filter args."""
        args = deepcopy(NODE_ARGS)
        args['workflows'].append((self.owner, self.suite_name, None))
        args['states'].append('failed')
        nodes = _run_coroutine(self.resolvers.get_nodes_all(
            TASK_PROXIES, args))
        self.assertEqual(0, len(nodes))
        args['ghosts'] = True
        args['states'] = []
        args['ids'].append(parse_node_id(self.node_ids[0], TASK_PROXIES))
        nodes = [
            n for n in _run_coroutine(
                self.resolvers.get_nodes_all(TASK_PROXIES, args))
            if n in self.data[TASK_PROXIES].values()
        ]
        self.assertEqual(1, len(nodes))

    def test_get_nodes_by_ids(self):
        """Test method returning workflow(s) node messages
        who's ID is a match to any given."""
        args = deepcopy(NODE_ARGS)
        args['workflows'].append((self.owner, self.suite_name, None))
        nodes = _run_coroutine(
            self.resolvers.get_nodes_by_ids(TASK_PROXIES, args))
        self.assertEqual(0, len(nodes))
        args['ghosts'] = True
        args['native_ids'] = self.node_ids
        nodes = [
            n for n in _run_coroutine(
                self.resolvers.get_nodes_by_ids(TASK_PROXIES, args))
            if n in self.data[TASK_PROXIES].values()
        ]
        self.assertTrue(len(nodes) > 0)

    def test_get_node_by_id(self):
        """Test method returning a workflow node message
        who's ID is a match to that given."""
        args = deepcopy(NODE_ARGS)
        args['id'] = f'me{ID_DELIM}mine{ID_DELIM}20500808T00{ID_DELIM}jin'
        args['workflows'].append((self.owner, self.suite_name, None))
        node = _run_coroutine(self.resolvers.get_node_by_id(
            TASK_PROXIES, args))
        self.assertIsNone(node)
        args['id'] = self.node_ids[0]
        node = _run_coroutine(self.resolvers.get_node_by_id(
            TASK_PROXIES, args))
        self.assertTrue(node in self.data[TASK_PROXIES].values())

    def test_get_edges_all(self):
        """Test method returning all workflow(s) edges."""
        edges = [
            e for e in _run_coroutine(self.resolvers.get_edges_all(FLOW_ARGS))
            if e in self.data[EDGES].values()
        ]
        self.assertTrue(len(edges) > 0)

    def test_get_edges_by_ids(self):
        """Test method returning workflow(s) edge messages
        who's ID is a match to any given edge IDs."""
        args = deepcopy(NODE_ARGS)
        edges = _run_coroutine(self.resolvers.get_edges_by_ids(args))
        self.assertEqual(0, len(edges))
        args['native_ids'] = self.edge_ids
        edges = [
            e for e in _run_coroutine(self.resolvers.get_edges_by_ids(args))
            if e in self.data[EDGES].values()
        ]
        self.assertTrue(len(edges) > 0)

    def test_mutator(self):
        """Test the mutation method."""
        w_args = deepcopy(FLOW_ARGS)
        w_args['workflows'].append((self.owner, self.suite_name, None))
        args = {}
        response = _run_coroutine(
            self.resolvers.mutator(None, 'hold_suite', w_args, args))
        self.assertEqual(response[0]['id'], self.workflow_id)

    def test_nodes_mutator(self):
        """Test the nodes mutation method."""
        w_args = deepcopy(FLOW_ARGS)
        w_args['workflows'].append((self.owner, self.suite_name, None))
        args = {}
        ids = [parse_node_id(n, TASK_PROXIES) for n in self.node_ids]
        response = _run_coroutine(
            self.resolvers.nodes_mutator(None, 'trigger_tasks', ids, w_args,
                                         args))
        self.assertEqual(response[0]['id'], self.workflow_id)

    def test_mutation_mapper(self):
        """Test the mapping of mutations to internal command methods."""
        response = _run_coroutine(
            self.resolvers._mutation_mapper('hold_suite', {}))
        self.assertIsNotNone(response)
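Outside the test harness, the same resolver methods can be awaited directly against a running scheduler. A minimal sketch, assuming a Scheduler instance schd whose data store has already been initiated and reusing the NODE_ARGS filter dict from the tests above:

from copy import deepcopy

async def list_task_proxy_ids(schd):
    # Build resolvers the same way the server example does.
    resolvers = Resolvers(schd.data_store_mgr, schd=schd)
    args = deepcopy(NODE_ARGS)
    args['workflows'].append((schd.owner, schd.workflow, None))
    args['ghosts'] = True  # include nodes regardless of state, as in the tests above
    nodes = await resolvers.get_nodes_all(TASK_PROXIES, args)
    return [node.id for node in nodes]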