async def test_workflow_params(flow, scheduler, start, one_conf, run_dir, mod_test_dir):
    """It should extract workflow params from the workflow database.

    Note:
        For this test we ensure that the workflow UUID is present in the
        params table.
    """
    reg = flow(one_conf)
    schd = scheduler(reg)
    async with start(schd):
        pipe = (
            # scan just this workflow
            scan(scan_dir=mod_test_dir)
            # escape the name: registration paths may contain regex
            # metacharacters
            | filter_name(rf'^{re.escape(reg)}$')
            | is_active(True)
            | workflow_params
        )
        # NOTE: loop variable renamed so it no longer shadows the `flow`
        # fixture parameter
        async for flow_ in pipe:
            # check the workflow_params field has been provided
            assert 'workflow_params' in flow_
            # check the workflow uuid key has been read from the DB
            uuid_key = WorkflowDatabaseManager.KEY_UUID_STR
            assert uuid_key in flow_['workflow_params']
            # check the workflow uuid key matches the scheduler value
            assert flow_['workflow_params'][uuid_key] == schd.uuid_str
            break
        else:
            # previously the test passed vacuously when the scan yielded
            # nothing — require at least one result
            raise Exception('Expected one scan result')
async def test_workflow_params(
    one, start, one_conf, run_dir, mod_test_dir
):
    """It should extract workflow params from the workflow database.

    Note:
        For this test we ensure that the workflow UUID is present in the
        params table.
    """
    async with start(one):
        # restrict the scan to exactly this workflow
        name_pattern = rf'^{re.escape(one.workflow)}$'
        pipe = (
            scan(scan_dir=mod_test_dir)
            | filter_name(name_pattern)
            | is_active(True)
            | workflow_params
        )
        matched = False
        async for item in pipe:
            matched = True
            # the workflow_params field must have been populated
            assert 'workflow_params' in item
            params = item['workflow_params']
            # the UUID recorded in the DB must match the scheduler's
            uuid_key = WorkflowDatabaseManager.KEY_UUID_STR
            assert uuid_key in params
            assert params[uuid_key] == one.uuid_str
            break
        if not matched:
            raise Exception('Expected one scan result')
def get_pipe(opts, formatter, scan_dir=None):
    """Construct a pipe for listing flows."""
    # choose the scan source
    if scan_dir:
        pipe = scan(scan_dir=scan_dir)
    elif opts.source:
        # scan the configured source directories for installable flows
        pipe = scan_multi(
            Path(path).expanduser()
            for path in glbl_cfg().get(['install', 'source dirs'])
        )
        opts.states = {'stopped'}
    else:
        pipe = scan

    # which states the user asked to see
    show_running = 'running' in opts.states
    show_paused = 'paused' in opts.states
    show_active = bool({'running', 'paused', 'stopping'} & opts.states)
    show_inactive = 'stopped' in opts.states

    # filter by flow name
    if opts.name:
        pipe |= filter_name(*opts.name)

    # filter by flow state
    if show_active:
        pipe |= is_active(True, filter_stop=(not show_inactive))
    elif show_inactive:
        pipe |= is_active(False)

    # active flows need contact file information for any later queries
    if show_active:
        pipe |= contact_info

    graphql_fields = {}
    graphql_filters = set()

    # need the status field to tell paused and running flows apart
    if show_active and not (show_running and show_paused):
        graphql_fields['status'] = None
        graphql_filters.add((('status',), tuple(opts.states)))

    # rich output requires extra fields
    if formatter == _format_rich:
        graphql_fields.update(RICH_FIELDS)

    # add graphql queries / filters to the pipe
    if show_active and graphql_fields:
        pipe |= graphql_query(graphql_fields, filters=graphql_filters)
    elif opts.ping:
        # check the flow is running even if not required
        # by display format or filters
        pipe |= graphql_query({'status': None})

    # results may be yielded as soon as they are ready
    pipe.preserve_order = False
    return pipe
async def test_filter_name():
    """It should filter flows by registration name."""
    pipe = filter_name('^f')
    # a name matching the pattern passes the filter
    accepted = await pipe.func({'name': 'foo'}, *pipe.args)
    assert accepted
    # a non-matching name is rejected
    rejected = await pipe.func({'name': 'bar'}, *pipe.args)
    assert not rejected
async def _expand_workflow_tokens_impl(tokens, match_active=True):
    """Use "cylc scan" to expand workflow patterns.

    Args:
        tokens: Tokens containing a 'workflow' glob pattern and an
            optional 'workflow_sel' selector.
        match_active: If True/False, restrict results to active/inactive
            flows; if None, do not filter on activity.

    Yields:
        A copy of ``tokens`` for each matching workflow name.

    Raises:
        UserInputError: If an unsupported workflow selector is given.
    """
    workflow_sel = tokens['workflow_sel']
    if workflow_sel and workflow_sel != 'running':
        # BUG FIX: adjacent string literals lacked a separating space,
        # producing "is notcurrently supported."
        raise UserInputError(
            f'The workflow selector :{workflow_sel} is not '
            'currently supported.'
        )

    # construct the pipe
    pipe = scan | filter_name(fnmatch.translate(tokens['workflow']))
    if match_active is not None:
        pipe |= is_active(match_active)

    # iter the results
    async for workflow in pipe:
        yield tokens.duplicate(workflow=workflow['name'])
async def test_scan_one(one, start, test_dir):
    """Ensure that a running workflow appears in the scan results."""
    async with start(one):
        # match this workflow's name exactly
        pattern = rf'^{re.escape(one.workflow)}$'
        pipe = (
            scan(scan_dir=test_dir)
            | filter_name(pattern)
            | is_active(True)
            | workflow_params
        )
        found = False
        async for result in pipe:
            found = True
            assert result['name'] == one.workflow
            break
        if not found:
            raise Exception('Expected one scan result')
def get_pipe(opts, formatter, scan_dir=None):
    """Construct a pipe for listing flows."""
    # choose the scan source
    pipe = scan(scan_dir=scan_dir) if scan_dir else scan

    # which states the user asked to see
    show_running = 'running' in opts.states
    show_held = 'held' in opts.states
    show_active = bool({'running', 'held', 'stopping'} & opts.states)
    show_inactive = 'stopped' in opts.states

    # filter by flow name
    if opts.name:
        pipe |= filter_name(*opts.name)

    # filter by flow state
    if show_active:
        pipe |= is_active(True, filter_stop=(not show_inactive))
    elif show_inactive:
        pipe |= is_active(False)

    # active flows need contact file information for any later queries
    if show_active:
        pipe |= contact_info

    graphql_fields = {}
    graphql_filters = set()

    # need the status field to tell held and running flows apart
    if show_active and not (show_running and show_held):
        graphql_fields['status'] = None
        graphql_filters.add((('status', ), tuple(opts.states)))

    # rich output requires extra fields
    if formatter == _format_rich:
        graphql_fields.update(RICH_FIELDS)

    # add graphql queries / filters to the pipe
    if show_active and graphql_fields:
        pipe |= graphql_query(graphql_fields, filters=graphql_filters)

    # results may be yielded as soon as they are ready
    pipe.preserve_order = False
    return pipe
def test_filter_name_preprocess():
    """It should combine provided patterns and compile them."""
    pipe = filter_name('^f', '^c')
    # the two patterns are merged into one alternation and pre-compiled
    expected = re.compile('(^f|^c)')
    assert pipe.args[0] == expected