def test_pype_no_pipe_arg(mock_run_pipeline):
    """Invoke pype with pipeArg None and useParentContext False."""
    step_config = {
        'name': 'pipe name',
        'pipeArg': None,
        'useParentContext': False,
        'skipParse': False,
        'raiseError': True,
    }
    ctx = Context({'pype': step_config})
    ctx.working_dir = 'arb/dir'

    with patch_logger('pypyr.steps.pype', logging.INFO) as mock_logger_info:
        pype.run_step(ctx)

    # child pipeline gets a fresh empty context; parsing stays enabled
    mock_run_pipeline.assert_called_once_with(
        pipeline_name='pipe name',
        pipeline_context_input=None,
        context={},
        parse_input=True,
        loader=None,
        groups=None,
        success_group=None,
        failure_group=None)

    assert mock_logger_info.mock_calls == [
        call('pyping pipe name, without parent context.'),
        call('pyped pipe name.')]
def test_load_and_run_pipeline_with_failure_group_specified(
        mocked_get_work_dir,
        mocked_set_work_dir,
        mocked_get_pipe_def,
        mocked_get_parsed_context,
        mocked_steps_runner):
    """Pass run_pipeline with specified failure group."""
    pipeline_cache.clear()
    pypeloader_cache.clear()

    parent_context = Context({'2': 'original', '3': 'new'})
    parent_context.working_dir = 'from/context'

    pypyr.pipelinerunner.load_and_run_pipeline(
        pipeline_name='arb pipe',
        pipeline_context_input='arb context input',
        context=parent_context,
        failure_group='arb1')

    # working dir comes from the supplied context and is left untouched
    assert parent_context.working_dir == 'from/context'
    mocked_set_work_dir.assert_not_called()

    mocked_get_pipe_def.assert_called_once_with(pipeline_name='arb pipe',
                                                working_dir='from/context')
    mocked_get_parsed_context.assert_called_once_with(
        pipeline='pipe def',
        context_in_args='arb context input')

    # only the default steps group runs, with the custom failure group
    mocked_steps_runner.return_value.run_step_groups.assert_called_once_with(
        groups=['steps'],
        success_group=None,
        failure_group='arb1')
    mocked_steps_runner.assert_called_once_with(
        pipeline_definition='pipe def',
        context={'1': 'context 1', '2': 'context2', '3': 'new'})
def test_pype_no_pipe_arg_working_dir(mock_run_pipeline):
    """pype use_parent_context False, legacy working_dir run_pipeline API.

    NOTE(review): renamed from test_pype_no_pipe_arg — this module defines
    another test with that exact name, and the later definition silently
    shadowed this one at import time, so pytest never collected or ran it.
    The unique name makes both tests execute.
    """
    context = Context({
        'pype': {
            'name': 'pipe name',
            'pipeArg': None,
            'useParentContext': False,
            'skipParse': False,
            'raiseError': True
        }
    })
    context.working_dir = 'arb/dir'

    logger = logging.getLogger('pypyr.steps.pype')
    with patch.object(logger, 'info') as mock_logger_info:
        pype.run_step(context)

    # legacy signature: passes working_dir instead of an explicit context
    mock_run_pipeline.assert_called_once_with(
        pipeline_name='pipe name',
        pipeline_context_input=None,
        working_dir='arb/dir',
        parse_input=True)

    assert mock_logger_info.mock_calls == [
        call('pyping pipe name, without parent context.'),
        call('pyped pipe name.')]
def test_pype_args_with_mapping_out(mock_run_pipeline):
    """Input pype args used as context with mapping out."""
    pype_config = {
        'name': 'pipe name',
        'args': {'a': 'av', 'b': 'bv', 'c': 'cv'},
        'out': {'new-a': 'a', 'new-c': 'c'},
    }
    ctx = Context({'parentkey': 'parentvalue', 'pype': pype_config})
    ctx.working_dir = 'arb/dir'

    with patch_logger('pypyr.steps.pype', logging.INFO) as mock_logger_info:
        pype.run_step(ctx)

    # args become the child pipeline's entire context; parse is skipped
    mock_run_pipeline.assert_called_once_with(
        pipeline_name='pipe name',
        pipeline_context_input=None,
        context={'a': 'av', 'b': 'bv', 'c': 'cv'},
        parse_input=False,
        loader=None,
        groups=None,
        success_group=None,
        failure_group=None)

    assert mock_logger_info.mock_calls == [
        call('pyping pipe name, without parent context.'),
        call('pyped pipe name.')]

    # only the keys named in out map back into the parent, under new names
    assert ctx == {
        'parentkey': 'parentvalue',
        'new-a': 'av',
        'new-c': 'cv',
        'pype': {
            'name': 'pipe name',
            'args': {'a': 'av', 'b': 'bv', 'c': 'cv'},
            'out': {'new-a': 'a', 'new-c': 'c'},
        },
    }
def test_run_pipeline_with_existing_context_pass(mocked_work_dir,
                                                 mocked_get_pipe_def,
                                                 mocked_get_parsed_context,
                                                 mocked_run_step_group):
    """run_pipeline passes correct params to all methods."""
    existing_context = Context({'2': 'original', '3': 'new'})
    existing_context.working_dir = 'from/context'

    pypyr.pipelinerunner.run_pipeline(
        pipeline_name='arb pipe',
        pipeline_context_input='arb context input',
        working_dir='arb/dir',
        context=existing_context)

    # BUG FIX: this was a bare comparison with no `assert`, so the
    # expression evaluated and was discarded — the check never ran.
    assert existing_context.working_dir == 'from/context'

    mocked_work_dir.assert_not_called()
    mocked_get_pipe_def.assert_called_once_with(pipeline_name='arb pipe',
                                                working_dir='from/context')
    mocked_get_parsed_context.assert_called_once_with(
        pipeline='pipe def',
        context_in_string='arb context input')

    # 1st called steps, then on_success. In both cases with merged context.
    expected_run_step_groups = [
        call(context={
            '1': 'context 1',
            '2': 'context2',
            '3': 'new'
        },
            pipeline_definition='pipe def',
            step_group_name='steps'),
        call(context={
            '1': 'context 1',
            '2': 'context2',
            '3': 'new'
        },
            pipeline_definition='pipe def',
            step_group_name='on_success')
    ]

    mocked_run_step_group.assert_has_calls(expected_run_step_groups)
def run_step(context):
    """Run another pipeline from this step.

    The parent pipeline is the current, executing pipeline. The invoked,
    or child pipeline is the pipeline you are calling from this step.

    Args:
        context: dictionary-like pypyr.context.Context. context is mandatory.
                 Uses the following context keys in context:
            - pype
                - name. mandatory. str. Name of pipeline to execute. This
                  {name}.yaml must exist in the working directory/pipelines
                  dir.
                - args. optional. dict. Create the context of the called
                  pipeline with these keys & values. If args specified, will
                  not pass the parent context unless you explicitly set
                  useParentContext = True. If you do set
                  useParentContext=True, will write args into the parent
                  context.
                - out. optional. str or dict or list. If you set args or
                  useParentContext=False, the values in out will be saved
                  from child pipeline's fresh context into the parent content
                  upon completion of the child pipeline. Pass a string for a
                  single key to grab from child context, a list of string for
                  a list of keys to grab from child context, or a dict where
                  you map 'parent-key-name': 'child-key-name'.
                - pipeArg. string. optional. String to pass to the
                  context_parser - the equivalent to context arg on the pypyr
                  cli. Only used if skipParse==False.
                - raiseError. bool. optional. Defaults to True. If False,
                  log, but swallow any errors that happen during the invoked
                  pipeline execution. Swallowing means that the
                  current/parent pipeline will carry on with the next step
                  even if an error occurs in the invoked pipeline.
                - skipParse. bool. optional. Defaults to True. skip the
                  context_parser on the invoked pipeline.
                - useParentContext. optional. bool. Defaults to True. Pass
                  the current (i.e parent) pipeline context to the invoked
                  (child) pipeline.
                - loader: str. optional. Absolute name of pipeline loader
                  module. If not specified will use
                  pypyr.pypeloaders.fileloader.
                - groups. list of str, or str. optional. Step-Groups to run
                  in pipeline. If you specify a str, will convert it to a
                  single entry list for you.
                - success. str. optional. Step-Group to run on successful
                  pipeline completion.
                - failure. str. optional. Step-Group to run on pipeline
                  error.

    If none of groups, success & failure specified, will run the default
    pypyr steps, on_success & on_failure sequence.

    If groups specified, will only run groups, without a success or failure
    sequence, unless you specifically set these also.

    Returns:
        None

    Raises:
        pypyr.errors.KeyNotInContextError: if ['pype'] or ['pype']['name']
                                           is missing.
        pypyr.errors.KeyInContextHasNoValueError: ['pype']['name'] exists but
                                                  is empty.
    """
    logger.debug("started")

    # unpack the full pype step configuration in one go
    (pipeline_name,
     args,
     out,
     use_parent_context,
     pipe_arg,
     skip_parse,
     raise_error,
     loader,
     step_groups,
     success_group,
     failure_group) = get_arguments(context)

    try:
        if use_parent_context:
            logger.info("pyping %s, using parent context.", pipeline_name)

            if args:
                # args merge into (and can overwrite keys in) parent context
                logger.debug("writing args into parent context...")
                context.update(args)

            pipelinerunner.load_and_run_pipeline(
                pipeline_name=pipeline_name,
                pipeline_context_input=pipe_arg,
                context=context,
                parse_input=not skip_parse,
                loader=loader,
                groups=step_groups,
                success_group=success_group,
                failure_group=failure_group)
        else:
            logger.info("pyping %s, without parent context.", pipeline_name)

            # child gets a brand-new context: either seeded from args, or
            # empty.
            if args:
                child_context = Context(args)
            else:
                child_context = Context()

            child_context.pipeline_name = pipeline_name
            child_context.working_dir = context.working_dir

            pipelinerunner.load_and_run_pipeline(
                pipeline_name=pipeline_name,
                pipeline_context_input=pipe_arg,
                context=child_context,
                parse_input=not skip_parse,
                loader=loader,
                groups=step_groups,
                success_group=success_group,
                failure_group=failure_group)

            if out:
                # copy selected keys from the child's fresh context back
                # into the parent. Only relevant when the child did NOT run
                # with the parent context.
                write_child_context_to_parent(out=out,
                                              parent_context=context,
                                              child_context=child_context)

        logger.info("pyped %s.", pipeline_name)
    except (ControlOfFlowInstruction, Stop):
        # Control-of-Flow/Stop are instructions to go somewhere
        # else, not errors per se.
        raise
    except Exception as ex_info:
        # yes, yes, don't catch Exception. Have to, though, in order to
        # swallow errs if !raise_error
        logger.error("Something went wrong pyping %s. %s: %s",
                     pipeline_name, type(ex_info).__name__, ex_info)

        if raise_error:
            logger.debug("Raising original exception to caller.")
            raise
        else:
            logger.debug("raiseError is False. Swallowing error in %s.",
                         pipeline_name)

    logger.debug("done")