Code example #1
def test_pype_use_parent_context_swallow_stop_error(mock_run_pipeline):
    """Input pype doesn't swallow stop error in child pipeline."""
    mock_run_pipeline.side_effect = mocked_run_pipeline_with_stop
    context = Context({
        'pype': {
            'name': 'pipe name',
            'pipeArg': 'argument here',
            'useParentContext': True,
            'skipParse': True,
            'raiseError': False
        }
    })

    context.pipeline_name = 'og pipe name'

    with patch_logger('pypyr.steps.pype', logging.ERROR) as mock_logger_error:
        with pytest.raises(Stop) as err_info:
            pype.run_step(context)

        assert isinstance(err_info.value, Stop)

    assert context.pipeline_name == 'og pipe name'

    mock_run_pipeline.assert_called_once_with(
        pipeline_name='pipe name',
        pipeline_context_input=['argument', 'here'],
        context=context,
        parse_input=False,
        loader=None,
        groups=None,
        success_group=None,
        failure_group=None)

    mock_logger_error.assert_not_called()
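
The snippets on this page omit the test module's imports, the mock_run_pipeline test double and the mocked_run_pipeline_* helpers. A minimal, hypothetical sketch of what they could look like, assuming the double patches the load_and_run_pipeline call that pypyr.steps.pype delegates to (the real test module may use a @patch decorator instead of a fixture, and its patch target and helper bodies may differ). patch_logger is assumed to be a test utility that yields a mock recording the formatted messages logged at the given level on the named logger.

import pytest
from unittest.mock import patch

from pypyr.errors import Stop


@pytest.fixture
def mock_run_pipeline():
    # pype.run_step delegates to pipelinerunner.load_and_run_pipeline, so the
    # tests patch that call and assert on the keyword arguments it receives.
    with patch('pypyr.steps.pype.pipelinerunner.load_and_run_pipeline') as mock:
        yield mock


def mocked_run_pipeline(*args, **kwargs):
    # Stand-in child pipeline run that completes without error.
    pass


def mocked_run_pipeline_with_stop(*args, **kwargs):
    # Stand-in child pipeline run that raises Stop, as expected by example #1.
    raise Stop()


def mocked_run_pipeline_with_runtime_error(*args, **kwargs):
    # Stand-in child pipeline run that fails, matching the logged message in
    # example #2: 'Something went wrong pyping pipe name. RuntimeError: whoops'.
    raise RuntimeError('whoops')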
Code example #2
def test_pype_use_parent_context_with_swallow(mock_run_pipeline):
    """Input pype swallowing error in child pipeline."""
    mock_run_pipeline.side_effect = mocked_run_pipeline_with_runtime_error
    context = Context({
        'pype': {
            'name': 'pipe name',
            'pipeArg': 'argument here',
            'useParentContext': True,
            'skipParse': True,
            'raiseError': False,
            'loader': 'test loader'
        }
    })

    context.pipeline_name = 'og pipe name'

    with patch_logger('pypyr.steps.pype', logging.ERROR) as mock_logger_error:
        pype.run_step(context)

    assert context.pipeline_name == 'og pipe name'

    mock_run_pipeline.assert_called_once_with(
        pipeline_name='pipe name',
        pipeline_context_input=['argument', 'here'],
        context=context,
        parse_input=False,
        loader='test loader',
        groups=None,
        success_group=None,
        failure_group=None)

    mock_logger_error.assert_called_once_with(
        'Something went wrong pyping pipe name. RuntimeError: whoops')
Code example #3
def test_pype_use_parent_context(mock_run_pipeline):
    """Input pype use_parent_context True."""
    mock_run_pipeline.side_effect = mocked_run_pipeline

    context = Context({
        'pype': {
            'name': 'pipe name',
            'pipeArg': 'argument here',
            'useParentContext': True,
            'skipParse': True,
            'raiseError': True,
            'loader': 'test loader'
        }
    })
    context.pipeline_name = 'og pipe name'

    with patch_logger('pypyr.steps.pype', logging.INFO) as mock_logger_info:
        pype.run_step(context)

    mock_run_pipeline.assert_called_once_with(
        pipeline_name='pipe name',
        pipeline_context_input=['argument', 'here'],
        context=context,
        parse_input=False,
        loader='test loader',
        groups=None,
        success_group=None,
        failure_group=None)

    assert context.pipeline_name == 'og pipe name'

    assert mock_logger_info.mock_calls == [
        call('pyping pipe name, using parent context.'),
        call('pyped pipe name.')
    ]
Code example #4
def test_stop_pipeline(mock_step_cache):
    """When StopPipeline stop pipeline execution."""
    # Sequence: sg2 - sg2.1, 2.2
    #           sg3 - sg3.1 (StopPipeline)
    mock_step_cache.side_effect = [
        nothing_step,  # 2.1
        nothing_step,  # 2.2
        stop_pipe_step,  # 3.1
    ]

    context = Context()
    context.pipeline_name = 'arb'
    pypyr.pipelinerunner.run_pipeline(
        pipeline=get_step_pipeline(),
        context=context,
        pipeline_context_input='arb context input',
        groups=['sg2', 'sg3', 'sg4', 'sg1'],
        success_group='sg5',
        failure_group=None
    )

    assert mock_step_cache.mock_calls == [call('sg2.step1'),
                                          call('sg2.step2'),
                                          call('sg3.step1')
                                          ]
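
Examples #4 and #5 lean on helpers the snippets don't show: mock_step_cache (a patched step-name lookup whose side_effect supplies the callable to run for each step name requested), nothing_step, stop_pipe_step and the get_*_step_pipeline builders. A minimal sketch under those assumptions; the pipeline body and patch target here are hypothetical, only the names asserted in the tests come from the snippets. get_retry_step_pipeline would additionally wrap sg3.step1 in a retry decorator, which is what drives retryCounter in example #5.

from pypyr.errors import StopPipeline


def nothing_step(context):
    # Step that does nothing, so the step-group simply carries on.
    pass


def stop_pipe_step(context):
    # Step that raises StopPipeline to halt the currently running pipeline.
    raise StopPipeline()


def get_step_pipeline():
    # Hypothetical pipeline body: each step-group lists simple step names.
    # Only the names matter, because the mocked step cache decides which
    # callable actually runs for each name.
    return {
        'sg1': ['sg1.step1'],
        'sg2': ['sg2.step1', 'sg2.step2'],
        'sg3': ['sg3.step1'],
        'sg4': ['sg4.step1'],
        'sg5': ['sg5.step1'],
    }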
Code example #5
def test_stop_pipeline_retry(mock_step_cache):
    """When StopPipeline stop pipeline execution in retry loop."""
    # Sequence: sg2 - sg2.1, 2.2
    #           sg3 - sg3.1 x2 (StopPipeline)

    nothing_mock = DeepCopyMagicMock()
    mock312 = DeepCopyMagicMock()

    def step31(context):
        mock312(context)
        if context['retryCounter'] == 2:
            raise StopPipeline()
        else:
            raise ValueError(context['retryCounter'])

    mock_step_cache.side_effect = [
        nothing_mock,  # 2.1
        nothing_mock,  # 2.2
        step31,  # 3.1
    ]

    context = Context()
    context.pipeline_name = 'arb'
    pypyr.pipelinerunner.run_pipeline(
        pipeline=get_retry_step_pipeline(),
        context=context,
        pipeline_context_input='arb context input',
        groups=['sg2', 'sg3', 'sg4', 'sg1'],
        success_group='sg5',
        failure_group=None
    )

    assert nothing_mock.mock_calls == [call({}),
                                       call({})
                                       ]

    assert mock312.mock_calls == [call({'retryCounter': 1}),
                                  call({'retryCounter': 2})]

    assert mock_step_cache.mock_calls == [call('sg2.step1'),
                                          call('sg2.step2'),
                                          call('sg3.step1')
                                          ]
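
DeepCopyMagicMock is assumed to be a small test helper: a MagicMock that deep-copies its call arguments, so each recorded call keeps a snapshot of the context as it looked at call time rather than a reference that later mutations would change. That is why nothing_mock records two empty dicts even though the same context later gains retryCounter. A sketch of that idea:

import copy
from unittest.mock import MagicMock


class DeepCopyMagicMock(MagicMock):
    # MagicMock stores references to call args; deep-copying them at call
    # time freezes what each call appears to have received.

    def __call__(self, *args, **kwargs):
        return super().__call__(*copy.deepcopy(args),
                                **copy.deepcopy(kwargs))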
Code example #6
File: pype.py Project: jizongFox/pypyr
def run_step(context):
    """Run another pipeline from this step.

    The parent pipeline is the current, executing pipeline. The invoked, or
    child pipeline is the pipeline you are calling from this step.

    Args:
        context: dictionary-like pypyr.context.Context. context is mandatory.
                 Uses the following context keys in context:
            - pype
                - name. mandatory. str. Name of pipeline to execute. This
                  {name}.yaml must exist in the working directory/pipelines
                  dir.
                - args. optional. dict. Create the context of the called
                  pipeline with these keys & values. If args specified,
                  will not pass the parent context unless you explicitly set
                  useParentContext = True. If you do set useParentContext=True,
                  will write args into the parent context.
                - out. optional. str or dict or list. If you set args or
                  useParentContext=False, the values in out will be saved from
                  child pipeline's fresh context into the parent context upon
                  completion of the child pipeline. Pass a string for a single
                  key to grab from child context, a list of strings for a list
                  of keys to grab from child context, or a dict where you map
                  'parent-key-name': 'child-key-name'.
                - pipeArg. string. optional. String to pass to the
                  context_parser - the equivalent to context arg on the
                  pypyr cli. Only used if skipParse==False.
                - raiseError. bool. optional. Defaults to True. If False, log,
                  but swallow any errors that happen during the invoked
                  pipeline execution. Swallowing means that the current/parent
                  pipeline will carry on with the next step even if an error
                  occurs in the invoked pipeline.
                - skipParse. bool. optional. Defaults to True. skip the
                  context_parser on the invoked pipeline.
                - useParentContext. optional. bool. Defaults to True. Pass the
                  current (i.e. parent) pipeline context to the invoked (child)
                  pipeline.
                - loader: str. optional. Absolute name of pipeline loader
                  module. If not specified will use
                  pypyr.pypeloaders.fileloader.
                - groups. list of str, or str. optional. Step-Groups to run in
                  pipeline. If you specify a str, will convert it to a single
                  entry list for you.
                - success. str. optional. Step-Group to run on successful
                  pipeline completion.
                - failure. str. optional. Step-Group to run on pipeline error.

    If none of groups, success & failure specified, will run the default pypyr
    steps, on_success & on_failure sequence.

    If groups specified, will only run groups, without a success or failure
    sequence, unless you specifically set these also.

    Returns:
        None

    Raises:
        pypyr.errors.KeyNotInContextError: if ['pype'] or ['pype']['name']
                                           is missing.
        pypyr.errors.KeyInContextHasNoValueError: ['pype']['name'] exists but
                                                  is empty.
    """
    logger.debug("started")

    (pipeline_name, args, out, use_parent_context, pipe_arg, skip_parse,
     raise_error, loader, step_groups, success_group,
     failure_group) = get_arguments(context)

    try:
        if use_parent_context:
            logger.info("pyping %s, using parent context.", pipeline_name)

            if args:
                logger.debug("writing args into parent context...")
                context.update(args)

            pipelinerunner.load_and_run_pipeline(
                pipeline_name=pipeline_name,
                pipeline_context_input=pipe_arg,
                context=context,
                parse_input=not skip_parse,
                loader=loader,
                groups=step_groups,
                success_group=success_group,
                failure_group=failure_group)
        else:
            logger.info("pyping %s, without parent context.", pipeline_name)

            if args:
                child_context = Context(args)
            else:
                child_context = Context()

            child_context.pipeline_name = pipeline_name
            child_context.working_dir = context.working_dir

            pipelinerunner.load_and_run_pipeline(
                pipeline_name=pipeline_name,
                pipeline_context_input=pipe_arg,
                context=child_context,
                parse_input=not skip_parse,
                loader=loader,
                groups=step_groups,
                success_group=success_group,
                failure_group=failure_group)

            if out:
                write_child_context_to_parent(out=out,
                                              parent_context=context,
                                              child_context=child_context)

        logger.info("pyped %s.", pipeline_name)
    except (ControlOfFlowInstruction, Stop):
        # Control-of-Flow/Stop are instructions to go somewhere
        # else, not errors per se.
        raise
    except Exception as ex_info:
        # yes, yes, don't catch Exception. Have to, though, in order to swallow
        # errs if !raise_error
        logger.error("Something went wrong pyping %s. %s: %s", pipeline_name,
                     type(ex_info).__name__, ex_info)

        if raise_error:
            logger.debug("Raising original exception to caller.")
            raise
        else:
            logger.debug("raiseError is False. Swallowing error in %s.",
                         pipeline_name)

    logger.debug("done")