コード例 #1
0
def test_pipeline_init_failure():
    """A pipeline whose resource init raises should report a single
    PIPELINE_INIT_FAILURE event when raise_on_error is disabled."""

    def _raise_resource_error(*_args, **_kwargs):
        raise Exception()

    pipeline_def = PipelineDefinition(
        [define_stub_solid('stub', None)],
        'failing_init_pipeline',
        mode_definitions=[
            ModeDefinition(
                resources={
                    'failing': ResourceDefinition(
                        resource_fn=_raise_resource_error)
                })
        ],
    )

    result = execute_pipeline(
        pipeline_def,
        environment_dict={},
        run_config=RunConfig(
            executor_config=InProcessExecutorConfig(raise_on_error=False)),
    )

    assert result.success is False
    assert len(result.event_list) == 1

    event = result.event_list[0]
    assert event.event_type_value == 'PIPELINE_INIT_FAILURE'
    assert event.pipeline_init_failure_data
コード例 #2
0
def test_error_pipeline():
    """A failing pipeline yields result.success == False rather than
    raising when raise_on_error is turned off."""
    no_raise_config = RunConfig(
        executor_config=InProcessExecutorConfig(raise_on_error=False))
    result = execute_pipeline(define_error_pipeline(),
                              run_config=no_raise_config)
    assert not result.success
コード例 #3
0
    def execute_pipeline(self, repository_container, pipeline, pipeline_run,
                         raise_on_error):
        """Run *pipeline* in-process on behalf of *pipeline_run*.

        Events are streamed to ``pipeline_run.handle_new_event``. When
        ``raise_on_error`` is False, any failure is recorded on the run as a
        synthetic pipeline-error record instead of propagating.

        NOTE(review): ``repository_container`` is accepted but unused here —
        presumably kept for interface compatibility; confirm against callers.
        """
        check.inst_param(pipeline, 'pipeline', PipelineDefinition)
        try:
            return execute_pipeline(
                pipeline,
                pipeline_run.config,
                run_config=RunConfig(
                    pipeline_run.run_id,
                    event_callback=pipeline_run.handle_new_event,
                    executor_config=InProcessExecutorConfig(
                        raise_on_error=raise_on_error),
                    reexecution_config=pipeline_run.reexecution_config,
                    step_keys_to_execute=pipeline_run.step_keys_to_execute,
                ),
            )
        except:  # pylint: disable=W0702
            # Deliberately bare: every failure (even BaseException) is either
            # re-raised with its original traceback or recorded on the run.
            if raise_on_error:
                six.reraise(*sys.exc_info())

            # Swallow the exception but surface it as a synthetic error event.
            pipeline_run.handle_new_event(
                build_synthetic_pipeline_error_record(
                    pipeline_run.run_id,
                    serializable_error_info_from_exc_info(sys.exc_info()),
                    pipeline.name,
                ))
コード例 #4
0
def test_resource_init_failure():
    """Both execute_pipeline and execute_plan should surface a resource
    init error as a PIPELINE_INIT_FAILURE event when not raising."""

    @resource
    def failing_resource(_init_context):
        raise Exception('Uh oh')

    @solid(required_resource_keys={'failing_resource'})
    def failing_resource_solid(_context):
        pass

    pipeline = PipelineDefinition(
        name='test_resource_init_failure',
        solid_defs=[failing_resource_solid],
        mode_defs=[
            ModeDefinition(
                resource_defs={'failing_resource': failing_resource})
        ],
    )

    run_config = RunConfig(
        executor_config=InProcessExecutorConfig(raise_on_error=False))

    pipeline_result = execute_pipeline(pipeline, run_config=run_config)
    assert pipeline_result.event_list[0].event_type_value == \
        'PIPELINE_INIT_FAILURE'

    # The same failure must also be reported via the plan-level API.
    plan = create_execution_plan(pipeline, run_config=run_config)
    all_step_keys = [step.key for step in plan.topological_steps()]
    step_events = execute_plan(
        plan,
        run_config=run_config,
        step_keys_to_execute=all_step_keys,
    )
    assert step_events[0].event_type_value == 'PIPELINE_INIT_FAILURE'
コード例 #5
0
File: pipeline.py — Project: rolanddb/dagster
def execute_execute_command_with_preset(preset, raise_on_error, cli_args,
                                        _mode):
    """Build the pipeline named in *cli_args* and run its *preset*,
    overriding the executor's raise_on_error behavior."""
    handle = handle_for_pipeline_cli_args(cli_args)
    pipeline = handle.build_pipeline_definition()
    cli_args.pop('pipeline_name')

    kwargs = pipeline.get_preset(preset)
    executor_config = InProcessExecutorConfig(raise_on_error=raise_on_error)
    kwargs['run_config'] = kwargs['run_config'].with_executor_config(
        executor_config)
    return execute_pipeline(**kwargs)
コード例 #6
0
def execute_execute_command_with_preset(preset, raise_on_error, cli_args):
    """Look up the target pipeline's *preset* in its repository and execute
    it in-process with the requested raise_on_error setting."""
    pipeline_target = load_pipeline_target_from_cli_args(cli_args)
    cli_args.pop('pipeline_name')
    repository_target_info = load_target_info_from_cli_args(cli_args)
    repository = load_repository_from_target_info(repository_target_info)

    preset_kwargs = repository.get_preset_pipeline(
        pipeline_target.pipeline_name, preset)
    run_config = RunConfig(
        executor_config=InProcessExecutorConfig(
            raise_on_error=raise_on_error))
    return execute_pipeline(run_config=run_config, **preset_kwargs)
コード例 #7
0
def execute_pipeline_through_queue(
    handle,
    pipeline_name,
    solid_subset,
    environment_dict,
    mode,
    run_id,
    message_queue,
    reexecution_config,
    step_keys_to_execute,
):
    """
    Execute pipeline using message queue as a transport
    """
    check.opt_str_param(mode, 'mode')

    # Tell the parent process we are alive before doing any real work.
    message_queue.put(ProcessStartedSentinel(os.getpid()))

    run_config = RunConfig(
        run_id,
        mode=mode,
        event_callback=message_queue.put,
        executor_config=InProcessExecutorConfig(raise_on_error=False),
        reexecution_config=reexecution_config,
        step_keys_to_execute=step_keys_to_execute,
    )

    try:
        repository = handle.build_repository_definition()
    except Exception:  # pylint: disable=broad-except
        # Repository construction failed: ship the error and bail out
        # without ever starting the run (no Done sentinel in this path).
        message_queue.put(
            MultiprocessingError(
                serializable_error_info_from_exc_info(sys.exc_info())))
        return

    try:
        return execute_pipeline(
            repository.get_pipeline(pipeline_name).build_sub_pipeline(
                solid_subset),
            environment_dict,
            run_config=run_config,
        )
    except Exception:  # pylint: disable=broad-except
        message_queue.put(
            MultiprocessingError(
                serializable_error_info_from_exc_info(sys.exc_info())))
    finally:
        # Always signal completion and release the queue, success or failure.
        message_queue.put(MultiprocessingDone())
        message_queue.close()
コード例 #8
0
def execute_execute_command_with_preset(preset, raise_on_error, cli_args,
                                        mode):
    """Execute a repository preset for the pipeline named in *cli_args*,
    running in-process in the given *mode*."""
    pipeline = handle_for_pipeline_cli_args(
        cli_args).build_pipeline_definition()
    cli_args.pop('pipeline_name')
    repository = handle_for_repo_cli_args(
        cli_args).build_repository_definition()

    preset_kwargs = repository.get_preset_pipeline(pipeline.name, preset)
    run_config = RunConfig(
        mode=mode,
        executor_config=InProcessExecutorConfig(
            raise_on_error=raise_on_error))
    return execute_pipeline(run_config=run_config, **preset_kwargs)
コード例 #9
0
def do_execute_command(pipeline, env_file_list, raise_on_error):
    """Execute *pipeline* with the environment assembled from the YAML
    files matched by *env_file_list* (empty environment when none given)."""
    check.inst_param(pipeline, 'pipeline', PipelineDefinition)
    env_file_list = check.opt_list_param(env_file_list,
                                         'env_file_list',
                                         of_type=str)

    if env_file_list:
        environment_dict = load_yaml_from_glob_list(env_file_list)
    else:
        environment_dict = {}

    run_config = RunConfig(
        executor_config=InProcessExecutorConfig(
            raise_on_error=raise_on_error))
    return execute_pipeline(
        pipeline,
        environment_dict=environment_dict,
        run_config=run_config,
    )
コード例 #10
0
File: test_toys.py — Project: vmuthuk2/dagster
def test_error_resource(snapshot):
    """A resource init failure should produce exactly one
    PIPELINE_INIT_FAILURE event whose message matches the snapshot."""
    no_raise_config = RunConfig(
        executor_config=InProcessExecutorConfig(raise_on_error=False))
    result = execute_pipeline(
        resource_error_pipeline,
        environment_dict={'storage': {'filesystem': {}}},
        run_config=no_raise_config,
    )

    assert not result.success
    assert len(result.event_list) == 1

    init_failure_event = result.event_list[0]
    assert init_failure_event.event_type_value == 'PIPELINE_INIT_FAILURE'
    snapshot.assert_match(init_failure_event.message)
コード例 #11
0
def execute_pipeline_through_queue(
    repository_info,
    pipeline_name,
    solid_subset,
    environment_dict,
    run_id,
    message_queue,
    reexecution_config,
    step_keys_to_execute,
):
    """
    Execute pipeline using message queue as a transport.

    Puts a ProcessStartedSentinel first, then either runs the pipeline
    (forwarding every event via ``message_queue.put``) or reports a
    MultiprocessingError, and finally signals MultiprocessingDone and
    closes the queue.
    """
    # Tell the parent process we are alive before doing any real work.
    message_queue.put(ProcessStartedSentinel(os.getpid()))

    run_config = RunConfig(
        run_id,
        event_callback=message_queue.put,
        executor_config=InProcessExecutorConfig(raise_on_error=False),
        reexecution_config=reexecution_config,
        step_keys_to_execute=step_keys_to_execute,
    )

    repository_container = RepositoryContainer(repository_info)
    if repository_container.repo_error:
        # Repository failed to load: report the error and never start the
        # run (no Done sentinel in this path, matching prior behavior).
        message_queue.put(
            MultiprocessingError(
                serializable_error_info_from_exc_info(
                    repository_container.repo_error)))
        return

    try:
        result = execute_pipeline(
            repository_container.repository.get_pipeline(
                pipeline_name).build_sub_pipeline(solid_subset),
            environment_dict,
            run_config=run_config,
        )
        return result
    except Exception:  # pylint: disable=broad-except
        # Was a bare `except:`; `Exception` still covers every pipeline
        # failure but no longer swallows SystemExit/KeyboardInterrupt.
        error_info = serializable_error_info_from_exc_info(sys.exc_info())
        message_queue.put(MultiprocessingError(error_info))
    finally:
        # Always signal completion and release the queue, success or failure.
        message_queue.put(MultiprocessingDone())
        message_queue.close()
コード例 #12
0
def test_single_solid_pipeline_failure():
    """A raising solid should emit STEP_START at INFO level and
    STEP_FAILURE at ERROR level, and the run should not succeed."""
    events = defaultdict(list)

    @lambda_solid
    def solid_one():
        raise Exception('nope')

    def _event_callback(record):
        # Bucket structured dagster events by their event type.
        if record.is_dagster_event:
            events[record.dagster_event.event_type].append(record)

    pipeline_def = PipelineDefinition(
        name='single_solid_pipeline',
        solids=[solid_one],
        mode_definitions=[mode_def(_event_callback)],
    )

    result = execute_pipeline(
        pipeline_def,
        {'loggers': {'callback': {}}},
        run_config=RunConfig(
            executor_config=InProcessExecutorConfig(raise_on_error=False)),
    )
    assert not result.success

    # Start and failure records share everything except type and log level.
    for event_type, expected_level in [
        (DagsterEventType.STEP_START, logging.INFO),
        (DagsterEventType.STEP_FAILURE, logging.ERROR),
    ]:
        record = single_dagster_event(events, event_type)
        assert record.pipeline_name == 'single_solid_pipeline'
        assert record.dagster_event.solid_name == 'solid_one'
        assert record.dagster_event.solid_definition_name == 'solid_one'
        assert record.level == expected_level
コード例 #13
0
File: test_utils.py — Project: zorrock/dagster
def execute_single_solid_in_isolation(context_params,
                                      solid_def,
                                      environment=None,
                                      raise_on_error=True):
    '''
    Deprecated.

    Execute a solid outside the context of a pipeline, with an already-created context.

    Prefer execute_solid in dagster.utils.test
    '''
    check.inst_param(context_params, 'context_params', ExecutionContext)
    check.inst_param(solid_def, 'solid_def', SolidDefinition)
    environment = check.opt_dict_param(environment, 'environment')
    check.bool_param(raise_on_error, 'raise_on_error')

    # Carry over only the config that pertains to this single solid.
    if solid_def.name in environment.get('solids', {}):
        solids_config = {
            solid_def.name: environment['solids'][solid_def.name]
        }
    else:
        solids_config = None

    single_solid_environment = {
        'expectations': environment.get('expectations'),
        'context': environment.get('context'),
        'solids': solids_config,
    }

    pipeline_def = PipelineDefinition(
        solids=[solid_def],
        context_definitions=PipelineContextDefinition.
        passthrough_context_definition(context_params),
    )

    return execute_pipeline(
        pipeline_def,
        environment_dict=single_solid_environment,
        run_config=RunConfig(
            executor_config=InProcessExecutorConfig(
                raise_on_error=raise_on_error)),
    )
コード例 #14
0
def execute_no_throw(pipeline_def):
    """Run *pipeline_def* in-process, returning the result object instead
    of raising on failure."""
    no_throw_config = RunConfig(
        executor_config=InProcessExecutorConfig(raise_on_error=False))
    return execute_pipeline(pipeline_def, run_config=no_throw_config)
コード例 #15
0
def test_diamond_simple_execution():
    """The diamond pipeline should succeed with 'adder' producing 11."""
    in_process_config = RunConfig(executor_config=InProcessExecutorConfig())
    result = execute_pipeline(define_diamond_pipeline(),
                              run_config=in_process_config)
    assert result.success
    assert result.result_for_solid('adder').result_value() == 11
コード例 #16
0
File: resources_error.py — Project: vmuthuk2/dagster
    ModeDefinition,
    InProcessExecutorConfig,
    RunConfig,
)


@resource(config_field=Field(Int, is_optional=True))
def a_resource(context):
    # Always fails, so pipelines using it exercise resource-init error paths.
    raise Exception("Bad Resource")


# Resource mapping consumed by the pipeline mode defined below.
resources = {'BadResource': a_resource}


@solid(required_resource_keys={'BadResource'})
def one(_):
    # Requiring 'BadResource' forces the failing resource to initialize.
    return 1


@pipeline(mode_defs=[ModeDefinition(resource_defs=resources)])
def resource_error_pipeline():
    # Single solid whose required resource always fails to initialize.
    one()


if __name__ == '__main__':
    # Run directly to observe the PIPELINE_INIT_FAILURE without raising.
    result = execute_pipeline(
        resource_error_pipeline,
        environment_dict={'storage': {'filesystem': {}}},
        run_config=RunConfig(executor_config=InProcessExecutorConfig(raise_on_error=False)),
    )