Code Example #1
def _execute_pipeline_iterator(context_or_failure_event):
    # Due to use of context managers, if the user-land code in context or resource init fails
    # we can get either a pipeline_context or the failure event here.
    if (isinstance(context_or_failure_event, DagsterEvent)
            and context_or_failure_event.event_type
            == DagsterEventType.PIPELINE_INIT_FAILURE):
        yield context_or_failure_event
        return

    pipeline_context = context_or_failure_event
    check.inst_param(pipeline_context, 'pipeline_context',
                     SystemPipelineExecutionContext)
    yield DagsterEvent.pipeline_start(pipeline_context)

    execution_plan = ExecutionPlan.build(
        pipeline_context.pipeline_def,
        pipeline_context.environment_config,
        pipeline_context.mode_def,
    )

    steps = execution_plan.topological_steps()

    if not steps:
        pipeline_context.log.debug(
            'Pipeline {pipeline} has no nodes and no execution will happen'.
            format(pipeline=pipeline_context.pipeline_def.display_name))
        yield DagsterEvent.pipeline_success(pipeline_context)
        return

    _setup_reexecution(pipeline_context.run_config, pipeline_context,
                       execution_plan)

    pipeline_context.log.debug(
        'About to execute the compute node graph in the following order {order}'
        .format(order=[step.key for step in steps]))

    check.invariant(
        len([
            step_input for step_input in steps[0].step_inputs
            if step_input.is_from_output
        ]) == 0)

    pipeline_success = True

    try:
        for event in invoke_executor_on_plan(
                pipeline_context, execution_plan,
                pipeline_context.run_config.step_keys_to_execute):
            if event.is_step_failure:
                pipeline_success = False
            yield event
    finally:
        if pipeline_success:
            yield DagsterEvent.pipeline_success(pipeline_context)
        else:
            yield DagsterEvent.pipeline_failure(pipeline_context)
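
Example #1 establishes the shape that the later variants refine: yield a start event, stream step events from an inner iterator while tracking whether any step failed, and guarantee a terminal success or failure event from a finally block. A minimal sketch of that shape in plain Python (the event strings and the run_iterator/steps names are hypothetical, not Dagster APIs):

def run_iterator(steps):
    # 'steps' is a list of zero-argument callables, each returning True on success.
    yield 'PIPELINE_START'
    success = True
    try:
        for step in steps:
            ok = step()
            if not ok:
                success = False
            yield ('STEP_SUCCESS' if ok else 'STEP_FAILURE', step.__name__)
    finally:
        # Runs on normal completion and when an exception propagates out of the
        # loop, so a terminal event is emitted in either case. Yielding here after
        # the consumer has closed the generator raises RuntimeError, which is why
        # later examples add a generator_closed guard.
        yield 'PIPELINE_SUCCESS' if success else 'PIPELINE_FAILURE'


print(list(run_iterator([lambda: True, lambda: False])))
# ['PIPELINE_START', ('STEP_SUCCESS', '<lambda>'), ('STEP_FAILURE', '<lambda>'), 'PIPELINE_FAILURE']
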
Code Example #2
File: api.py Project: yetudada/dagster
def _pipeline_execution_iterator(pipeline_context, execution_plan,
                                 pipeline_run):
    '''A complete execution of a pipeline. Yields pipeline start, success,
    and failure events. Defers to _steps_execution_iterator for step execution.
    '''
    if (isinstance(pipeline_context, DagsterEvent)
            and pipeline_context.event_type  # pylint: disable=no-member
            == DagsterEventType.PIPELINE_INIT_FAILURE):
        yield pipeline_context
        return

    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_success = True
    try:
        for event in _steps_execution_iterator(pipeline_context,
                                               execution_plan=execution_plan,
                                               pipeline_run=pipeline_run):
            if event.is_step_failure:
                pipeline_success = False
            yield event
    except (Exception, KeyboardInterrupt):
        pipeline_success = False
        raise  # finally block will run before this is re-raised
    finally:
        if pipeline_success:
            yield DagsterEvent.pipeline_success(pipeline_context)

        else:
            yield DagsterEvent.pipeline_failure(pipeline_context)
Code Example #3
def _pipeline_execution_iterator(pipeline_context, execution_plan,
                                 pipeline_run):
    '''A complete execution of a pipeline. Yields pipeline start, success,
    and failure events.
    '''
    check.inst_param(pipeline_context, 'pipeline_context',
                     SystemPipelineExecutionContext)
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(pipeline_run, 'pipeline_run', PipelineRun)
    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_success = True
    generator_closed = False
    try:
        for event in pipeline_context.executor_config.get_engine().execute(
                pipeline_context, execution_plan):
            if event.is_step_failure:
                pipeline_success = False
            yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        pipeline_success = False
        raise
    except (Exception, KeyboardInterrupt):
        pipeline_success = False
        raise  # finally block will run before this is re-raised
    finally:
        if pipeline_success:
            event = DagsterEvent.pipeline_success(pipeline_context)
        else:
            event = DagsterEvent.pipeline_failure(pipeline_context)
        if not generator_closed:
            yield event
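
Example #3 introduces the generator_closed flag because yielding from a finally block after the consumer has closed the generator (for instance when it is garbage-collected mid-iteration) raises RuntimeError: generator ignored GeneratorExit. A minimal, standard-library-only reproduction of the hazard and the guard (illustrative; not Dagster code):

def unguarded():
    try:
        yield 'step'
    finally:
        yield 'terminal'  # illegal once GeneratorExit has been raised


def guarded():
    closed = False
    try:
        yield 'step'
    except GeneratorExit:
        closed = True
        raise
    finally:
        if not closed:
            yield 'terminal'


g = unguarded()
next(g)
try:
    g.close()  # raises RuntimeError: generator ignored GeneratorExit
except RuntimeError as err:
    print('unguarded:', err)

g = guarded()
next(g)
g.close()      # closes cleanly; no terminal event is emitted
print('guarded: closed without error')
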
Code Example #4
File: api.py Project: tristaneljed/dagster
def _execute_pipeline_iterator(pipeline_context, execution_plan, run_config,
                               step_keys_to_execute):
    if (isinstance(pipeline_context, DagsterEvent)
            and pipeline_context.event_type  # pylint: disable=no-member
            == DagsterEventType.PIPELINE_INIT_FAILURE):
        yield pipeline_context
        return

    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_success = True

    try:
        for event in _execute_plan_iterator(
                pipeline_context,
                execution_plan=execution_plan,
                run_config=run_config,
                step_keys_to_execute=step_keys_to_execute,
        ):
            if event.is_step_failure:
                pipeline_success = False
            yield event
    finally:
        if pipeline_success:
            yield DagsterEvent.pipeline_success(pipeline_context)
        else:
            yield DagsterEvent.pipeline_failure(pipeline_context)
Code Example #5
File: api.py Project: databill86/dagster
def _pipeline_execution_iterator(pipeline_context, execution_plan, run_config,
                                 step_keys_to_execute):
    '''A complete execution of a pipeline. Yields pipeline start, success,
    and failure events. Defers to _steps_execution_iterator for step execution.
    '''
    if (isinstance(pipeline_context, DagsterEvent)
            and pipeline_context.event_type  # pylint: disable=no-member
            == DagsterEventType.PIPELINE_INIT_FAILURE):
        yield pipeline_context
        return

    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_success = True

    try:
        for event in _steps_execution_iterator(
                pipeline_context,
                execution_plan=execution_plan,
                run_config=run_config,
                step_keys_to_execute=step_keys_to_execute,
        ):
            if event.is_step_failure:
                pipeline_success = False
            yield event
    finally:
        if pipeline_success:
            yield DagsterEvent.pipeline_success(pipeline_context)

        else:
            yield DagsterEvent.pipeline_failure(pipeline_context)
Code Example #6
File: api.py Project: zkan/dagster
def _pipeline_execution_iterator(pipeline_context, execution_plan,
                                 pipeline_run):
    '''A complete execution of a pipeline. Yields pipeline start, success,
    and failure events. Defers to _steps_execution_iterator for step execution.
    '''
    check.inst_param(pipeline_context, 'pipeline_context',
                     SystemPipelineExecutionContext)
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(pipeline_run, 'pipeline_run', PipelineRun)
    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_success = True
    try:
        for event in _steps_execution_iterator(pipeline_context,
                                               execution_plan=execution_plan,
                                               pipeline_run=pipeline_run):
            if event.is_step_failure:
                pipeline_success = False
            yield event
    except (Exception, KeyboardInterrupt):
        pipeline_success = False
        raise  # finally block will run before this is re-raised
    finally:
        if pipeline_success:
            yield DagsterEvent.pipeline_success(pipeline_context)

        else:
            yield DagsterEvent.pipeline_failure(pipeline_context)
Code Example #7
File: api.py Project: danieldiamond/dagster
def _pipeline_execution_iterator(pipeline_context,
                                 execution_plan,
                                 retries=None):
    '''A complete execution of a pipeline. Yields pipeline start, success,
    and failure events.

    Args:
        pipeline_context (SystemPipelineExecutionContext):
        execution_plan (ExecutionPlan):
        retries (None): Must be None. This is to align the signature of
            `_pipeline_execution_iterator` with that of
            `dagster.core.execution.plan.execute_plan.inner_plan_execution_iterator` so the same
            machinery in _ExecuteRunWithPlanIterable can call them without unpleasant workarounds.
            (Default: None)
    '''
    check.inst_param(pipeline_context, 'pipeline_context',
                     SystemPipelineExecutionContext)
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.invariant(
        retries is None,
        'Programming error: Retries not supported in _pipeline_execution_iterator'
    )
    yield DagsterEvent.pipeline_start(pipeline_context)

    steps_started = set([])
    pipeline_success = True
    generator_closed = False
    try:
        for event in pipeline_context.executor.execute(pipeline_context,
                                                       execution_plan):
            if event.is_step_start:
                steps_started.add(event.step_key)
            if event.is_step_success:
                if event.step_key not in steps_started:
                    pipeline_success = False
                else:
                    steps_started.remove(event.step_key)
            if event.is_step_failure:
                pipeline_success = False
            yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        pipeline_success = False
        raise
    except (Exception, KeyboardInterrupt):
        pipeline_success = False
        raise  # finally block will run before this is re-raised
    finally:
        if steps_started:
            pipeline_success = False
        if pipeline_success:
            event = DagsterEvent.pipeline_success(pipeline_context)
        else:
            event = DagsterEvent.pipeline_failure(pipeline_context)
        if not generator_closed:
            yield event
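
Example #7 keeps a steps_started set so that a step which starts but never reports success (for example, because the run was interrupted mid-step) also marks the pipeline as failed. The bookkeeping in isolation, with hypothetical event tuples standing in for DagsterEvent:

def classify_run(events):
    # events: iterable of ('start' | 'success' | 'failure', step_key) tuples.
    steps_started = set()
    success = True
    for kind, key in events:
        if kind == 'start':
            steps_started.add(key)
        elif kind == 'success':
            if key not in steps_started:
                success = False        # success without a matching start
            else:
                steps_started.remove(key)
        elif kind == 'failure':
            success = False
    if steps_started:                  # started but never completed
        success = False
    return success


assert classify_run([('start', 'a'), ('success', 'a')]) is True
assert classify_run([('start', 'a')]) is False    # interrupted mid-step
assert classify_run([('success', 'a')]) is False  # success with no start event
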
Code Example #8
File: api.py Project: lewismacdonald/dagster
def _pipeline_execution_iterator(pipeline_context, execution_plan):
    """A complete execution of a pipeline. Yields pipeline start, success,
    and failure events.

    Args:
        pipeline_context (SystemPipelineExecutionContext):
        execution_plan (ExecutionPlan):
    """
    check.inst_param(pipeline_context, "pipeline_context",
                     SystemPipelineExecutionContext)
    check.inst_param(execution_plan, "execution_plan", ExecutionPlan)

    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_exception_info = None
    failed_steps = []
    generator_closed = False
    try:
        for event in pipeline_context.executor.execute(pipeline_context,
                                                       execution_plan):
            if event.is_step_failure:
                failed_steps.append(event.step_key)

            yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        pipeline_exception_info = serializable_error_info_from_exc_info(
            sys.exc_info())
        raise
    except (Exception, KeyboardInterrupt):  # pylint: disable=broad-except
        pipeline_exception_info = serializable_error_info_from_exc_info(
            sys.exc_info())
        raise  # finally block will run before this is re-raised
    finally:
        if pipeline_exception_info:
            event = DagsterEvent.pipeline_failure(
                pipeline_context,
                "An exception was thrown during execution.",
                pipeline_exception_info,
            )
        elif failed_steps:
            event = DagsterEvent.pipeline_failure(
                pipeline_context,
                "Steps failed: {}.".format(failed_steps),
            )
        else:
            event = DagsterEvent.pipeline_success(pipeline_context)
        if not generator_closed:
            yield event
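
Example #8 drops the boolean flag and instead captures the active exception as serializable error info, so the failure event can carry a message and traceback. A rough, standard-library-only stand-in for that capture (the real serializable_error_info_from_exc_info helper lives in dagster's error-serialization utilities and has a richer structure):

import sys
import traceback


def capture_error_info():
    # Turn the currently handled exception into plain data that can be
    # attached to an event or written to an event log.
    exc_type, exc_value, exc_tb = sys.exc_info()
    return {
        'cls_name': exc_type.__name__,
        'message': str(exc_value),
        'stack': traceback.format_exception(exc_type, exc_value, exc_tb),
    }


try:
    raise ValueError('boom')
except ValueError:
    info = capture_error_info()

print(info['cls_name'], info['message'])  # ValueError boom
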
Code Example #9
File: api.py Project: databill86/dagster
def _steps_execution_iterator(pipeline_context, execution_plan, run_config,
                              step_keys_to_execute):
    '''Iterates over execution of individual steps yielding the associated events.
    Does not yield pipeline-level events aside from init failure when the context fails to construct.
    '''
    check.inst_param(pipeline_context, 'pipeline_context',
                     (DagsterEvent, SystemPipelineExecutionContext))
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(run_config, 'run_config', RunConfig)
    check.opt_list_param(step_keys_to_execute,
                         'step_keys_to_execute',
                         of_type=str)

    if (isinstance(pipeline_context, DagsterEvent)
            and pipeline_context.event_type  # pylint: disable=no-member
            == DagsterEventType.PIPELINE_INIT_FAILURE):
        return ensure_gen(pipeline_context)

    if not step_keys_to_execute:
        step_keys_to_execute = [
            step.key for step in execution_plan.topological_steps()
        ]

    if not step_keys_to_execute:
        pipeline_context.log.debug(
            'Pipeline {pipeline} has no steps to execute and no execution will happen'
            .format(pipeline=pipeline_context.pipeline_def.display_name))
        return ensure_gen(DagsterEvent.pipeline_success(pipeline_context))
    else:
        for step_key in step_keys_to_execute:
            if not execution_plan.has_step(step_key):
                raise DagsterExecutionStepNotFoundError(
                    'Execution plan does not contain step \'{}\''.format(
                        step_key),
                    step_key=step_key,
                )

    _setup_reexecution(run_config, pipeline_context, execution_plan)

    return _invoke_executor_on_plan(pipeline_context, execution_plan,
                                    step_keys_to_execute)
Code Example #10
File: api.py Project: markjm610/dagster
def _pipeline_execution_iterator(pipeline_context, execution_plan):
    """A complete execution of a pipeline. Yields pipeline start, success,
    and failure events.

    Args:
        pipeline_context (SystemPipelineExecutionContext):
        execution_plan (ExecutionPlan):
    """
    check.inst_param(pipeline_context, "pipeline_context",
                     SystemPipelineExecutionContext)
    check.inst_param(execution_plan, "execution_plan", ExecutionPlan)

    yield DagsterEvent.pipeline_start(pipeline_context)

    steps_started = set([])
    pipeline_success_ref = BoolRef(True)
    generator_closed = False
    try:
        for event in _core_execution_iterator(pipeline_context, execution_plan,
                                              steps_started,
                                              pipeline_success_ref):
            yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        pipeline_success_ref.value = False
        raise
    finally:
        if steps_started:
            pipeline_success_ref.value = False
        if pipeline_success_ref.value:
            event = DagsterEvent.pipeline_success(pipeline_context)
        else:
            event = DagsterEvent.pipeline_failure(pipeline_context)
        if not generator_closed:
            yield event
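
Example #10 moves the per-event inspection into a helper (_core_execution_iterator) and threads a BoolRef through so the inner iterator can flip a flag that the outer generator's finally block reads. A simplified re-implementation of that mutable-flag pattern (the class and function names below are only for illustration):

class BoolRef:
    # Tiny mutable holder so an inner generator can flip a flag that the
    # caller's finally block inspects after iteration ends or is interrupted.
    def __init__(self, value):
        self.value = value


def inner(results, success_ref):
    for ok in results:
        if not ok:
            success_ref.value = False
        yield ok


def outer(results):
    success_ref = BoolRef(True)
    try:
        yield from inner(results, success_ref)
    finally:
        yield 'SUCCESS' if success_ref.value else 'FAILURE'


print(list(outer([True, False, True])))  # [True, False, True, 'FAILURE']
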
Code Example #11
File: api.py Project: tristaneljed/dagster
def _execute_plan_iterator(pipeline_context, execution_plan, run_config,
                           step_keys_to_execute):
    check.inst_param(pipeline_context, 'pipeline_context',
                     (DagsterEvent, SystemPipelineExecutionContext))
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(run_config, 'run_config', RunConfig)
    check.opt_list_param(step_keys_to_execute,
                         'step_keys_to_execute',
                         of_type=str)

    if (isinstance(pipeline_context, DagsterEvent)
            and pipeline_context.event_type  # pylint: disable=no-member
            == DagsterEventType.PIPELINE_INIT_FAILURE):
        return ensure_gen(pipeline_context)

    if not step_keys_to_execute:
        step_keys_to_execute = [
            step.key for step in execution_plan.topological_steps()
        ]

    if not step_keys_to_execute:
        pipeline_context.log.debug(
            'Pipeline {pipeline} has no steps to execute and no execution will happen'
            .format(pipeline=pipeline_context.pipeline_def.display_name))
        return ensure_gen(DagsterEvent.pipeline_success(pipeline_context))
    else:
        for step_key in step_keys_to_execute:
            if not execution_plan.has_step(step_key):
                raise DagsterExecutionStepNotFoundError(
                    'Execution plan does not contain step "{}"'.format(
                        step_key),
                    step_key=step_key)

    _setup_reexecution(run_config, pipeline_context, execution_plan)

    return _invoke_executor_on_plan(pipeline_context, execution_plan,
                                    step_keys_to_execute)
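
Examples #9 and #11 resolve the step selection the same way: default to every step of the plan in topological order, then reject any requested key the plan does not contain. The selection logic in isolation (the real code raises DagsterExecutionStepNotFoundError; this sketch uses a plain ValueError and hypothetical names):

def resolve_step_keys(plan_step_keys, step_keys_to_execute=None):
    # plan_step_keys: the plan's step keys in topological order.
    if not step_keys_to_execute:
        return list(plan_step_keys)
    known = set(plan_step_keys)
    for key in step_keys_to_execute:
        if key not in known:
            raise ValueError("Execution plan does not contain step '{}'".format(key))
    return list(step_keys_to_execute)


assert resolve_step_keys(['a', 'b', 'c']) == ['a', 'b', 'c']
assert resolve_step_keys(['a', 'b', 'c'], ['b']) == ['b']
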
Code Example #12
File: api.py Project: prezi/dagster
def pipeline_execution_iterator(
        pipeline_context: PlanOrchestrationContext,
        execution_plan: ExecutionPlan) -> Iterator[DagsterEvent]:
    """A complete execution of a pipeline. Yields pipeline start, success,
    and failure events.

    Args:
        pipeline_context (PlanOrchestrationContext):
        execution_plan (ExecutionPlan):
    """

    yield DagsterEvent.pipeline_start(pipeline_context)

    pipeline_exception_info = None
    pipeline_canceled_info = None
    failed_steps = []
    generator_closed = False
    try:
        for event in pipeline_context.executor.execute(pipeline_context,
                                                       execution_plan):
            if event.is_step_failure:
                failed_steps.append(event.step_key)

            yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        pipeline_exception_info = serializable_error_info_from_exc_info(
            sys.exc_info())
        if pipeline_context.raise_on_error:
            raise
    except (KeyboardInterrupt, DagsterExecutionInterruptedError):
        pipeline_canceled_info = serializable_error_info_from_exc_info(
            sys.exc_info())
        if pipeline_context.raise_on_error:
            raise
    except Exception:  # pylint: disable=broad-except
        pipeline_exception_info = serializable_error_info_from_exc_info(
            sys.exc_info())
        if pipeline_context.raise_on_error:
            raise  # finally block will run before this is re-raised
    finally:
        if pipeline_canceled_info:
            reloaded_run = pipeline_context.instance.get_run_by_id(
                pipeline_context.run_id)
            if reloaded_run and reloaded_run.status == PipelineRunStatus.CANCELING:
                event = DagsterEvent.pipeline_canceled(pipeline_context,
                                                       pipeline_canceled_info)
            else:
                event = DagsterEvent.pipeline_failure(
                    pipeline_context,
                    "Execution was interrupted unexpectedly. "
                    "No user initiated termination request was found, treating as failure.",
                    pipeline_canceled_info,
                )
        elif pipeline_exception_info:
            event = DagsterEvent.pipeline_failure(
                pipeline_context,
                "An exception was thrown during execution.",
                pipeline_exception_info,
            )
        elif failed_steps:
            event = DagsterEvent.pipeline_failure(
                pipeline_context,
                "Steps failed: {}.".format(failed_steps),
            )
        else:
            event = DagsterEvent.pipeline_success(pipeline_context)
        if not generator_closed:
            yield event
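
Example #12, the most recent variant, separates interruption from ordinary failure: KeyboardInterrupt and DagsterExecutionInterruptedError are recorded as cancellation info, and the finally block reloads the run to check whether a user actually requested termination (status CANCELING) before emitting a canceled rather than a failed event. The priority order of that terminal decision can be summarized as follows (hypothetical function and inputs; the real code builds DagsterEvent objects):

def terminal_event(canceled_info, exception_info, failed_steps, run_is_canceling):
    # Mirrors the branch order of example #12's finally block.
    if canceled_info:
        if run_is_canceling:
            return ('PIPELINE_CANCELED', canceled_info)
        return ('PIPELINE_FAILURE',
                'Execution was interrupted unexpectedly. '
                'No user initiated termination request was found, treating as failure.',
                canceled_info)
    if exception_info:
        return ('PIPELINE_FAILURE', 'An exception was thrown during execution.',
                exception_info)
    if failed_steps:
        return ('PIPELINE_FAILURE', 'Steps failed: {}.'.format(failed_steps))
    return ('PIPELINE_SUCCESS',)


assert terminal_event(None, None, [], False) == ('PIPELINE_SUCCESS',)
assert terminal_event(None, None, ['step_a'], False)[0] == 'PIPELINE_FAILURE'
assert terminal_event('interrupt', None, [], True)[0] == 'PIPELINE_CANCELED'
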