def test_create_subplan_middle_step():
    pipeline_def = define_two_int_pipeline()
    typed_environment = create_typed_environment(pipeline_def, None)
    execution_plan = create_execution_plan(pipeline_def)
    with yield_context(pipeline_def, typed_environment,
                       ExecutionMetadata()) as context:
        subplan = create_subplan(
            ExecutionPlanInfo(context=context,
                              pipeline=pipeline_def,
                              environment=typed_environment),
            StepBuilderState(pipeline_name=pipeline_def.name),
            execution_plan,
            ExecutionPlanSubsetInfo(['add_one.transform'],
                                    {'add_one.transform': {
                                        'num': 2
                                    }}),
        )
        assert subplan
        steps = subplan.topological_steps()
        assert len(steps) == 2
        assert steps[0].key == 'add_one.transform.input.num.value'
        assert not steps[0].step_inputs
        assert len(steps[0].step_outputs) == 1
        assert steps[1].key == 'add_one.transform'
        assert len(steps[1].step_inputs) == 1
        step_input = steps[1].step_inputs[0]
        assert step_input.prev_output_handle.step.key == 'add_one.transform.input.num.value'
        assert step_input.prev_output_handle.output_name == VALUE_OUTPUT
        assert len(steps[1].step_outputs) == 1
        assert len(subplan.topological_steps()) == 2
        assert [step.key for step in subplan.topological_steps()] == [
            'add_one.transform.input.num.value',
            'add_one.transform',
        ]
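The subplan tests on this page build on a define_two_int_pipeline fixture that is not shown here. A minimal sketch of what such a fixture might look like, assuming the lambda_solid / DependencyDefinition API from the same era of dagster (the solid names return_one and add_one are inferred from the step keys asserted above):

from dagster import (DependencyDefinition, InputDefinition, PipelineDefinition,
                     lambda_solid)


@lambda_solid
def return_one():
    return 1


@lambda_solid(inputs=[InputDefinition('num')])
def add_one(num):
    return num + 1


def define_two_int_pipeline():
    # add_one receives its 'num' input from return_one, which yields the
    # 'return_one.transform' and 'add_one.transform' step keys asserted above
    return PipelineDefinition(
        name='two_int_pipeline',
        solids=[return_one, add_one],
        dependencies={'add_one': {'num': DependencyDefinition('return_one')}},
    )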
Example #2
def run_test_pipeline(pipeline):
    typed_environment = create_typed_environment(pipeline, TEST_ENVIRONMENT)

    execution_metadata = ExecutionMetadata(run_id=str(uuid.uuid4()))
    with yield_context(pipeline, typed_environment,
                       execution_metadata) as context:
        execution_plan = create_execution_plan_core(
            ExecutionPlanInfo(context, pipeline, typed_environment))
        return execute_plan(context, execution_plan)
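A hypothetical call site for this helper, assuming define_test_pipeline is a fixture that returns a PipelineDefinition whose config is satisfied by TEST_ENVIRONMENT:

def test_pipeline_executes():
    # builds the typed environment, opens the context via yield_context,
    # and returns whatever execute_plan produced for the full plan
    results = run_test_pipeline(define_test_pipeline())
    assert results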
Example #3
def run_test_pipeline(pipeline):
    typed_environment = create_typed_environment(pipeline, TEST_ENVIRONMENT)

    reentrant_info = ReentrantInfo(run_id=str(uuid.uuid4()))
    with yield_context(pipeline, typed_environment, reentrant_info) as context:
        execution_plan = create_execution_plan_core(
            ExecutionPlanInfo(context, pipeline, typed_environment))
        with context.value('pipeline', pipeline.display_name):
            results = execute_plan(context, execution_plan)
            return results
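This variant targets a version of the dagster API in which the run metadata object was named ReentrantInfo rather than ExecutionMetadata. The context.value block presumably attaches the pipeline name to the logging context for the duration of the run; a minimal sketch of that idea, assuming context is the execution context yielded above:

with context.value('pipeline', pipeline.display_name):
    # log records emitted here presumably carry pipeline=<display_name>
    # as structured context in addition to the run_id
    context.info('executing plan')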
Example #4
    def define_out_of_pipeline_info(self, context_config):
        check.str_param(self.solid_def_name, 'solid_def_name')
        solid = Solid(self.solid_def_name, self.solid_def)
        pipeline_def = PipelineDefinition([self.solid_def],
                                          name="Ephemeral Notebook Pipeline")
        from dagster.core.execution import create_typed_context

        typed_context = create_typed_context(
            pipeline_def, {} if context_config is None else context_config)
        from dagster.core.system_config.objects import EnvironmentConfig

        dummy_environment_config = EnvironmentConfig(context=typed_context)
        with yield_context(pipeline_def, dummy_environment_config,
                           ExecutionMetadata(run_id='')) as context:
            self.info = TransformExecutionInfo(context, None, solid,
                                               pipeline_def)
        return self.info
Example #5
def test_create_subplan_source_step():
    pipeline_def = define_two_int_pipeline()
    typed_environment = create_typed_environment(pipeline_def, None)
    execution_plan = create_execution_plan(pipeline_def)
    with yield_context(pipeline_def, typed_environment) as context:
        subplan = create_subplan(
            ExecutionPlanInfo(context=context,
                              pipeline=pipeline_def,
                              environment=typed_environment),
            execution_plan,
            ExecutionPlanSubsetInfo(['return_one.transform']),
        )
        assert subplan
        assert len(subplan.steps) == 1
        assert subplan.steps[0].key == 'return_one.transform'
        assert not subplan.steps[0].step_inputs
        assert len(subplan.steps[0].step_outputs) == 1
        assert len(subplan.topological_steps()) == 1
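Once built, the subplan can presumably be executed the same way as a full plan (see Example #2), while still inside the same yield_context block:

# hypothetical continuation inside the with-block above
results = execute_plan(context, subplan)
assert results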
Example #6
    def populate_context(self, run_id, pipeline_def, marshal_dir,
                         environment_config, output_log_path):
        check.dict_param(environment_config, 'environment_config')
        check.invariant(pipeline_def.has_solid_def(self.solid_def_name))

        self.marshal_dir = marshal_dir
        self.populated_by_papermill = True
        loggers = None
        # 0 presumably serves as a "no log path" sentinel here, since None is
        # awkward to pass through papermill-injected parameters
        if output_log_path != 0:
            event_logger = construct_json_event_logger(output_log_path)
            loggers = [event_logger]
        # do not include event_callback in ExecutionMetadata,
        # since that is handled by the side channel established by event_logger
        execution_metadata = ExecutionMetadata(run_id, loggers=loggers)
        solid = Solid(self.solid_def_name, self.solid_def)
        typed_environment = construct_environment_config(environment_config)
        with yield_context(pipeline_def, typed_environment,
                           execution_metadata) as context:
            solid_config = None
            self.info = TransformExecutionInfo(context, solid_config, solid,
                                               pipeline_def)

        return self.info