Example #1
def validate_pipeline_config(graphene_info, selector, environment_dict, mode):
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.inst_param(selector, 'selector', ExecutionSelector)
    check.opt_str_param(mode, 'mode')

    pipeline_def = get_pipeline_def_from_selector(graphene_info, selector)
    get_validated_config(graphene_info, pipeline_def, environment_dict, mode)
    return graphene_info.schema.type_named('PipelineConfigValidationValid')(
        DauphinPipeline.from_pipeline_def(pipeline_def))
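
The resolver above relies on a validate-or-raise convention: get_validated_config (Example #5) raises UserFacingGraphQLError wrapping a PipelineConfigValidationInvalid type when the environment dict fails validation, so the success path only has to construct PipelineConfigValidationValid. Below is a minimal, self-contained sketch of that convention; every name in it is an illustrative stand-in, not part of dagster_graphql.

# Illustrative sketch only; these classes are stand-ins, not dagster_graphql types.
class UserFacingErrorSketch(Exception):
    def __init__(self, graphql_payload):
        super().__init__(graphql_payload)
        self.graphql_payload = graphql_payload  # what the schema layer would surface

class ValidationResultSketch:
    def __init__(self, success, errors=()):
        self.success, self.errors = success, list(errors)

def validate_or_raise(validate_fn, config):
    result = validate_fn(config)
    if not result.success:
        # Failure is signalled by raising, as get_validated_config does in Example #5.
        raise UserFacingErrorSketch(
            {'__typename': 'PipelineConfigValidationInvalid', 'errors': result.errors}
        )
    return {'__typename': 'PipelineConfigValidationValid'}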
Example #2
def for_validation_errors(pipeline, errors):
    return DauphinPipelineConfigValidationInvalid(
        pipeline=DauphinPipeline.from_pipeline_def(pipeline),
        errors=[
            DauphinPipelineConfigValidationError.from_dagster_error(
                pipeline.get_config_schema_snapshot(), err)
            for err in errors
        ],
    )
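
for_validation_errors maps each dagster validation error to a GraphQL-facing error object, handing over the pipeline's config schema snapshot so each error can point back into the config schema. A rough, self-contained model of that per-error conversion (the dataclass and field names here are assumed for illustration, not dagster's actual error shape):

from dataclasses import dataclass
from typing import List

@dataclass
class DomainErrorSketch:          # stand-in for a dagster evaluation error
    message: str
    path: List[str]

def to_graphql_errors(config_schema_snapshot, errors):
    # One GraphQL-shaped error per domain error, mirroring the comprehension above;
    # a real implementation would use config_schema_snapshot to resolve type info.
    return [{'message': err.message, 'stack': err.path} for err in errors]

# to_graphql_errors(None, [DomainErrorSketch('missing field "solids"', [])])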
Example #3
def get_execution_plan(graphene_info, selector, environment_dict, mode):
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.inst_param(selector, 'selector', ExecutionSelector)
    check.opt_str_param(mode, 'mode')

    pipeline_def = get_pipeline_def_from_selector(graphene_info, selector)
    get_validated_config(graphene_info, pipeline_def, environment_dict, mode)
    return graphene_info.schema.type_named('ExecutionPlan')(
        DauphinPipeline.from_pipeline_def(pipeline_def),
        create_execution_plan(pipeline_def, environment_dict,
                              RunConfig(mode=mode)),
    )
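
Like the other resolvers here, get_execution_plan opens with guards from dagster's check module: check.inst_param and check.opt_str_param fail fast when an argument has the wrong type. A simplified reimplementation of just those two guards, to show the contract they enforce (the real module covers many more cases):

class CheckErrorSketch(Exception):
    """Stand-in for the exception dagster's check module raises."""

def inst_param(obj, param_name, expected_type):
    # Reject anything that is not an instance of expected_type.
    if not isinstance(obj, expected_type):
        raise CheckErrorSketch(
            'Param "{}" must be a {}, got {}'.format(
                param_name, expected_type.__name__, type(obj).__name__
            )
        )
    return obj

def opt_str_param(obj, param_name):
    # Accept None or str; anything else is a type error.
    if obj is not None and not isinstance(obj, str):
        raise CheckErrorSketch('Param "{}" must be a str or None'.format(param_name))
    return obj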
Example #4
def get_dauphin_pipeline_from_selector(graphene_info, selector):
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.inst_param(selector, 'selector', ExecutionSelector)

    if isinstance(graphene_info.context, DagsterGraphQLContext):
        pipeline_definition = get_pipeline_def_from_selector(graphene_info, selector)
        return DauphinPipeline.from_pipeline_def(pipeline_definition)

    # TODO: Support solid sub selection.
    check.invariant(
        not selector.solid_subset,
        desc="DagsterSnapshotGraphQLContext doesn't support pipeline sub-selection.",
    )

    repository_index = graphene_info.context.get_repository_index()
    if not repository_index.has_pipeline_index(selector.name):
        raise UserFacingGraphQLError(
            graphene_info.schema.type_named('PipelineNotFoundError')(pipeline_name=selector.name)
        )
    return DauphinPipeline(repository_index.get_pipeline_index(selector.name))
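
The snapshot branch above follows a lookup-or-raise pattern: if the repository index has no pipeline with the requested name, a user-facing PipelineNotFoundError is raised instead of returning None. A minimal generic sketch of that pattern (the index here is a plain dict, not dagster's repository index):

class NotFoundErrorSketch(Exception):
    def __init__(self, pipeline_name):
        super().__init__('Pipeline "{}" not found'.format(pipeline_name))
        self.pipeline_name = pipeline_name

def get_pipeline_or_raise(index, name):
    # index: mapping of pipeline name -> pipeline object (illustrative stand-in).
    if name not in index:
        raise NotFoundErrorSketch(name)
    return index[name]

# get_pipeline_or_raise({'hello_pipeline': object()}, 'hello_pipeline')  # returns the object
# get_pipeline_or_raise({}, 'missing')                                   # raises NotFoundErrorSketch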
Example #5
def get_validated_config(graphene_info, pipeline_def, environment_dict, mode):
    check.str_param(mode, 'mode')
    check.inst_param(pipeline_def, 'pipeline_def', PipelineDefinition)

    environment_schema = create_environment_schema(pipeline_def, mode)

    validated_config = validate_config(environment_schema.environment_type,
                                       environment_dict)

    if not validated_config.success:
        raise UserFacingGraphQLError(
            graphene_info.schema.type_named('PipelineConfigValidationInvalid')(
                pipeline=DauphinPipeline.from_pipeline_def(pipeline_def),
                errors=[
                    graphene_info.schema.type_named(
                        'PipelineConfigValidationError').from_dagster_error(
                            pipeline_def.get_config_schema_snapshot(), err)
                    for err in validated_config.errors
                ],
            ))

    return validated_config
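
get_validated_config assumes validate_config returns a result object exposing success and errors, and only hands it back when validation passed; Examples #1 and #3 call it purely for this raise-on-failure side effect before building their success types. A small sketch of such a result object being produced and checked (the validator and its rule are made up for illustration):

from collections import namedtuple

# Stand-in for the kind of object validate_config is assumed to return.
ValidationResult = namedtuple('ValidationResult', 'success errors')

def fake_validate(environment_dict):
    # Pretend the only requirement is a top-level 'solids' key.
    if 'solids' in environment_dict:
        return ValidationResult(success=True, errors=[])
    return ValidationResult(success=False, errors=["missing required field 'solids'"])

result = fake_validate({'solids': {}})
assert result.success and not result.errors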
Example #6
def _do_execute_plan(graphene_info, execution_params, pipeline_def):
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.inst_param(execution_params, 'execution_params', ExecutionParams)

    run_id = execution_params.execution_metadata.run_id

    pipeline_run = graphene_info.context.instance.get_run_by_id(run_id)
    if not pipeline_run:
        # TODO switch to raising a UserFacingError if the run_id cannot be found
        # https://github.com/dagster-io/dagster/issues/1876
        pipeline_run = PipelineRun(
            pipeline_name=pipeline_def.name,
            run_id=run_id,
            environment_dict=execution_params.environment_dict,
            mode=execution_params.mode or pipeline_def.get_default_mode_name(),
            tags=execution_params.execution_metadata.tags or {},
        )

    execution_plan = create_execution_plan(
        pipeline=pipeline_def,
        environment_dict=execution_params.environment_dict,
        run_config=pipeline_run,
    )

    if execution_params.step_keys:
        for step_key in execution_params.step_keys:
            if not execution_plan.has_step(step_key):
                raise UserFacingGraphQLError(
                    graphene_info.schema.type_named('InvalidStepError')(invalid_step_key=step_key)
                )

        execution_plan = execution_plan.build_subset_plan(execution_params.step_keys)

    event_logs = []

    def _on_event_record(record):
        if record.is_dagster_event:
            event_logs.append(record)

    graphene_info.context.instance.add_event_listener(run_id, _on_event_record)

    execute_plan(
        execution_plan=execution_plan,
        environment_dict=execution_params.environment_dict,
        pipeline_run=pipeline_run,
        instance=graphene_info.context.instance,
    )

    dauphin_pipeline = DauphinPipeline.from_pipeline_def(pipeline_def)

    def to_graphql_event(event_record):
        return from_dagster_event_record(
            graphene_info, event_record, dauphin_pipeline, execution_plan
        )

    return graphene_info.schema.type_named('ExecutePlanSuccess')(
        pipeline=dauphin_pipeline,
        has_failures=any(
            er
            for er in event_logs
            if er.is_dagster_event and er.dagster_event.event_type == DagsterEventType.STEP_FAILURE
        ),
        step_events=list(map(to_graphql_event, event_logs)),
        raw_event_records=list(map(serialize_dagster_namedtuple, event_logs)),
    )
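
The core of _do_execute_plan is an accumulate-then-translate pattern: a listener appends every dagster event record emitted during execute_plan to event_logs, and after execution those records are mapped to GraphQL events and scanned for STEP_FAILURE. A compact, self-contained model of that pattern (the event bus and record type below are invented for illustration):

from collections import defaultdict, namedtuple

EventRecord = namedtuple('EventRecord', 'run_id event_type payload')

class EventBusSketch:
    """Toy stand-in for the instance's add_event_listener hookup."""
    def __init__(self):
        self._listeners = defaultdict(list)

    def add_event_listener(self, run_id, callback):
        self._listeners[run_id].append(callback)

    def emit(self, record):
        for callback in self._listeners[record.run_id]:
            callback(record)

bus = EventBusSketch()
event_logs = []
bus.add_event_listener('run-1', event_logs.append)

# Pretend these were emitted while the plan executed.
bus.emit(EventRecord('run-1', 'STEP_SUCCESS', {}))
bus.emit(EventRecord('run-1', 'STEP_FAILURE', {'error': 'boom'}))

has_failures = any(er.event_type == 'STEP_FAILURE' for er in event_logs)
assert has_failures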