class DauphinMarshalledInput(dauphin.InputObjectType):
    # Input object identifying a marshalled input by its name and a storage key.
    class Meta(object):  # explicit object base for consistency with every other Meta in this file
        name = 'MarshalledInput'

    input_name = dauphin.NonNull(dauphin.String)
    key = dauphin.NonNull(dauphin.String)
class DauphinPipelineRunLogsSubscriptionFailure(dauphin.ObjectType):
    # Failure payload for the pipeline-run-logs subscription.
    class Meta(object):
        name = 'PipelineRunLogsSubscriptionFailure'

    message = dauphin.NonNull(dauphin.String)  # human-readable failure description
    # Nullable field — presumably only populated when the failure is an unknown
    # run id; the resolver is not visible in this chunk (NOTE(review): confirm).
    missingRunId = dauphin.Field(dauphin.String)
class DauphinPipelineEvent(dauphin.Interface):
    # GraphQL interface for events scoped to a pipeline; implementors carry the
    # pipeline name they belong to.
    class Meta(object):
        name = 'PipelineEvent'

    pipelineName = dauphin.NonNull(dauphin.String)
class DauphinStepExpectationResultEvent(dauphin.ObjectType):
    # Step-level event carrying an expectation result.
    class Meta(object):
        name = "StepExpectationResultEvent"
        interfaces = (DauphinMessageEvent, DauphinStepEvent)

    expectation_result = dauphin.NonNull(DauphinExpectationResult)
class DauphinEventMetadataEntry(dauphin.Interface):
    # Interface for typed metadata entries attached to events; concrete types
    # (url/float/markdown entries elsewhere in this file) add a value field.
    class Meta(object):
        name = 'EventMetadataEntry'

    label = dauphin.NonNull(dauphin.String)
    description = dauphin.String()  # optional
class DauphinEventFloatMetadataEntry(dauphin.ObjectType):
    # EventMetadataEntry implementation whose value is a float.
    class Meta(object):
        name = "EventFloatMetadataEntry"
        interfaces = (DauphinEventMetadataEntry,)

    value = dauphin.NonNull(dauphin.Float)
class DauphinStepMaterializationEvent(dauphin.ObjectType):
    # Step-level event carrying a materialization record.
    class Meta(object):
        name = "StepMaterializationEvent"
        interfaces = (DauphinMessageEvent, DauphinStepEvent)

    materialization = dauphin.NonNull(DauphinMaterialization)
class Arguments(object):
    # Nested Arguments class for a mutation keyed by schedule name.
    # NOTE(review): the enclosing mutation class is not visible in this chunk.
    scheduleName = dauphin.NonNull(dauphin.String)
class Arguments(object):
    # Nested Arguments class for a mutation keyed by run id.
    # NOTE(review): the enclosing mutation class is not visible in this chunk.
    runId = dauphin.NonNull(dauphin.String)
class DauphinCancelPipelineExecutionSuccess(dauphin.ObjectType):
    # Success payload for cancelling a pipeline execution; carries the run.
    class Meta(object):
        name = 'CancelPipelineExecutionSuccess'

    run = dauphin.Field(dauphin.NonNull('PipelineRun'))
class DauphinCancelPipelineExecutionFailure(dauphin.ObjectType):
    # Failure payload for cancelling a pipeline execution: the run plus a
    # human-readable message explaining why cancellation failed.
    class Meta(object):
        name = 'CancelPipelineExecutionFailure'

    run = dauphin.NonNull('PipelineRun')
    message = dauphin.NonNull(dauphin.String)
class DauphinDeletePipelineRunSuccess(dauphin.ObjectType):
    # Success payload for deleting a pipeline run.
    class Meta(object):
        name = 'DeletePipelineRunSuccess'

    runId = dauphin.NonNull(dauphin.String)  # id of the deleted run
class DauphinStepOutputHandle(dauphin.InputObjectType):
    # Input object identifying a specific output of a specific execution step.
    class Meta(object):
        name = 'StepOutputHandle'

    stepKey = dauphin.NonNull(dauphin.String)
    outputName = dauphin.NonNull(dauphin.String)
class DauphinQuery(dauphin.ObjectType):
    # Root GraphQL Query type: pipelines, runs, config/runtime type lookup,
    # config validation, and execution plans.
    class Meta(object):  # explicit object base for consistency with the other Meta classes in this file
        name = 'Query'

    version = dauphin.NonNull(dauphin.String)
    pipelineOrError = dauphin.Field(
        dauphin.NonNull('PipelineOrError'), params=dauphin.NonNull('ExecutionSelector')
    )
    pipeline = dauphin.Field(
        dauphin.NonNull('Pipeline'), params=dauphin.NonNull('ExecutionSelector')
    )
    pipelinesOrError = dauphin.NonNull('PipelinesOrError')
    pipelines = dauphin.Field(dauphin.NonNull('PipelineConnection'))

    configTypeOrError = dauphin.Field(
        dauphin.NonNull('ConfigTypeOrError'),
        pipelineName=dauphin.Argument(dauphin.NonNull(dauphin.String)),
        configTypeName=dauphin.Argument(dauphin.NonNull(dauphin.String)),
        mode=dauphin.Argument(dauphin.NonNull(dauphin.String)),
    )

    runtimeTypeOrError = dauphin.Field(
        dauphin.NonNull('RuntimeTypeOrError'),
        pipelineName=dauphin.Argument(dauphin.NonNull(dauphin.String)),
        runtimeTypeName=dauphin.Argument(dauphin.NonNull(dauphin.String)),
    )

    pipelineRuns = dauphin.non_null_list('PipelineRun')
    pipelineRunOrError = dauphin.Field(
        dauphin.NonNull('PipelineRunOrError'), runId=dauphin.NonNull(dauphin.ID)
    )

    isPipelineConfigValid = dauphin.Field(
        dauphin.NonNull('PipelineConfigValidationResult'),
        args={
            'pipeline': dauphin.Argument(dauphin.NonNull('ExecutionSelector')),
            'environmentConfigData': dauphin.Argument('EnvironmentConfigData'),
            'mode': dauphin.Argument(dauphin.NonNull(dauphin.String)),
        },
    )

    executionPlan = dauphin.Field(
        dauphin.NonNull('ExecutionPlanResult'),
        args={
            'pipeline': dauphin.Argument(dauphin.NonNull('ExecutionSelector')),
            'environmentConfigData': dauphin.Argument('EnvironmentConfigData'),
            'mode': dauphin.Argument(dauphin.NonNull(dauphin.String)),
        },
    )

    # Resolvers below are thin delegations to the fetcher helpers imported at
    # module level; kwargs carry the GraphQL field arguments.
    def resolve_configTypeOrError(self, graphene_info, **kwargs):
        return get_config_type(
            graphene_info, kwargs['pipelineName'], kwargs['configTypeName'], kwargs.get('mode')
        )

    def resolve_runtimeTypeOrError(self, graphene_info, **kwargs):
        return get_runtime_type(graphene_info, kwargs['pipelineName'], kwargs['runtimeTypeName'])

    def resolve_version(self, graphene_info):
        return graphene_info.context.version

    def resolve_pipelineOrError(self, graphene_info, **kwargs):
        return get_pipeline(graphene_info, kwargs['params'].to_selector())

    def resolve_pipeline(self, graphene_info, **kwargs):
        return get_pipeline_or_raise(graphene_info, kwargs['params'].to_selector())

    def resolve_pipelinesOrError(self, graphene_info):
        return get_pipelines(graphene_info)

    def resolve_pipelines(self, graphene_info):
        return get_pipelines_or_raise(graphene_info)

    def resolve_pipelineRuns(self, graphene_info):
        return get_runs(graphene_info)

    def resolve_pipelineRunOrError(self, graphene_info, runId):
        return get_run(graphene_info, runId)

    def resolve_isPipelineConfigValid(self, graphene_info, pipeline, **kwargs):
        return validate_pipeline_config(
            graphene_info,
            pipeline.to_selector(),
            kwargs.get('environmentConfigData'),
            kwargs.get('mode'),
        )

    def resolve_executionPlan(self, graphene_info, pipeline, **kwargs):
        return get_execution_plan(
            graphene_info,
            pipeline.to_selector(),
            kwargs.get('environmentConfigData'),
            kwargs.get('mode'),
        )
class DauphinEventUrlMetadataEntry(dauphin.ObjectType):
    # EventMetadataEntry implementation whose value is a URL string.
    class Meta(object):
        name = "EventUrlMetadataEntry"
        interfaces = (DauphinEventMetadataEntry,)

    url = dauphin.NonNull(dauphin.String)
class DauphinExecutionTag(dauphin.InputObjectType):
    # Key/value tag supplied as input to an execution.
    class Meta(object):
        name = 'ExecutionTag'

    key = dauphin.NonNull(dauphin.String)
    value = dauphin.NonNull(dauphin.String)
class DauphinEventMarkdownMetadataEntry(dauphin.ObjectType):
    # EventMetadataEntry implementation whose value is a markdown string.
    class Meta(object):
        name = "EventMarkdownMetadataEntry"
        interfaces = (DauphinEventMetadataEntry,)

    md_str = dauphin.NonNull(dauphin.String)
class DauphinMarshalledOutput(dauphin.InputObjectType):
    # Input object identifying a marshalled output by its name and a storage key.
    class Meta(object):
        name = 'MarshalledOutput'

    output_name = dauphin.NonNull(dauphin.String)
    key = dauphin.NonNull(dauphin.String)
class DauphinHookErroredEvent(dauphin.ObjectType):
    # Step-level event reporting that a hook raised an error.
    class Meta(object):
        name = "HookErroredEvent"
        interfaces = (DauphinMessageEvent, DauphinStepEvent)

    error = dauphin.NonNull("PythonError")
class DauphinExecutionMetadata(dauphin.InputObjectType):
    # Optional metadata attached to an execution request: a run id and tags.
    class Meta(object):
        name = 'ExecutionMetadata'

    runId = dauphin.String()  # optional
    tags = dauphin.List(dauphin.NonNull(DauphinExecutionTag))
class DauphinObjectStoreOperationEvent(dauphin.ObjectType):
    # Step-level event carrying the result of an object-store operation.
    class Meta(object):
        name = "ObjectStoreOperationEvent"
        interfaces = (DauphinMessageEvent, DauphinStepEvent)

    operation_result = dauphin.NonNull(DauphinObjectStoreOperationResult)
class Arguments(object):
    # Nested Arguments class for an execution mutation.
    # NOTE(review): the enclosing mutation class is not visible in this chunk.
    executionParams = dauphin.NonNull('ExecutionParams')
class DauphinPipelineRun(dauphin.ObjectType):
    # GraphQL representation of a pipeline run; wraps a dagster PipelineRun and
    # resolves most fields by delegating to it or to the fetcher helpers.
    class Meta(object):
        name = "PipelineRun"

    runId = dauphin.NonNull(dauphin.String)
    # Nullable because of historical runs
    pipelineSnapshotId = dauphin.String()
    status = dauphin.NonNull("PipelineRunStatus")
    pipeline = dauphin.NonNull("PipelineReference")
    pipelineName = dauphin.NonNull(dauphin.String)
    solidSelection = dauphin.List(dauphin.NonNull(dauphin.String))
    stats = dauphin.NonNull("PipelineRunStatsOrError")
    stepStats = dauphin.non_null_list("PipelineRunStepStats")
    computeLogs = dauphin.Field(
        dauphin.NonNull("ComputeLogs"),
        stepKey=dauphin.Argument(dauphin.NonNull(dauphin.String)),
        description="""
        Compute logs are the stdout/stderr logs for a given solid step computation
        """,
    )
    executionPlan = dauphin.Field("ExecutionPlan")
    stepKeysToExecute = dauphin.List(dauphin.NonNull(dauphin.String))
    runConfigYaml = dauphin.NonNull(dauphin.String)
    mode = dauphin.NonNull(dauphin.String)
    tags = dauphin.non_null_list("PipelineTag")
    rootRunId = dauphin.Field(dauphin.String)
    parentRunId = dauphin.Field(dauphin.String)
    canTerminate = dauphin.NonNull(dauphin.Boolean)
    assets = dauphin.non_null_list("Asset")

    def __init__(self, pipeline_run):
        # Eagerly populate the scalar fields; everything else resolves lazily
        # off the wrapped PipelineRun.
        super(DauphinPipelineRun, self).__init__(
            runId=pipeline_run.run_id, status=pipeline_run.status, mode=pipeline_run.mode
        )
        self._pipeline_run = check.inst_param(pipeline_run, "pipeline_run", PipelineRun)

    def resolve_pipeline(self, graphene_info):
        return get_pipeline_reference_or_raise(
            graphene_info,
            self._pipeline_run,
        )

    def resolve_pipelineName(self, _graphene_info):
        return self._pipeline_run.pipeline_name

    def resolve_solidSelection(self, _graphene_info):
        return self._pipeline_run.solid_selection

    def resolve_pipelineSnapshotId(self, _):
        return self._pipeline_run.pipeline_snapshot_id

    def resolve_stats(self, graphene_info):
        return get_stats(graphene_info, self.run_id)

    def resolve_stepStats(self, graphene_info):
        return get_step_stats(graphene_info, self.run_id)

    def resolve_computeLogs(self, graphene_info, stepKey):
        return graphene_info.schema.type_named("ComputeLogs")(
            runId=self.run_id, stepKey=stepKey
        )

    def resolve_executionPlan(self, graphene_info):
        # Both snapshot ids must be present; otherwise there is no plan to show.
        if not (
            self._pipeline_run.execution_plan_snapshot_id
            and self._pipeline_run.pipeline_snapshot_id
        ):
            return None

        # Local import — presumably to avoid a circular import at module load
        # (NOTE(review): confirm).
        from .execution import DauphinExecutionPlan

        instance = graphene_info.context.instance
        historical_pipeline = instance.get_historical_pipeline(
            self._pipeline_run.pipeline_snapshot_id
        )
        execution_plan_snapshot = instance.get_execution_plan_snapshot(
            self._pipeline_run.execution_plan_snapshot_id
        )
        # Either lookup may come back empty; only build the plan when both hit.
        return (
            DauphinExecutionPlan(
                ExternalExecutionPlan(
                    execution_plan_snapshot=execution_plan_snapshot,
                    represented_pipeline=historical_pipeline,
                )
            )
            if execution_plan_snapshot and historical_pipeline
            else None
        )

    def resolve_stepKeysToExecute(self, _):
        return self._pipeline_run.step_keys_to_execute

    def resolve_runConfigYaml(self, _graphene_info):
        return yaml.dump(self._pipeline_run.run_config, default_flow_style=False)

    def resolve_tags(self, graphene_info):
        # Hidden tags are filtered out of the public API.
        return [
            graphene_info.schema.type_named("PipelineTag")(key=key, value=value)
            for key, value in self._pipeline_run.tags.items()
            if get_tag_type(key) != TagType.HIDDEN
        ]

    def resolve_rootRunId(self, _):
        return self._pipeline_run.root_run_id

    def resolve_parentRunId(self, _):
        return self._pipeline_run.parent_run_id

    @property
    def run_id(self):
        # Convenience alias over the GraphQL-cased field.
        return self.runId

    def resolve_canTerminate(self, graphene_info):
        return graphene_info.context.instance.run_launcher.can_terminate(self.run_id)

    def resolve_assets(self, graphene_info):
        return get_assets_for_run_id(graphene_info, self.run_id)
class DauphinQuery(dauphin.ObjectType):
    # Root GraphQL Query type: pipelines, runs, schedules, partition sets,
    # solid usage, config validation, execution plans, and instance info.
    # NOTE(review): this chunk also contains another DauphinQuery (earlier in
    # the file) — these appear to be two versions; confirm which is live.
    class Meta(object):
        name = 'Query'

    version = dauphin.NonNull(dauphin.String)
    reloadSupported = dauphin.NonNull(dauphin.Boolean)

    pipelineOrError = dauphin.Field(
        dauphin.NonNull('PipelineOrError'), params=dauphin.NonNull('ExecutionSelector')
    )
    pipeline = dauphin.Field(
        dauphin.NonNull('Pipeline'), params=dauphin.NonNull('ExecutionSelector')
    )
    pipelinesOrError = dauphin.NonNull('PipelinesOrError')
    pipelines = dauphin.Field(dauphin.NonNull('PipelineConnection'))

    runtimeTypeOrError = dauphin.Field(
        dauphin.NonNull('RuntimeTypeOrError'),
        pipelineName=dauphin.Argument(dauphin.NonNull(dauphin.String)),
        runtimeTypeName=dauphin.Argument(dauphin.NonNull(dauphin.String)),
    )

    scheduler = dauphin.Field(dauphin.NonNull('SchedulerOrError'))
    scheduleOrError = dauphin.Field(
        dauphin.NonNull('ScheduleOrError'),
        schedule_name=dauphin.NonNull(dauphin.String),
        limit=dauphin.Int(),
    )

    partitionSetsOrError = dauphin.Field(
        dauphin.NonNull('PartitionSetsOrError'), pipelineName=dauphin.String()
    )
    partitionSetOrError = dauphin.Field(
        dauphin.NonNull('PartitionSetOrError'), partitionSetName=dauphin.String()
    )

    pipelineRunsOrError = dauphin.Field(
        dauphin.NonNull('PipelineRunsOrError'),
        filter=dauphin.Argument(dauphin.NonNull('PipelineRunsFilter')),
        cursor=dauphin.String(),
        limit=dauphin.Int(),
    )
    pipelineRunOrError = dauphin.Field(
        dauphin.NonNull('PipelineRunOrError'), runId=dauphin.NonNull(dauphin.ID)
    )
    pipelineRunTags = dauphin.non_null_list('PipelineTagAndValues')

    usedSolids = dauphin.Field(dauphin.non_null_list('UsedSolid'))
    usedSolid = dauphin.Field('UsedSolid', name=dauphin.NonNull(dauphin.String))

    isPipelineConfigValid = dauphin.Field(
        dauphin.NonNull('PipelineConfigValidationResult'),
        args={
            'pipeline': dauphin.Argument(dauphin.NonNull('ExecutionSelector')),
            'environmentConfigData': dauphin.Argument('EnvironmentConfigData'),
            'mode': dauphin.Argument(dauphin.NonNull(dauphin.String)),
        },
    )

    executionPlan = dauphin.Field(
        dauphin.NonNull('ExecutionPlanResult'),
        args={
            'pipeline': dauphin.Argument(dauphin.NonNull('ExecutionSelector')),
            'environmentConfigData': dauphin.Argument('EnvironmentConfigData'),
            'mode': dauphin.Argument(dauphin.NonNull(dauphin.String)),
        },
    )

    environmentSchemaOrError = dauphin.Field(
        dauphin.NonNull('EnvironmentSchemaOrError'),
        args={
            'selector': dauphin.Argument(dauphin.NonNull('ExecutionSelector')),
            'mode': dauphin.Argument(dauphin.String),
        },
        description='''Fetch an environment schema given an execution selection and
        a mode. See the descripton on EnvironmentSchema for more information.''',
    )

    instance = dauphin.NonNull('Instance')

    # Most resolvers are thin delegations to the fetcher helpers imported at
    # module level; kwargs carry the GraphQL field arguments.
    def resolve_runtimeTypeOrError(self, graphene_info, **kwargs):
        return get_runtime_type(graphene_info, kwargs['pipelineName'], kwargs['runtimeTypeName'])

    def resolve_version(self, graphene_info):
        return graphene_info.context.version

    def resolve_reloadSupported(self, graphene_info):
        return graphene_info.context.reloader.is_reload_supported

    def resolve_scheduler(self, graphene_info):
        return get_scheduler_or_error(graphene_info)

    def resolve_scheduleOrError(self, graphene_info, schedule_name):
        return get_schedule_or_error(graphene_info, schedule_name)

    def resolve_pipelineOrError(self, graphene_info, **kwargs):
        return get_pipeline_or_error(graphene_info, kwargs['params'].to_selector())

    def resolve_pipeline(self, graphene_info, **kwargs):
        return get_pipeline_or_raise(graphene_info, kwargs['params'].to_selector())

    def resolve_pipelinesOrError(self, graphene_info):
        return get_pipelines_or_error(graphene_info)

    def resolve_pipelines(self, graphene_info):
        return get_pipelines_or_raise(graphene_info)

    def resolve_pipelineRunsOrError(self, graphene_info, **kwargs):
        filters = kwargs['filter'].to_selector()
        # The filter is exclusive: at most one of run id / pipeline / tag /
        # status may be set.
        provided = [
            i for i in [filters.run_id, filters.pipeline, filters.tag_key, filters.status] if i
        ]
        if len(provided) > 1:
            return graphene_info.schema.type_named('InvalidPipelineRunsFilterError')(
                message="You may only provide one of the filter options."
            )
        return graphene_info.schema.type_named('PipelineRuns')(
            results=get_runs(graphene_info, filters, kwargs.get('cursor'), kwargs.get('limit'))
        )

    def resolve_pipelineRunOrError(self, graphene_info, runId):
        return get_run(graphene_info, runId)

    def resolve_partitionSetsOrError(self, graphene_info, **kwargs):
        pipeline_name = kwargs.get('pipelineName')
        return get_partition_sets_or_error(graphene_info, pipeline_name)

    def resolve_partitionSetOrError(self, graphene_info, partitionSetName):
        return get_partition_set(graphene_info, partitionSetName)

    def resolve_pipelineRunTags(self, graphene_info):
        return get_run_tags(graphene_info)

    def resolve_usedSolid(self, graphene_info, name):
        # Scan every pipeline for solid handles whose definition matches `name`,
        # collecting all invocation sites; `definition` is resolved once.
        repository = graphene_info.context.repository_definition
        invocations = []
        definition = None
        for pipeline in repository.get_all_pipelines():
            for handle in build_dauphin_solid_handles(pipeline):
                if handle.handleID.definition_name == name:
                    if definition is None:
                        definition = handle.solid.resolve_definition(graphene_info)
                    invocations.append(
                        DauphinSolidInvocationSite(pipeline=pipeline, solidHandle=handle)
                    )
        return DauphinUsedSolid(definition=definition, invocations=invocations)

    def resolve_usedSolids(self, graphene_info):
        # Group every solid invocation site by definition name, then emit one
        # DauphinUsedSolid per definition, both sorted for stable output.
        repository = graphene_info.context.repository_definition
        inv_by_def_name = defaultdict(list)
        definitions = []
        for pipeline in repository.get_all_pipelines():
            for handle in build_dauphin_solid_handles(pipeline):
                definition = handle.solid.resolve_definition(graphene_info)
                # First sighting of a definition name records the definition.
                if definition.name not in inv_by_def_name:
                    definitions.append(definition)
                inv_by_def_name[definition.name].append(
                    DauphinSolidInvocationSite(pipeline=pipeline, solidHandle=handle)
                )
        # Returns a lazy map object; presumably consumed as an iterable by the
        # GraphQL layer (NOTE(review): confirm a list is not required).
        return map(
            lambda d: DauphinUsedSolid(
                definition=d,
                invocations=sorted(
                    inv_by_def_name[d.name], key=lambda i: i.solidHandle.handleID
                ),
            ),
            sorted(definitions, key=lambda d: d.name),
        )

    def resolve_isPipelineConfigValid(self, graphene_info, pipeline, **kwargs):
        return validate_pipeline_config(
            graphene_info,
            pipeline.to_selector(),
            kwargs.get('environmentConfigData'),
            kwargs.get('mode'),
        )

    def resolve_executionPlan(self, graphene_info, pipeline, **kwargs):
        return get_execution_plan(
            graphene_info,
            pipeline.to_selector(),
            kwargs.get('environmentConfigData'),
            kwargs.get('mode'),
        )

    def resolve_environmentSchemaOrError(self, graphene_info, **kwargs):
        return resolve_environment_schema_or_error(
            graphene_info, kwargs['selector'].to_selector(), kwargs.get('mode')
        )

    def resolve_instance(self, graphene_info):
        return graphene_info.schema.type_named('Instance')(graphene_info.context.instance)
class DauphinPipelineRunLogsSubscriptionSuccess(dauphin.ObjectType):
    # Success payload for the pipeline-run-logs subscription: the run plus a
    # batch of its events.
    class Meta(object):
        name = 'PipelineRunLogsSubscriptionSuccess'

    run = dauphin.NonNull('PipelineRun')
    messages = dauphin.non_null_list('PipelineRunEvent')
class DauphinEnvironmentSchema(dauphin.ObjectType):
    # Wraps a dagster EnvironmentSchema + PipelineDefinition pair; all config
    # queries for a given execution selection/mode go through this type.
    def __init__(self, environment_schema, dagster_pipeline):
        # Local imports — presumably to avoid circular imports at module load
        # (NOTE(review): confirm).
        from dagster.core.definitions.environment_schema import EnvironmentSchema
        from dagster.core.definitions.pipeline import PipelineDefinition

        # NOTE(review): super().__init__() is not called here, unlike
        # DauphinPipelineRun elsewhere in this file — confirm intentional.
        self._environment_schema = check.inst_param(
            environment_schema, 'environment_schema', EnvironmentSchema
        )
        self._dagster_pipeline = check.inst_param(
            dagster_pipeline, 'dagster_pipeline', PipelineDefinition
        )

    class Meta(object):
        name = 'EnvironmentSchema'
        description = '''The environment schema represents the all the config type
        information given a certain execution selection and mode of execution of that
        selection. All config interactions (e.g. checking config validity, fetching
        all config types, fetching in a particular config type) should be done
        through this type '''

    rootEnvironmentType = dauphin.Field(
        dauphin.NonNull('ConfigType'),
        description='''Fetch the root environment type. Concretely this is the type that
        is in scope at the root of configuration document for a particular execution selection.
        It is the type that is in scope initially with a blank config editor.''',
    )

    allConfigTypes = dauphin.Field(
        dauphin.non_null_list('ConfigType'),
        description='''Fetch all the named config types that are in the schema. Useful
        for things like a type browser UI, or for fetching all the types are in the
        scope of a document so that the index can be built for the autocompleting editor.
        ''',
    )

    isEnvironmentConfigValid = dauphin.Field(
        dauphin.NonNull('PipelineConfigValidationResult'),
        args={'environmentConfigData': dauphin.Argument('EnvironmentConfigData')},
        description='''Parse a particular environment config result. The return value
        either indicates that the validation succeeded by returning
        `PipelineConfigValidationValid` or that there are configuration errors
        by returning `PipelineConfigValidationInvalid' which containers a list errors
        so that can be rendered for the user''',
    )

    def resolve_allConfigTypes(self, _graphene_info):
        # Sort by name for stable output; unnamed types sort first via ''.
        return sorted(
            list(map(to_dauphin_config_type, self._environment_schema.all_config_types())),
            key=lambda ct: ct.name if ct.name else '',
        )

    def resolve_rootEnvironmentType(self, _graphene_info):
        return to_dauphin_config_type(self._environment_schema.environment_type)

    def resolve_isEnvironmentConfigValid(self, graphene_info, **kwargs):
        return resolve_is_environment_config_valid(
            graphene_info,
            self._environment_schema,
            self._dagster_pipeline,
            kwargs.get('environmentConfigData', {}),
        )
class DauphinMissingRunIdErrorEvent(dauphin.ObjectType):
    # Error event reporting that a requested run id does not exist.
    class Meta(object):
        name = 'MissingRunIdErrorEvent'

    invalidRunId = dauphin.NonNull(dauphin.String)  # the run id that was not found
class DauphinScheduleRunConfig(dauphin.ObjectType):
    # Run config for a schedule, exposed as its YAML source.
    class Meta(object):
        name = 'ScheduleRunConfig'

    yaml = dauphin.NonNull(dauphin.String)
class DauphinPipelineInitFailureEvent(dauphin.ObjectType):
    # Pipeline-level event reporting a failure during pipeline initialization.
    class Meta(object):
        name = 'PipelineInitFailureEvent'
        interfaces = (DauphinMessageEvent, DauphinPipelineEvent)

    error = dauphin.NonNull('PythonError')
class Arguments(object):  # explicit object base, matching the other Arguments classes in this file
    # Nested Arguments class for a re-execution mutation.
    # NOTE(review): the enclosing mutation class is not visible in this chunk.
    executionParams = dauphin.NonNull('ExecutionParams')
    reexecutionConfig = dauphin.Argument('ReexecutionConfig')