def __init__(
    self,
    name=None,
    input_defs=None,
    output_defs=None,
    description=None,
    required_resource_keys=None,
    config=None,
    metadata=None,
    step_metadata_fn=None,
):
    """Hold solid construction arguments prior to building a SolidDefinition."""
    self.name = check.opt_str_param(name, 'name')
    self.description = check.opt_str_param(description, 'description')
    self.input_defs = check.opt_nullable_list_param(input_defs, 'input_defs', InputDefinition)
    self.output_defs = check.opt_nullable_list_param(
        output_defs, 'output_defs', OutputDefinition
    )
    self.step_metadata_fn = check.opt_callable_param(step_metadata_fn, 'step_metadata_fn')
    # Deferred validation: resources and config are checked within SolidDefinition.
    self.required_resource_keys = required_resource_keys
    self.config = config
    # metadata is checked within ISolidDefinition.
    self.metadata = metadata
def __init__(
    self,
    name,
    cron_schedule,
    pipeline_name,
    environment_dict=None,
    environment_dict_fn=None,
    tags=None,
    tags_fn=None,
    solid_subset=None,
    mode="default",
    should_execute=None,
    environment_vars=None,
):
    """Define a cron-driven schedule for a pipeline.

    At most one of ``environment_dict`` / ``environment_dict_fn`` may be
    provided, and at most one of ``tags`` / ``tags_fn``; omitted callables
    default to no-op lambdas so downstream code can call them unconditionally.
    """
    # Fix: validate each argument exactly once and reuse the checked values —
    # previously name / cron_schedule / environment_vars were re-checked when
    # constructing ScheduleDefinitionData below.
    name = check.str_param(name, 'name')
    cron_schedule = check.str_param(cron_schedule, 'cron_schedule')
    check.str_param(pipeline_name, 'pipeline_name')
    check.opt_dict_param(environment_dict, 'environment_dict')
    check.opt_callable_param(environment_dict_fn, 'environment_dict_fn')
    check.opt_dict_param(tags, 'tags', key_type=str, value_type=str)
    check.opt_callable_param(tags_fn, 'tags_fn')
    check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str)
    mode = check.opt_str_param(mode, 'mode', DEFAULT_MODE_NAME)
    check.opt_callable_param(should_execute, 'should_execute')
    environment_vars = check.opt_dict_param(
        environment_vars, 'environment_vars', key_type=str, value_type=str
    )

    if environment_dict_fn and environment_dict:
        raise DagsterInvalidDefinitionError(
            'Attempted to provide both environment_dict_fn and environment_dict as arguments'
            ' to ScheduleDefinition. Must provide only one of the two.'
        )

    if tags_fn and tags:
        raise DagsterInvalidDefinitionError(
            'Attempted to provide both tags_fn and tags as arguments'
            ' to ScheduleDefinition. Must provide only one of the two.'
        )

    # Default the callables so callers never have to branch on None.
    if not environment_dict and not environment_dict_fn:
        environment_dict_fn = lambda _context: {}

    if not tags and not tags_fn:
        tags_fn = lambda _context: {}

    if not should_execute:
        should_execute = lambda _context: True

    self._schedule_definition_data = ScheduleDefinitionData(
        name=name,
        cron_schedule=cron_schedule,
        environment_vars=environment_vars,
    )

    self._environment_dict = environment_dict
    self._environment_dict_fn = environment_dict_fn
    self._tags = tags
    self._tags_fn = tags_fn
    self._should_execute = should_execute
    self._mode = mode
    self._selector = ExecutionSelector(pipeline_name, solid_subset)
def __init__(
    self,
    name: Optional[str] = None,
    input_defs: Optional[List[InputDefinition]] = None,
    output_defs: Optional[List[OutputDefinition]] = None,
    description: Optional[str] = None,
    required_resource_keys: Optional[Set[str]] = None,
    config_schema: Optional[Union[Any, Dict[str, Any]]] = None,
    tags: Optional[Dict[str, Any]] = None,
    version: Optional[str] = None,
):
    """Capture solid construction arguments before SolidDefinition is built."""
    self.name = check.opt_str_param(name, "name")
    self.description = check.opt_str_param(description, "description")
    self.input_defs = check.opt_nullable_list_param(input_defs, "input_defs", InputDefinition)
    self.output_defs = check.opt_nullable_list_param(
        output_defs, "output_defs", OutputDefinition
    )
    # Deferred validation: checked within SolidDefinition.
    self.required_resource_keys = required_resource_keys
    self.config_schema = config_schema
    self.tags = tags
    self.version = version
def get_external_execution_plan(
    self,
    external_pipeline: ExternalPipeline,
    run_config: Dict[str, Any],
    mode: str,
    step_keys_to_execute: Optional[List[str]],
    known_state: Optional[KnownExecutionState],
) -> ExternalExecutionPlan:
    """Plan the pipeline in-process and wrap the snapshot for external use."""
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    check.dict_param(run_config, "run_config")
    check.str_param(mode, "mode")
    check.opt_nullable_list_param(step_keys_to_execute, "step_keys_to_execute", of_type=str)
    check.opt_inst_param(known_state, "known_state", KnownExecutionState)

    # Reconstruct the pipeline, then narrow it to the solids selected for execution.
    reconstructable = self.get_reconstructable_pipeline(external_pipeline.name)
    subset_pipeline = reconstructable.subset_for_execution_from_existing_pipeline(
        external_pipeline.solids_to_execute
    )

    execution_plan = create_execution_plan(
        pipeline=subset_pipeline,
        run_config=run_config,
        mode=mode,
        step_keys_to_execute=step_keys_to_execute,
        known_state=known_state,
    )
    plan_snapshot = snapshot_from_execution_plan(
        execution_plan, external_pipeline.identifying_pipeline_snapshot_id
    )
    return ExternalExecutionPlan(
        execution_plan_snapshot=plan_snapshot,
        represented_pipeline=external_pipeline,
    )
def get_external_execution_plan(
    self,
    external_pipeline: ExternalPipeline,
    run_config: Dict[str, Any],
    mode: str,
    step_keys_to_execute: Optional[List[str]],
    known_state: Optional[KnownExecutionState],
) -> ExternalExecutionPlan:
    """Fetch an execution plan snapshot over gRPC and wrap it for external use."""
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    check.dict_param(run_config, "run_config")
    check.str_param(mode, "mode")
    check.opt_nullable_list_param(step_keys_to_execute, "step_keys_to_execute", of_type=str)
    check.opt_inst_param(known_state, "known_state", KnownExecutionState)

    snapshot = sync_get_external_execution_plan_grpc(
        api_client=self.client,
        pipeline_origin=external_pipeline.get_external_origin(),
        run_config=run_config,
        mode=mode,
        pipeline_snapshot_id=external_pipeline.identifying_pipeline_snapshot_id,
        solid_selection=external_pipeline.solid_selection,
        step_keys_to_execute=step_keys_to_execute,
        known_state=known_state,
    )
    return ExternalExecutionPlan(
        execution_plan_snapshot=snapshot,
        represented_pipeline=external_pipeline,
    )
def create_execution_plan(
    pipeline: Union[IPipeline, PipelineDefinition],
    run_config: Optional[dict] = None,
    mode: Optional[str] = None,
    step_keys_to_execute: Optional[List[str]] = None,
    known_state: KnownExecutionState = None,
) -> ExecutionPlan:
    """Resolve run config for the pipeline and build its ExecutionPlan."""
    pipeline = _check_pipeline(pipeline)
    pipeline_def = pipeline.get_definition()
    check.inst_param(pipeline_def, "pipeline_def", PipelineDefinition)

    run_config = check.opt_dict_param(run_config, "run_config", key_type=str)
    check.opt_nullable_list_param(step_keys_to_execute, "step_keys_to_execute", of_type=str)
    # Fall back to the pipeline's default mode when none is supplied.
    mode = check.opt_str_param(mode, "mode", default=pipeline_def.get_default_mode_name())

    resolved_run_config = ResolvedRunConfig.build(pipeline_def, run_config, mode=mode)
    return ExecutionPlan.build(
        pipeline,
        resolved_run_config,
        step_keys_to_execute=step_keys_to_execute,
        known_state=known_state,
    )
def get_external_execution_plan(
    self,
    external_pipeline: ExternalPipeline,
    run_config: Dict[str, Any],
    mode: str,
    step_keys_to_execute: Optional[List[str]],
    known_state: Optional[KnownExecutionState],
    instance: Optional[DagsterInstance] = None,
) -> ExternalExecutionPlan:
    """Plan the pipeline in-process and return its snapshot as an ExternalExecutionPlan."""
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    check.dict_param(run_config, "run_config")
    check.str_param(mode, "mode")
    check.opt_nullable_list_param(step_keys_to_execute, "step_keys_to_execute", of_type=str)
    check.opt_inst_param(known_state, "known_state", KnownExecutionState)
    check.opt_inst_param(instance, "instance", DagsterInstance)

    subset_pipeline = self.get_reconstructable_pipeline(
        external_pipeline.name
    ).subset_for_execution_from_existing_pipeline(external_pipeline.solids_to_execute)

    # Only persistent instances can be referenced during planning.
    instance_ref = None
    if instance and instance.is_persistent:
        instance_ref = instance.get_ref()

    execution_plan = create_execution_plan(
        pipeline=subset_pipeline,
        run_config=run_config,
        mode=mode,
        step_keys_to_execute=step_keys_to_execute,
        known_state=known_state,
        instance_ref=instance_ref,
    )
    return ExternalExecutionPlan(
        execution_plan_snapshot=snapshot_from_execution_plan(
            execution_plan,
            external_pipeline.identifying_pipeline_snapshot_id,
        )
    )
def __init__(
    self,
    name=None,
    mode_defs=None,
    preset_defs=None,
    description=None,
    tags=None,
    hook_defs=None,
    input_defs=None,
    output_defs=None,
    config_schema=None,
    config_fn=None,
):
    """Hold pipeline construction arguments prior to building the definition."""
    self.name = check.opt_str_param(name, "name")
    self.description = check.opt_str_param(description, "description")
    self.mode_definitions = check.opt_list_param(mode_defs, "mode_defs", ModeDefinition)
    self.preset_definitions = check.opt_list_param(preset_defs, "preset_defs", PresetDefinition)
    self.tags = check.opt_dict_param(tags, "tags")
    self.hook_defs = check.opt_set_param(hook_defs, "hook_defs", of_type=HookDefinition)
    self.input_defs = check.opt_nullable_list_param(
        input_defs, "input_defs", of_type=InputDefinition
    )
    # Distinguish "no outputs passed" from "an explicit (possibly empty) list".
    self.did_pass_outputs = output_defs is not None
    self.output_defs = check.opt_nullable_list_param(
        output_defs, "output_defs", of_type=OutputDefinition
    )
    self.config_fn = check.opt_callable_param(config_fn, "config_fn")
    # config_schema is validated downstream.
    self.config_schema = config_schema
def __init__(
    self,
    name=None,
    input_defs=None,
    output_defs=None,
    description=None,
    required_resource_keys=None,
    config_schema=None,
    tags=None,
):
    """Capture solid construction arguments before the definition is built."""
    self.name = check.opt_str_param(name, 'name')
    self.description = check.opt_str_param(description, 'description')
    self.input_defs = check.opt_nullable_list_param(input_defs, 'input_defs', InputDefinition)
    self.output_defs = check.opt_nullable_list_param(
        output_defs, 'output_defs', OutputDefinition
    )
    # Deferred validation: resources and config are checked within SolidDefinition;
    # tags are checked within ISolidDefinition.
    self.required_resource_keys = required_resource_keys
    self.config_schema = config_schema
    self.tags = tags
def __init__(
    self,
    name=None,
    input_defs=None,
    output_defs=None,
    description=None,
    required_resource_keys=None,
    config_schema=None,
    tags=None,
    version=None,
):
    """Capture solid construction arguments before SolidDefinition is built."""
    self.name = check.opt_str_param(name, "name")
    self.description = check.opt_str_param(description, "description")
    self.input_defs = check.opt_nullable_list_param(input_defs, "input_defs", InputDefinition)
    self.output_defs = check.opt_nullable_list_param(
        output_defs, "output_defs", OutputDefinition
    )
    # Deferred validation: these are checked within SolidDefinition.
    self.required_resource_keys = required_resource_keys
    self.config_schema = config_schema
    self.tags = tags
    self.version = version
def execute_remote_pipeline_run(
    host, pipeline_name, environment_dict=None, tags=None, solid_subset=None, mode=None
):
    """Start a pipeline run on a remote dagster-graphql host and return the result."""
    check.str_param(host, 'host')
    check.str_param(pipeline_name, 'pipeline_name')
    environment_dict = check.opt_dict_param(environment_dict, 'environment_dict', key_type=str)
    check.opt_dict_param(tags, 'tags', key_type=str, value_type=str)
    check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str)
    mode = check.opt_str_param(mode, 'mode', DEFAULT_MODE_NAME)

    execution_params = ExecutionParams(
        selector=ExecutionSelector(pipeline_name, solid_subset),
        environment_dict=environment_dict,
        mode=mode,
        execution_metadata=ExecutionMetadata(run_id=None, tags=tags or {}),
        step_keys=None,
        previous_run_id=None,
    )
    variables = json.dumps({'executionParams': execution_params.to_graphql_input()})
    return execute_query_against_remote(
        host, START_PIPELINE_EXECUTION_MUTATION, variables=variables
    )
def __init__(
    self,
    name=None,
    inputs=None,
    outputs=None,
    description=None,
    required_resources=None,
    config_field=None,
    metadata=None,
):
    """Capture solid construction arguments before the definition is built."""
    self.name = check.opt_str_param(name, 'name')
    self.description = check.opt_str_param(description, 'description')
    self.input_defs = check.opt_nullable_list_param(inputs, 'inputs', InputDefinition)
    self.output_defs = check.opt_nullable_list_param(outputs, 'outputs', OutputDefinition)
    # Deferred validation: resources and config_field are checked within
    # SolidDefinition; metadata is checked within ISolidDefinition.
    self.required_resources = required_resources
    self.config_field = config_field
    self.metadata = metadata
def daily_schedule(
    pipeline_name,
    start_date,
    name=None,
    execution_time=datetime.time(0, 0),
    tags=None,
    tags_fn_for_date=None,
    solid_subset=None,
    mode="default",
    should_execute=None,
    environment_vars=None,
):
    """Create a partition-backed schedule that runs daily at ``execution_time``.

    The decorated function maps a partition date to an environment dict.

    Args:
        pipeline_name (str): Name of the pipeline to execute.
        start_date (datetime.datetime): First date of the partition range.
        name (Optional[str]): Schedule name; defaults to the decorated fn's name.
        execution_time (datetime.time): Time of day to run (default midnight).
        tags (Optional[Dict[str, str]]): Static tags attached to each run.
        tags_fn_for_date (Optional[Callable]): Builds tags from the partition
            date; takes precedence over ``tags`` when both are given.
        solid_subset (Optional[List[str]]): Solids to execute.
        mode (str): Pipeline mode (default 'default').
        should_execute (Optional[Callable]): Gate evaluated at schedule time.
        environment_vars (Optional[Dict[str, str]]): Env vars for the scheduler.
    """
    from dagster.core.definitions.partition import PartitionSetDefinition

    check.opt_str_param(name, 'name')
    check.str_param(pipeline_name, 'pipeline_name')
    check.inst_param(start_date, 'start_date', datetime.datetime)
    check.inst_param(execution_time, 'execution_time', datetime.time)
    check.opt_dict_param(tags, 'tags', key_type=str, value_type=str)
    check.opt_callable_param(tags_fn_for_date, 'tags_fn_for_date')
    check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str)
    mode = check.opt_str_param(mode, 'mode', DEFAULT_MODE_NAME)
    check.opt_callable_param(should_execute, 'should_execute')
    check.opt_dict_param(environment_vars, 'environment_vars', key_type=str, value_type=str)

    cron_schedule = '{minute} {hour} * * *'.format(
        minute=execution_time.minute, hour=execution_time.hour
    )

    def inner(fn):
        check.callable_param(fn, 'fn')
        schedule_name = name or fn.__name__

        def _environment_dict_fn_for_partition(partition):
            return fn(partition.value)

        def _tags_fn_for_partition(partition):
            # Bug fix: tags / tags_fn_for_date were validated above but
            # previously dropped; forward them so runs are tagged as documented.
            if tags_fn_for_date:
                return tags_fn_for_date(partition.value)
            return tags or {}

        partition_set = PartitionSetDefinition(
            name='{}_daily'.format(pipeline_name),
            pipeline_name=pipeline_name,
            partition_fn=date_partition_range(start_date),
            environment_dict_fn_for_partition=_environment_dict_fn_for_partition,
            tags_fn_for_partition=_tags_fn_for_partition,
            # Bug fix: solid_subset was validated but never passed through.
            solid_subset=solid_subset,
            mode=mode,
        )

        return partition_set.create_schedule_definition(
            schedule_name,
            cron_schedule,
            should_execute=should_execute,
            environment_vars=environment_vars,
        )

    return inner
def triggered_execution(
    pipeline_name,
    name=None,
    mode="default",
    solid_selection=None,
    tags_fn=None,
    should_execute_fn=None,
):
    """Decorate a function to define a triggered execution.

    The decorated function is used as the ``run_config_fn`` of the underlying
    :py:class:`~dagster.TriggeredDefinition`; it receives a
    :py:class:`~dagster.TriggeredExecutionContext` as its only argument and
    returns the environment dict for the triggered execution.

    Args:
        pipeline_name (str): Name of the pipeline to execute when the trigger fires.
        name (Optional[str]): Name of this triggered execution.
        solid_selection (Optional[List[str]]): Solid subselection (including
            single solid names) to execute when the trigger fires, e.g.
            ``['*some_solid+', 'other_solid']``.
        mode (Optional[str]): Pipeline mode to apply for the triggered execution
            (default: 'default').
        tags_fn (Optional[Callable[[TriggeredExecutionContext], Optional[Dict[str, str]]]]):
            Generates tags to attach to the triggered execution; takes a
            :py:class:`~dagster.TriggeredExecutionContext` and returns a
            dictionary of string key-value pairs.
        should_execute_fn (Optional[Callable[[TriggeredExecutionContext], bool]]):
            Runs at trigger time to decide whether a pipeline execution should
            be initiated or skipped; defaults to always returning ``True``.
    """
    check.str_param(pipeline_name, "pipeline_name")
    check.opt_str_param(name, "name")
    check.str_param(mode, "mode")
    check.opt_nullable_list_param(solid_selection, "solid_selection", of_type=str)
    check.opt_callable_param(tags_fn, "tags_fn")
    check.opt_callable_param(should_execute_fn, "should_execute_fn")

    def inner(fn):
        check.callable_param(fn, "fn")
        return TriggeredExecutionDefinition(
            name=name or fn.__name__,
            pipeline_name=pipeline_name,
            run_config_fn=fn,
            tags_fn=tags_fn,
            should_execute_fn=should_execute_fn,
            mode=mode,
            solid_selection=solid_selection,
        )

    return inner
def __init__(
    self,
    name=None,
    description=None,
    input_defs=None,
    output_defs=None,
):
    """Capture composite construction arguments before the definition is built."""
    self.name = check.opt_str_param(name, "name")
    self.description = check.opt_str_param(description, "description")
    # Distinguish "no outputs passed" from "an explicit (possibly empty) list".
    self.did_pass_outputs = output_defs is not None
    self.input_defs = check.opt_nullable_list_param(
        input_defs, "input_defs", of_type=InputDefinition
    )
    self.output_defs = check.opt_nullable_list_param(
        output_defs, "output_defs", of_type=OutputDefinition
    )
def __new__(cls, name, pipeline_name, solid_subset=None, environment_files=None):
    """Immutable record describing a pipeline preset."""
    checked_name = check.str_param(name, 'name')
    checked_pipeline_name = check.str_param(pipeline_name, 'pipeline_name')
    checked_subset = check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str)
    checked_files = check.opt_nullable_list_param(environment_files, 'environment_files')
    return super(PipelinePreset, cls).__new__(
        cls, checked_name, checked_pipeline_name, checked_subset, checked_files
    )
def test_opt_nullable_list_param():
    """opt_nullable_list_param passes None through, normalizes list-likes, rejects non-lists."""
    # None passes through; empty list-likes normalize to an empty list.
    assert check.opt_nullable_list_param(None, "list_param") is None
    assert check.opt_nullable_list_param([], "list_param") == []
    assert check.opt_nullable_list_param(frozenlist(), "list_param") == []

    obj_list = [1]
    assert check.opt_nullable_list_param(obj_list, "list_param") == obj_list

    # Non-list values are rejected.
    for bad_value in (0, "", "3u4"):
        with pytest.raises(ParameterCheckError):
            check.opt_nullable_list_param(bad_value, "list_param")
def test_opt_nullable_list_param():
    """Verify pass-through, normalization, and rejection behavior."""
    # Accepted inputs.
    assert check.opt_nullable_list_param(None, 'list_param') is None
    assert check.opt_nullable_list_param([], 'list_param') == []
    assert check.opt_nullable_list_param(frozenlist(), 'list_param') == []

    obj_list = [1]
    assert check.opt_nullable_list_param(obj_list, 'list_param') == obj_list

    # Rejected inputs: ints and strings are not lists.
    for bad_value in (0, '', '3u4'):
        with pytest.raises(ParameterCheckError):
            check.opt_nullable_list_param(bad_value, 'list_param')
def job(
    pipeline_name,
    name=None,
    mode="default",
    solid_selection=None,
    tags_fn=None,
):
    """Decorate a function to define a job.

    The decorated function is used as the ``run_config_fn`` of the underlying
    :py:class:`~dagster.JobDefinition`; it receives a
    :py:class:`~dagster.JobContext` as its only argument and returns the run
    config dict for the pipeline execution.

    Args:
        pipeline_name (str): Name of the pipeline to execute.
        name (Optional[str]): Name of this job.
        solid_selection (Optional[List[str]]): Solid subselection (including
            single solid names) for the pipeline execution, e.g.
            ``['*some_solid+', 'other_solid']``.
        mode (Optional[str]): Pipeline mode to apply for the pipeline execution
            (default: 'default').
        tags_fn (Optional[Callable[[JobContext], Optional[Dict[str, str]]]]):
            Generates tags to attach to the pipeline execution; takes a
            :py:class:`~dagster.JobContext` and returns a dictionary of string
            key-value pairs.
    """
    check.str_param(pipeline_name, "pipeline_name")
    check.opt_str_param(name, "name")
    check.str_param(mode, "mode")
    check.opt_nullable_list_param(solid_selection, "solid_selection", of_type=str)
    check.opt_callable_param(tags_fn, "tags_fn")

    def inner(fn):
        check.callable_param(fn, "fn")
        return JobDefinition(
            name=name or fn.__name__,
            pipeline_name=pipeline_name,
            run_config_fn=fn,
            tags_fn=tags_fn,
            mode=mode,
            solid_selection=solid_selection,
        )

    return inner
def __new__(cls, asset_keys: Optional[List[AssetKey]] = None):
    """Construct external sensor metadata; asset_keys may be None or a list of AssetKey."""
    checked_keys = check.opt_nullable_list_param(asset_keys, "asset_keys", of_type=AssetKey)
    return super(ExternalSensorMetadata, cls).__new__(cls, asset_keys=checked_keys)
def from_files(name, config_files=None, solid_selection=None, mode=None, tags=None):
    """Static constructor for presets from YAML files.

    Args:
        name (str): Name of this preset; must be unique among the presets
            defined on a given pipeline.
        config_files (Optional[List[str]]): Paths or glob patterns for yaml
            files to load and parse as the run config for this preset.
        solid_selection (Optional[List[str]]): Solid subselection (including
            single solid names) to execute with the preset, e.g.
            ``['*some_solid+', 'other_solid']``.
        mode (Optional[str]): Mode to apply when executing this preset
            (default: 'default').
        tags (Optional[Dict[str, Any]]): Tags to apply when executing this preset.

    Returns:
        PresetDefinition: A PresetDefinition constructed from the provided YAML files.

    Raises:
        DagsterInvariantViolationError: When one of the YAML files is invalid
            and has a parse error.
    """
    check.str_param(name, "name")
    mode = check.opt_str_param(mode, "mode", DEFAULT_MODE_NAME)
    config_files = check.opt_list_param(config_files, "config_files")
    solid_selection = check.opt_nullable_list_param(
        solid_selection, "solid_selection", of_type=str
    )

    merged = config_from_files(config_files)
    return PresetDefinition(name, merged, solid_selection, mode, tags)
def __init__(
    self,
    name: Optional[str] = None,
    description: Optional[str] = None,
    input_defs: Optional[List[InputDefinition]] = None,
    output_defs: Optional[List[OutputDefinition]] = None,
    ins: Optional[Dict[str, GraphIn]] = None,
    out: Optional[Union[GraphOut, Dict[str, GraphOut]]] = None,
    tags: Optional[Dict[str, Any]] = None,
    config_mapping: Optional[ConfigMapping] = None,
):
    """Hold graph construction arguments prior to building the definition."""
    self.name = check.opt_str_param(name, "name")
    self.description = check.opt_str_param(description, "description")
    self.input_defs = check.opt_list_param(input_defs, "input_defs", of_type=InputDefinition)
    # Record whether the user explicitly supplied outputs via either API.
    self.did_pass_outputs = output_defs is not None or out is not None
    self.output_defs = check.opt_nullable_list_param(
        output_defs, "output_defs", of_type=OutputDefinition
    )
    self.config_mapping = check.opt_inst_param(config_mapping, "config_mapping", ConfigMapping)
    # ins / out / tags are validated downstream.
    self.ins = ins
    self.out = out
    self.tags = tags
def from_files(name, environment_files=None, solid_subset=None, mode=None):
    """Construct a PresetDefinition by merging YAML environment files.

    Raises DagsterInvalidDefinitionError when a glob matches nothing, and
    DagsterInvariantViolationError when a matched file fails to parse.
    """
    check.str_param(name, 'name')
    environment_files = check.opt_list_param(environment_files, 'environment_files')
    solid_subset = check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str)
    mode = check.opt_str_param(mode, 'mode', DEFAULT_MODE_NAME)

    # Expand each glob; deduplicate via realpath so a file matched twice is read once.
    file_set = set()
    for file_glob in environment_files or []:
        matched = glob(file_glob)
        if not matched:
            raise DagsterInvalidDefinitionError(
                'File or glob pattern "{file_glob}" for "environment_files" in preset '
                '"{name}" produced no results.'.format(name=name, file_glob=file_glob)
            )
        for path in matched:
            file_set.add(os.path.realpath(path))

    try:
        merged = merge_yamls(list(file_set))
    except yaml.YAMLError as err:
        six.raise_from(
            DagsterInvariantViolationError(
                'Encountered error attempting to parse yaml. Parsing files {file_set} '
                'loaded by file/patterns {files} on preset "{name}".'.format(
                    file_set=file_set, files=environment_files, name=name
                )
            ),
            err,
        )

    return PresetDefinition(name, merged, solid_subset, mode)
def __new__(
    cls,
    name,
    cron_schedule,
    pipeline_name,
    solid_selection,
    mode,
    environment_vars,
    partition_set_name,
    execution_timezone,
    description=None,
):
    """Serializable snapshot of a schedule definition for external consumption."""
    # Validate everything up front, then construct the namedtuple in one shot.
    checked = dict(
        name=check.str_param(name, "name"),
        cron_schedule=check.str_param(cron_schedule, "cron_schedule"),
        pipeline_name=check.str_param(pipeline_name, "pipeline_name"),
        solid_selection=check.opt_nullable_list_param(solid_selection, "solid_selection", str),
        mode=check.opt_str_param(mode, "mode"),
        environment_vars=check.opt_dict_param(environment_vars, "environment_vars"),
        partition_set_name=check.opt_str_param(partition_set_name, "partition_set_name"),
        execution_timezone=check.opt_str_param(execution_timezone, "execution_timezone"),
        description=check.opt_str_param(description, "description"),
    )
    return super(ExternalScheduleData, cls).__new__(cls, **checked)
def __init__(self, name, environment_files=None, solid_subset=None, mode=None):
    """A named preset: environment files plus an optional solid subset and mode."""
    self.name = check.str_param(name, 'name')
    self.mode = check.opt_str_param(mode, 'mode', DEFAULT_MODE_NAME)
    self.solid_subset = check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str)
    self.environment_files = check.opt_list_param(
        environment_files, 'environment_files', of_type=str
    )
def __init__(
    self,
    name: str,
    pipeline_name: str,
    evaluation_fn: Callable[
        ["SensorExecutionContext"],
        Union[Generator[Union[RunRequest, SkipReason], None, None], RunRequest, SkipReason],
    ],
    solid_selection: Optional[List[Any]] = None,
    mode: Optional[str] = None,
    minimum_interval_seconds: Optional[int] = None,
    description: Optional[str] = None,
):
    """Define a sensor whose evaluation function requests (or skips) runs."""
    self._name = check_valid_name(name)
    self._pipeline_name = check.str_param(pipeline_name, "pipeline_name")
    self._evaluation_fn = check.callable_param(evaluation_fn, "evaluation_fn")
    self._solid_selection = check.opt_nullable_list_param(
        solid_selection, "solid_selection", of_type=str
    )
    self._mode = check.opt_str_param(mode, "mode", DEFAULT_MODE_NAME)
    self._description = check.opt_str_param(description, "description")
    # Minimum seconds between daemon evaluations of this sensor.
    self._min_interval = check.opt_int_param(
        minimum_interval_seconds, "minimum_interval_seconds", DEFAULT_SENSOR_DAEMON_INTERVAL
    )
def __new__(
    cls,
    name,
    pipeline_name,
    partition_fn,
    solid_subset=None,
    mode=None,
    environment_dict_fn_for_partition=lambda _partition: {},
    tags_fn_for_partition=lambda _partition: {},
):
    """Define a set of partitions plus per-partition run config / tags builders."""

    def _coerce(value):
        # Accept Partition instances or raw strings; reject anything else.
        if isinstance(value, Partition):
            return value
        if isinstance(value, str):
            return Partition(value)
        raise DagsterInvalidDefinitionError(
            'Expected <Partition> | <str>, received {type}'.format(type=type(value))
        )

    return super(PartitionSetDefinition, cls).__new__(
        cls,
        name=check.str_param(name, 'name'),
        pipeline_name=check.str_param(pipeline_name, 'pipeline_name'),
        # Wrap the user fn lazily so every element it yields is normalized to a
        # Partition; the callable check also stays lazy, as in the original.
        partition_fn=lambda: [
            _coerce(x) for x in check.callable_param(partition_fn, 'partition_fn')()
        ],
        solid_subset=check.opt_nullable_list_param(solid_subset, 'solid_subset', of_type=str),
        mode=check.opt_str_param(mode, 'mode', DEFAULT_MODE_NAME),
        user_defined_environment_dict_fn_for_partition=check.callable_param(
            environment_dict_fn_for_partition, 'environment_dict_fn_for_partition'
        ),
        user_defined_tags_fn_for_partition=check.callable_param(
            tags_fn_for_partition, 'tags_fn_for_partition'
        ),
    )
def __new__(
    cls,
    pipeline_origin,
    solid_selection,
    run_config,
    mode,
    step_keys_to_execute,
    pipeline_snapshot_id,
    known_state=None,
    instance_ref=None,
):
    """Arguments for requesting an execution plan snapshot."""
    checked = dict(
        pipeline_origin=check.inst_param(
            pipeline_origin, "pipeline_origin", ExternalPipelineOrigin
        ),
        solid_selection=check.opt_list_param(solid_selection, "solid_selection", of_type=str),
        run_config=check.dict_param(run_config, "run_config"),
        mode=check.str_param(mode, "mode"),
        step_keys_to_execute=check.opt_nullable_list_param(
            step_keys_to_execute, "step_keys_to_execute", of_type=str
        ),
        pipeline_snapshot_id=check.str_param(pipeline_snapshot_id, "pipeline_snapshot_id"),
        known_state=check.opt_inst_param(known_state, "known_state", KnownExecutionState),
        instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
    )
    return super(ExecutionPlanSnapshotArgs, cls).__new__(cls, **checked)
def __new__(
    cls,
    pipeline_origin,
    pipeline_run_id,
    step_keys_to_execute,
    instance_ref=None,
    retry_mode=None,
    known_state=None,
    should_verify_step=None,
):
    """Arguments for executing a set of steps of a pipeline run."""
    checked = dict(
        pipeline_origin=check.inst_param(
            pipeline_origin, "pipeline_origin", PipelinePythonOrigin
        ),
        pipeline_run_id=check.str_param(pipeline_run_id, "pipeline_run_id"),
        step_keys_to_execute=check.opt_nullable_list_param(
            step_keys_to_execute, "step_keys_to_execute", of_type=str
        ),
        instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
        retry_mode=check.opt_inst_param(retry_mode, "retry_mode", RetryMode),
        known_state=check.opt_inst_param(known_state, "known_state", KnownExecutionState),
        # should_verify_step defaults to False when not provided.
        should_verify_step=check.opt_bool_param(
            should_verify_step, "should_verify_step", False
        ),
    )
    return super(ExecuteStepArgs, cls).__new__(cls, **checked)
def __init__(
    self,
    name: Optional[str] = None,
    input_defs: Optional[Sequence[InputDefinition]] = None,
    output_defs: Optional[Sequence[OutputDefinition]] = None,
    description: Optional[str] = None,
    required_resource_keys: Optional[Set[str]] = None,
    config_schema: Optional[Union[Any, Dict[str, Any]]] = None,
    tags: Optional[Dict[str, Any]] = None,
    version: Optional[str] = None,
    decorator_takes_context: Optional[bool] = True,
    retry_policy: Optional[RetryPolicy] = None,
):
    """Capture solid construction arguments before SolidDefinition is built."""
    self.name = check.opt_str_param(name, "name")
    self.description = check.opt_str_param(description, "description")
    self.input_defs = check.opt_list_param(input_defs, "input_defs", InputDefinition)
    self.output_defs = check.opt_nullable_list_param(
        output_defs, "output_defs", OutputDefinition
    )
    # Whether the decorated compute fn expects a context as its first argument.
    self.decorator_takes_context = check.bool_param(
        decorator_takes_context, "decorator_takes_context"
    )
    # Deferred validation: these are checked within SolidDefinition.
    self.required_resource_keys = required_resource_keys
    self.config_schema = config_schema
    self.tags = tags
    self.version = version
    self.retry_policy = retry_policy