Example #1
    def __init__(
        self,
        dagster_type=None,
        name=None,
        description=None,
        is_required=None,
        io_manager_key=None,
        metadata=None,
        asset_key=None,
        asset_partitions=None,
    ):
        self._name = check_valid_name(
            check.opt_str_param(name, "name", DEFAULT_OUTPUT))
        self._dagster_type = resolve_dagster_type(dagster_type)
        self._description = check.opt_str_param(description, "description")
        self._is_required = check.opt_bool_param(is_required,
                                                 "is_required",
                                                 default=True)
        self._manager_key = check.opt_str_param(io_manager_key,
                                                "io_manager_key",
                                                default="io_manager")
        if metadata:
            experimental_arg_warning("metadata", "OutputDefinition.__init__")
        self._metadata = metadata

        if asset_key:
            experimental_arg_warning("asset_key", "OutputDefinition.__init__")

        self._is_asset = asset_key is not None

        if callable(asset_key):
            self._asset_key_fn = asset_key
        else:
            asset_key = check.opt_inst_param(asset_key, "asset_key", AssetKey)
            self._asset_key_fn = lambda _: asset_key

        if asset_partitions:
            experimental_arg_warning("asset_partitions",
                                     "OutputDefinition.__init__")
            check.param_invariant(
                asset_key is not None,
                "asset_partitions",
                'Cannot specify "asset_partitions" argument without also specifying "asset_key"',
            )
        if callable(asset_partitions):
            self._asset_partitions_fn = asset_partitions
        else:
            asset_partitions = check.opt_set_param(asset_partitions,
                                                   "asset_partitions", str)
            self._asset_partitions_fn = lambda _: asset_partitions
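As a quick orientation for the constructor above, here is a minimal, hedged usage sketch. It only uses parameters shown in the signature; the output name and asset key values are made up for illustration.

# Illustrative sketch only; assumes the OutputDefinition signature shown above.
from dagster import AssetKey, OutputDefinition

table_output = OutputDefinition(
    dagster_type=str,
    name="result",
    description="Path of the table written by this solid.",
    io_manager_key="io_manager",
    # asset_key may be a concrete AssetKey or a callable resolved per run.
    asset_key=AssetKey(["warehouse", "my_table"]),
)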
Example #2
    def set_intermediate_object(cls, intermediate_storage, context,
                                dagster_type, step_output_handle, value):
        dagster_type = resolve_dagster_type(dagster_type)
        check.inst_param(intermediate_storage, "intermediate_storage",
                         ADLS2IntermediateStorage)
        paths = [
            "intermediates", step_output_handle.step_key,
            step_output_handle.output_name
        ]
        paths.append(value)
        key = intermediate_storage.object_store.key_for_paths(
            [intermediate_storage.root] + paths)
        return intermediate_storage.object_store.set_object(
            key, "", dagster_type.serialization_strategy)
Example #3
def test_custom_read_write_mode(gcs_bucket):
    run_id = make_new_run_id()
    intermediate_storage = GCSIntermediateStorage(run_id=run_id,
                                                  gcs_bucket=gcs_bucket)
    data_frame = [
        OrderedDict({
            "foo": "1",
            "bar": "1"
        }),
        OrderedDict({
            "foo": "2",
            "bar": "2"
        })
    ]

    obj_name = "data_frame"

    try:
        with yield_empty_pipeline_context(run_id=run_id) as context:
            intermediate_storage.set_intermediate(
                context,
                resolve_dagster_type(LessSimpleDataFrame),
                StepOutputHandle(obj_name),
                data_frame,
            )

            assert intermediate_storage.has_intermediate(
                context, StepOutputHandle(obj_name))
            assert (intermediate_storage.get_intermediate(
                context, resolve_dagster_type(LessSimpleDataFrame),
                StepOutputHandle(obj_name)).obj == data_frame)
            assert intermediate_storage.uri_for_paths([obj_name]).startswith("gs://")

    finally:
        intermediate_storage.rm_intermediate(context,
                                             StepOutputHandle(obj_name))
Example #4
    def __init__(
        self,
        dagster_type=None,
        name=None,
        description=None,
        is_required=None,
        asset_store_key="asset_store",
        asset_metadata=None,
    ):
        self._name = check_valid_name(check.opt_str_param(name, "name", DEFAULT_OUTPUT))
        self._dagster_type = resolve_dagster_type(dagster_type)
        self._description = check.opt_str_param(description, "description")
        self._is_required = check.opt_bool_param(is_required, "is_required", default=True)
        self._asset_store_key = asset_store_key
        self._asset_metadata = asset_metadata
Example #5
def test_custom_read_write_mode(s3_bucket):
    run_id = make_new_run_id()
    intermediate_storage = S3IntermediateStorage(run_id=run_id,
                                                 s3_bucket=s3_bucket)
    data_frame = [
        OrderedDict({
            'foo': '1',
            'bar': '1'
        }),
        OrderedDict({
            'foo': '2',
            'bar': '2'
        })
    ]
    try:
        with yield_empty_pipeline_context(run_id=run_id) as context:
            intermediate_storage.set_intermediate(
                context,
                resolve_dagster_type(LessSimpleDataFrame),
                StepOutputHandle('data_frame'),
                data_frame,
            )

            assert intermediate_storage.has_intermediate(
                context, StepOutputHandle('data_frame'))
            assert (intermediate_storage.get_intermediate(
                context,
                resolve_dagster_type(LessSimpleDataFrame),
                StepOutputHandle('data_frame'),
            ).obj == data_frame)
            assert intermediate_storage.uri_for_paths(['data_frame']).startswith('s3://')

    finally:
        intermediate_storage.rm_intermediate(context,
                                             StepOutputHandle('data_frame'))
Example #6
def test_file_system_intermediate_store_with_composite_type_storage_plugin():
    run_id = str(uuid.uuid4())

    intermediate_store = build_fs_intermediate_store(
        DagsterInstance.ephemeral().intermediates_directory,
        run_id=run_id,
        type_storage_plugin_registry=TypeStoragePluginRegistry([
            (RuntimeString, FancyStringFilesystemTypeStoragePlugin)
        ]),
    )

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(['hello'], context,
                                         resolve_dagster_type(List[String]),
                                         ['obj_name'])

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(['hello'], context,
                                         resolve_dagster_type(
                                             Optional[String]), ['obj_name'])

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(['hello'], context,
                                         resolve_dagster_type(
                                             List[Optional[String]]),
                                         ['obj_name'])

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(['hello'], context,
                                         resolve_dagster_type(
                                             Optional[List[String]]),
                                         ['obj_name'])
Example #7
def test_gcs_intermediate_store_composite_types_with_custom_serializer_for_inner_type(
        gcs_bucket):
    run_id = make_new_run_id()

    intermediate_store = GCSIntermediateStore(run_id=run_id,
                                              gcs_bucket=gcs_bucket)

    obj_name = 'list'

    with yield_empty_pipeline_context(run_id=run_id) as context:
        try:
            intermediate_store.set_object(
                ['foo', 'bar'],
                context,
                resolve_dagster_type(List[LowercaseString]),
                [obj_name],
            )
            assert intermediate_store.has_object(context, [obj_name])
            assert intermediate_store.get_object(
                context, resolve_dagster_type(List[Bool]),
                [obj_name]).obj == ['foo', 'bar']

        finally:
            intermediate_store.rm_object(context, [obj_name])
Example #8
def test_s3_intermediate_store_with_composite_type_storage_plugin(s3_bucket):
    run_id = make_new_run_id()

    intermediate_store = S3IntermediateStore(
        run_id=run_id,
        s3_bucket=s3_bucket,
        type_storage_plugin_registry=TypeStoragePluginRegistry([
            (RuntimeString, FancyStringS3TypeStoragePlugin)
        ]),
    )

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(['hello'], context,
                                         resolve_dagster_type(List[String]),
                                         ['obj_name'])
Example #9
    def __init__(self,
                 name,
                 dagster_type=None,
                 description=None,
                 default_value=_NoValueSentinel):
        ""
        self._name = check_valid_name(name)

        self._dagster_type = check.inst(resolve_dagster_type(dagster_type),
                                        DagsterType)

        self._description = check.opt_str_param(description, "description")

        self._default_value = _check_default_value(self._name,
                                                   self._dagster_type,
                                                   default_value)
Example #10
def _checked_inferred_type(inferred: InferredInputProps) -> DagsterType:
    try:
        resolved_type = resolve_dagster_type(inferred.annotation)
    except DagsterError as e:
        raise DagsterInvalidDefinitionError(
            f"Problem using type '{inferred.annotation}' from type annotation for argument "
            f"'{inferred.name}', correct the issue or explicitly set the dagster_type on "
            "your InputDefinition.") from e

    if resolved_type.is_nothing:
        raise DagsterInvalidDefinitionError(
            f"Input parameter {inferred.name} is annotated with {resolved_type.display_name} "
            "which is a type that represents passing no data. This type must be used "
            "via InputDefinition and no parameter should be included in the solid function."
        )
    return resolved_type
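The helper above hinges on two DagsterType properties, display_name and is_nothing, which can be checked directly. A small hedged illustration (assumes resolve_dagster_type is imported as in the other snippets on this page):

# Sketch of the two cases _checked_inferred_type distinguishes.
from dagster import Nothing

assert resolve_dagster_type(int).display_name == "Int"   # ordinary annotation resolves normally
assert resolve_dagster_type(Nothing).is_nothing          # would be rejected as an input annotation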
Example #11
def test_gcs_intermediate_store_with_composite_type_storage_plugin(gcs_bucket):
    run_id = str(uuid.uuid4())

    intermediate_store = GCSIntermediateStore(
        run_id=run_id,
        gcs_bucket=gcs_bucket,
        type_storage_plugin_registry=TypeStoragePluginRegistry(
            [(RuntimeString, FancyStringGCSTypeStoragePlugin)]
        ),
    )

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(
                ['hello'], context, resolve_dagster_type(List[String]), ['obj_name']
            )
Example #12
def test_adls2_intermediate_store_with_composite_type_storage_plugin(storage_account, file_system):
    run_id = make_new_run_id()

    intermediate_store = ADLS2IntermediateStore(
        adls2_client=get_adls2_client(storage_account),
        blob_client=get_blob_client(storage_account),
        run_id=run_id,
        file_system=file_system,
        type_storage_plugin_registry=TypeStoragePluginRegistry(
            [(RuntimeString, FancyStringS3TypeStoragePlugin)]
        ),
    )
    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_store.set_value(
                ['hello'], context, resolve_dagster_type(List[String]), ['obj_name']
            )
Example #13
    def __init__(
        self, dagster_type=None, name=None, description=None, is_optional=None, is_required=None
    ):
        self._name = check_valid_name(check.opt_str_param(name, 'name', DEFAULT_OUTPUT))
        self._runtime_type = resolve_dagster_type(dagster_type)
        self._description = check.opt_str_param(description, 'description')
        check.opt_bool_param(is_optional, 'is_optional')
        check.opt_bool_param(is_required, 'is_required')

        canonical_is_required = canonicalize_backcompat_args(
            new_val=is_required,
            new_arg='is_required',
            old_val=is_optional,
            old_arg='is_optional',
            coerce_old_to_new=lambda val: not val,
            additional_warn_txt='"is_optional" deprecated in 0.7.0 and will be removed in 0.8.0. Users should use "is_required" instead.',
        )
        self._optional = False if (canonical_is_required is None) else not canonical_is_required
Example #14
    def get_intermediate(
        self,
        context,
        dagster_type=None,
        step_output_handle=None,
    ):
        dagster_type = resolve_dagster_type(dagster_type)
        check.opt_inst_param(context, 'context', SystemExecutionContext)
        check.inst_param(dagster_type, 'dagster_type', DagsterType)
        check.inst_param(step_output_handle, 'step_output_handle',
                         StepOutputHandle)
        check.invariant(self.has_intermediate(context, step_output_handle))

        return self._intermediate_store.get_value(
            context=context,
            dagster_type=dagster_type,
            paths=self._get_paths(step_output_handle),
        )
Example #15
    def __init__(
        self,
        dagster_type=None,
        name=None,
        description=None,
        is_required=None,
        asset_store_key=None,
        asset_metadata=None,
    ):
        from dagster.core.storage.asset_store import AssetStoreHandle

        self._name = check_valid_name(check.opt_str_param(name, "name", DEFAULT_OUTPUT))
        self._dagster_type = resolve_dagster_type(dagster_type)
        self._description = check.opt_str_param(description, "description")
        self._is_required = check.opt_bool_param(is_required, "is_required", default=True)
        self._asset_store_handle = (
            AssetStoreHandle(asset_store_key, asset_metadata) if asset_store_key else None
        )
Example #16
def test_gcs_intermediate_storage_with_composite_type_storage_plugin(gcs_bucket):
    run_id = make_new_run_id()

    intermediate_storage = GCSIntermediateStorage(
        run_id=run_id,
        gcs_bucket=gcs_bucket,
        type_storage_plugin_registry=TypeStoragePluginRegistry(
            [(RuntimeString, FancyStringGCSTypeStoragePlugin)]
        ),
    )

    obj_name = "obj_name"

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_storage.set_intermediate(
                context, resolve_dagster_type(List[String]), StepOutputHandle(obj_name), ["hello"]
            )
Example #17
def check_dagster_type(dagster_type, value):
    '''Test a custom Dagster type.

    Args:
        dagster_type (Any): The Dagster type to test. Should be one of the
            :ref:`built-in types <builtin>`, a dagster type explicitly constructed with
            :py:func:`as_dagster_type`, :py:func:`@usable_as_dagster_type <dagster_type>`, or
            :py:func:`PythonObjectDagsterType`, or a Python type.
        value (Any): The runtime value to test.

    Returns:
        TypeCheck: The result of the type check.


    Examples:

        .. code-block:: python

            assert check_dagster_type(Dict[Any, Any], {'foo': 'bar'}).success
    '''

    if is_typing_type(dagster_type):
        raise DagsterInvariantViolationError(
            (
                'Must pass in a type from dagster module. You passed {dagster_type} '
                'which is part of python\'s typing module.'
            ).format(dagster_type=dagster_type)
        )

    dagster_type = resolve_dagster_type(dagster_type)
    with yield_empty_pipeline_context() as pipeline_context:
        context = pipeline_context.for_type(dagster_type)
        try:
            type_check = dagster_type.type_check(context, value)
        except Failure as failure:
            return TypeCheck(success=False, description=failure.description)

        if not isinstance(type_check, TypeCheck):
            raise DagsterInvariantViolationError(
                'Type checks can only return TypeCheck. Type {type_name} returned {value}.'.format(
                    type_name=dagster_type.name, value=repr(type_check)
                )
            )
        return type_check
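Beyond the docstring example, the returned TypeCheck also reports failures; a couple of hedged sanity checks against plain Python types:

assert check_dagster_type(int, 1).success
assert not check_dagster_type(int, "not an int").success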
Example #18
    def get_intermediate(
        self, context, dagster_type=None, step_output_handle=None,
    ):
        dagster_type = resolve_dagster_type(dagster_type)
        check.opt_inst_param(context, "context", SystemExecutionContext)
        check.inst_param(dagster_type, "dagster_type", DagsterType)
        check.inst_param(step_output_handle, "step_output_handle", StepOutputHandle)
        check.invariant(self.has_intermediate(context, step_output_handle))

        if self.type_storage_plugin_registry.is_registered(dagster_type):
            return self.type_storage_plugin_registry.get(dagster_type.name).get_intermediate_object(
                self, context, dagster_type, step_output_handle
            )
        elif dagster_type.name is None:
            self.type_storage_plugin_registry.check_for_unsupported_composite_overrides(
                dagster_type
            )

        return self.get_intermediate_object(dagster_type, step_output_handle)
Example #19
    def __init__(
        self,
        dagster_type=None,
        name=None,
        description=None,
        is_required=None,
        manager_key=None,
        metadata=None,
    ):
        self._name = check_valid_name(
            check.opt_str_param(name, "name", DEFAULT_OUTPUT))
        self._dagster_type = resolve_dagster_type(dagster_type)
        self._description = check.opt_str_param(description, "description")
        self._is_required = check.opt_bool_param(is_required,
                                                 "is_required",
                                                 default=True)
        self._manager_key = check.opt_str_param(manager_key,
                                                "manager_key",
                                                default="io_manager")
        self._metadata = metadata
Example #20
    def __new__(
        cls,
        step_key: str,
        name: str,
        pipeline_name: str,
        run_id: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
        mapping_key: Optional[str] = None,
        config: Any = None,
        solid_def: Optional[SolidDefinition] = None,
        dagster_type: Optional[DagsterType] = None,
        log_manager: Optional[DagsterLogManager] = None,
        version: Optional[str] = None,
        # This is used internally by the intermediate storage adapter; we don't usually expect users to mock this.
        step_context: Optional[SystemStepExecutionContext] = None,
        resource_config: Optional[Any] = None,
        resources: Optional[NamedTuple] = None,
    ):

        return super(OutputContext, cls).__new__(
            cls,
            step_key=check.str_param(step_key, "step_key"),
            name=check.str_param(name, "name"),
            pipeline_name=check.str_param(pipeline_name, "pipeline_name"),
            run_id=check.opt_str_param(run_id, "run_id"),
            metadata=check.opt_dict_param(metadata, "metadata"),
            mapping_key=check.opt_str_param(mapping_key, "mapping_key"),
            config=config,
            solid_def=check.opt_inst_param(solid_def, "solid_def",
                                           SolidDefinition),
            dagster_type=check.inst_param(
                resolve_dagster_type(dagster_type), "dagster_type", DagsterType
            ),  # this allows the user to mock the context with unresolved dagster type
            log=check.opt_inst_param(log_manager, "log_manager",
                                     DagsterLogManager),
            version=check.opt_str_param(version, "version"),
            step_context=check.opt_inst_param(step_context, "step_context",
                                              SystemStepExecutionContext),
            resource_config=resource_config,
            resources=resources,
        )
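The inline comment is the notable detail: because dagster_type is passed through resolve_dagster_type, a test can construct this context with an unresolved Python type. A hedged sketch using the class defined above (all values are placeholders):

# Illustrative mock of an OutputContext for a test, per the comment above.
mock_context = OutputContext(
    step_key="emit_table",
    name="result",
    pipeline_name="my_pipeline",
    dagster_type=int,  # plain Python type; resolved inside __new__
)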
Example #21
    def set_intermediate_to_address(
        self,
        context,
        dagster_type=None,
        step_output_handle=None,
        value=None,
        address=None,
        version=None,
    ):
        """
        This is an experimental method.
        It will likely be merged into `set_intermediate_object`. To do so, we will need to
        update `set_intermediate_object` to take `address` as an arg.
        """
        dagster_type = resolve_dagster_type(dagster_type)
        check.opt_inst_param(context, "context", SystemExecutionContext)
        check.inst_param(dagster_type, "dagster_type", DagsterType)
        check.inst_param(step_output_handle, "step_output_handle",
                         StepOutputHandle)
        check.str_param(address, "address")
        check.opt_str_param(version, "version")

        # currently it doesn't support type_storage_plugin_registry
        try:
            uri = self.object_store.set_object(
                key=address,
                obj=value,
                serialization_strategy=dagster_type.serialization_strategy)
            return ObjectStoreOperation(
                op=ObjectStoreOperationType.SET_OBJECT,
                key=uri,
                dest_key=None,
                obj=value,
                serialization_strategy_name=dagster_type.serialization_strategy.name,
                object_store_name=self.object_store.name,
                version=version,
            )
        except (IOError, OSError) as e:
            raise DagsterAddressIOError(str(e))
Example #22
def test_inner_types():
    assert resolve_dagster_type(Int).inner_types == []

    list_int_runtime = resolve_dagster_type(List[Int])
    assert inner_type_key_set(list_int_runtime) == set(["Int"])

    list_list_int_runtime = resolve_dagster_type(List[List[Int]])
    assert inner_type_key_set(list_list_int_runtime) == set(
        ["Int", "List.Int"])

    list_nullable_int_runtime = resolve_dagster_type(List[Optional[Int]])
    assert inner_type_key_set(list_nullable_int_runtime) == set(
        ["Int", "Optional.Int"])
    assert not list_nullable_int_runtime.kind == DagsterTypeKind.SCALAR

    tuple_optional_list = resolve_dagster_type(Tuple[List[Optional[Int]],
                                                     List[Dict[str, str]]])
    assert inner_type_key_set(tuple_optional_list) == set([
        "Int",
        "Optional.Int",
        "List.Optional.Int",
        "String",
        "TypedPythonDict.String.String",
        "List.TypedPythonDict.String.String",
    ])
    assert not tuple_optional_list.kind == DagsterTypeKind.SCALAR

    deep_dict = resolve_dagster_type(Dict[str, Dict[str, Dict[str, int]]])
    assert inner_type_key_set(deep_dict) == set([
        "TypedPythonDict.String.TypedPythonDict.String.Int",
        "TypedPythonDict.String.Int",
        "String",
        "Int",
    ])

    deep_set = resolve_dagster_type(Set[Dict[str, Dict[str, int]]])
    assert inner_type_key_set(deep_set) == set([
        "TypedPythonDict.String.TypedPythonDict.String.Int",
        "TypedPythonDict.String.Int",
        "String",
        "Int",
    ])
Example #23
def test_s3_intermediate_storage_with_composite_type_storage_plugin(
        mock_s3_bucket):
    run_id = make_new_run_id()

    intermediate_storage = S3IntermediateStorage(
        run_id=run_id,
        s3_bucket=mock_s3_bucket.name,
        type_storage_plugin_registry=TypeStoragePluginRegistry([
            (RuntimeString, FancyStringS3TypeStoragePlugin)
        ]),
    )

    with yield_empty_pipeline_context(run_id=run_id) as context:
        with pytest.raises(check.NotImplementedCheckError):
            intermediate_storage.set_intermediate(
                context,
                resolve_dagster_type(List[String]),
                StepOutputHandle("obj_name"),
                [
                    "hello",
                ],
            )
Example #24
    def __new__(
        cls,
        pipeline_name: str,
        # This will be None when called from calling SolidExecutionResult.output_value
        name: Optional[str] = None,
        solid_def: Optional[SolidDefinition] = None,
        config: Any = None,
        metadata: Optional[Dict[str, Any]] = None,
        upstream_output: Optional[OutputContext] = None,
        dagster_type: Optional[DagsterType] = None,
        log_manager: Optional[DagsterLogManager] = None,
        # This is used internally by the intermediate storage adapter; we don't expect users to mock this.
        step_context: Optional[SystemStepExecutionContext] = None,
        resource_config: Any = None,
        resources: Optional[NamedTuple] = None,
    ):

        return super(InputContext, cls).__new__(
            cls,
            name=check.opt_str_param(name, "name"),
            pipeline_name=check.opt_str_param(pipeline_name, "pipeline_name"),
            solid_def=check.opt_inst_param(solid_def, "solid_def",
                                           SolidDefinition),
            config=config,
            metadata=metadata,
            upstream_output=check.opt_inst_param(upstream_output,
                                                 "upstream_output",
                                                 OutputContext),
            dagster_type=check.inst_param(
                resolve_dagster_type(dagster_type), "dagster_type", DagsterType
            ),  # this allows the user to mock the context with unresolved dagster type
            log=check.opt_inst_param(log_manager, "log_manager",
                                     DagsterLogManager),
            step_context=check.opt_inst_param(step_context, "step_context",
                                              SystemStepExecutionContext),
            resource_config=resource_config,
            resources=resources,
        )
Example #25
def check_dagster_type(dagster_type, value):
    '''Test a custom Dagster type.

    Args:
        dagster_type (Any): The Dagster type to test. Should be one of the
            :ref:`built-in types <builtin>`, a dagster type explicitly constructed with
            :py:func:`as_dagster_type`, :py:func:`@dagster_type <dagster_type>`, or
            :py:func:`define_python_dagster_type`, or a Python type.
        value (Any): The runtime value to test.

    Returns:
        TypeCheck: The result of the type check.


    Examples:

        .. code-block:: python

            assert check_dagster_type(Dict[Any, Any], {'foo': 'bar'}).success
    '''

    if is_typing_type(dagster_type):
        raise DagsterInvariantViolationError(
            (
                'Must pass in a type from dagster module. You passed {dagster_type} '
                'which is part of python\'s typing module.'
            ).format(dagster_type=dagster_type)
        )

    runtime_type = resolve_dagster_type(dagster_type)
    type_check = runtime_type.type_check(value)
    if not isinstance(type_check, TypeCheck):
        raise DagsterInvariantViolationError(
            'Type checks can only return TypeCheck. Type {type_name} returned {value}.'.format(
                type_name=runtime_type.name, value=repr(type_check)
            )
        )
    return type_check
Example #26
    def set_intermediate(
        self, context, dagster_type=None, step_output_handle=None, value=None,
    ):
        dagster_type = resolve_dagster_type(dagster_type)
        check.opt_inst_param(context, "context", SystemExecutionContext)
        check.inst_param(dagster_type, "dagster_type", DagsterType)
        check.inst_param(step_output_handle, "step_output_handle", StepOutputHandle)

        if self.has_intermediate(context, step_output_handle):
            context.log.warning(
                "Replacing existing intermediate for %s.%s"
                % (step_output_handle.step_key, step_output_handle.output_name)
            )

        if self.type_storage_plugin_registry.is_registered(dagster_type):
            return self.type_storage_plugin_registry.get(dagster_type.name).set_intermediate_object(
                self, context, dagster_type, step_output_handle, value
            )
        elif dagster_type.name is None:
            self.type_storage_plugin_registry.check_for_unsupported_composite_overrides(
                dagster_type
            )

        return self.set_intermediate_object(dagster_type, step_output_handle, value)
Example #27
    def __new__(
        cls,
        dagster_type: Union[Type, DagsterType] = NoValueSentinel,
        description: Optional[str] = None,
        default_value: Any = NoValueSentinel,
        root_manager_key: Optional[str] = None,
        metadata: Optional[Mapping[str, Any]] = None,
        asset_key: Optional[Union[AssetKey, Callable[["InputContext"], AssetKey]]] = None,
        asset_partitions: Optional[Union[Set[str], Callable[["InputContext"], Set[str]]]] = None,
    ):
        return super(In, cls).__new__(
            cls,
            dagster_type=NoValueSentinel
            if dagster_type is NoValueSentinel
            else resolve_dagster_type(dagster_type),
            description=check.opt_str_param(description, "description"),
            default_value=default_value,
            root_manager_key=check.opt_str_param(root_manager_key, "root_manager_key"),
            metadata=check.opt_dict_param(metadata, "metadata", key_type=str),
            asset_key=check.opt_inst_param(asset_key, "asset_key", (AssetKey, FunctionType)),
            asset_partitions=check.opt_inst_param(
                asset_partitions, "asset_partitions", (Set[str], FunctionType)
            ),
        )
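For orientation, a hedged sketch of how an In constructed this way is typically attached to an op via the ins mapping; the op body and asset key are hypothetical:

# Illustrative only; assumes the In signature shown above.
from dagster import AssetKey, In, op

@op(ins={"raw": In(dagster_type=str, description="Raw payload",
                   asset_key=AssetKey(["source", "raw_payload"]))})
def parse_payload(raw):
    return raw.strip()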
Example #28
def test_is_any():
    assert not resolve_dagster_type(Int).kind == DagsterTypeKind.ANY
    assert resolve_dagster_type(Int).kind == DagsterTypeKind.SCALAR
Example #29
def test_comparison():
    # Base types
    assert resolve_dagster_type(Any) == resolve_dagster_type(Any)
    assert resolve_dagster_type(String) == resolve_dagster_type(String)
    assert resolve_dagster_type(Bool) == resolve_dagster_type(Bool)
    assert resolve_dagster_type(Float) == resolve_dagster_type(Float)
    assert resolve_dagster_type(Int) == resolve_dagster_type(Int)
    assert resolve_dagster_type(String) == resolve_dagster_type(String)
    assert resolve_dagster_type(Nothing) == resolve_dagster_type(Nothing)
    assert resolve_dagster_type(Optional[String]) == resolve_dagster_type(
        Optional[String])

    types = [Any, Bool, Float, Int, String, Nothing]
    non_equal_pairs = [(t1, t2) for t1 in types for t2 in types if t1 != t2]
    for t1, t2 in non_equal_pairs:
        assert resolve_dagster_type(t1) != resolve_dagster_type(t2)
    assert resolve_dagster_type(Optional[String]) != resolve_dagster_type(
        Optional[Int])

    # List type
    assert resolve_dagster_type(List) == resolve_dagster_type(List)
    assert resolve_dagster_type(List[String]) == resolve_dagster_type(
        List[String])
    assert resolve_dagster_type(List[List[Int]]) == resolve_dagster_type(
        List[List[Int]])
    assert resolve_dagster_type(
        List[Optional[String]]) == resolve_dagster_type(List[Optional[String]])

    assert resolve_dagster_type(List[String]) != resolve_dagster_type(
        List[Int])
    assert resolve_dagster_type(List[List[String]]) != resolve_dagster_type(
        List[List[Int]])
    assert resolve_dagster_type(List[String]) != resolve_dagster_type(
        List[Optional[String]])

    # Tuple type
    assert resolve_dagster_type(Tuple) == resolve_dagster_type(Tuple)
    assert resolve_dagster_type(Tuple[String, Int]) == resolve_dagster_type(
        Tuple[String, Int])
    assert resolve_dagster_type(
        Tuple[Tuple[String, Int]]) == resolve_dagster_type(Tuple[Tuple[String,
                                                                       Int]])
    assert resolve_dagster_type(Tuple[Optional[String],
                                      Int]) == resolve_dagster_type(
                                          Tuple[Optional[String], Int])

    assert resolve_dagster_type(Tuple[String, Int]) != resolve_dagster_type(
        Tuple[Int, String])
    assert resolve_dagster_type(Tuple[Tuple[String,
                                            Int]]) != resolve_dagster_type(
                                                Tuple[Tuple[Int, String]])
    assert resolve_dagster_type(Tuple[String]) != resolve_dagster_type(
        Tuple[Optional[String]])

    # Set type
    assert resolve_dagster_type(Set) == resolve_dagster_type(Set)
    assert resolve_dagster_type(Set[String]) == resolve_dagster_type(
        Set[String])
    assert resolve_dagster_type(Set[Set[Int]]) == resolve_dagster_type(
        Set[Set[Int]])
    assert resolve_dagster_type(Set[Optional[String]]) == resolve_dagster_type(
        Set[Optional[String]])

    assert resolve_dagster_type(Set[String]) != resolve_dagster_type(Set[Int])
    assert resolve_dagster_type(Set[Set[String]]) != resolve_dagster_type(
        Set[Set[Int]])
    assert resolve_dagster_type(Set[String]) != resolve_dagster_type(
        Set[Optional[String]])

    # Dict type
    assert resolve_dagster_type(Dict) == resolve_dagster_type(Dict)
    assert resolve_dagster_type(Dict[String,
                                     Int]) == resolve_dagster_type(Dict[String,
                                                                        Int])
    assert resolve_dagster_type(Dict[String,
                                     Dict[String,
                                          Int]]) == resolve_dagster_type(
                                              Dict[String, Dict[String, Int]])

    assert resolve_dagster_type(Dict[String, Int]) != resolve_dagster_type(
        Dict[Int, String])
    assert resolve_dagster_type(
        Dict[Int, Dict[String, Int]]) != resolve_dagster_type(
            Dict[String, Dict[String, Int]])
Example #30
def test_type_equality():
    assert resolve_dagster_type(int) == resolve_dagster_type(int)
    assert not (resolve_dagster_type(int) != resolve_dagster_type(int))

    assert resolve_dagster_type(List[int]) == resolve_dagster_type(List[int])
    assert not (resolve_dagster_type(List[int]) != resolve_dagster_type(List[int]))

    assert resolve_dagster_type(Optional[List[int]]) == resolve_dagster_type(Optional[List[int]])
    assert not (
        resolve_dagster_type(Optional[List[int]]) != resolve_dagster_type(Optional[List[int]])
    )