Example 1
            PARTITION_SET_TAG: partition_set.name
        }


@whitelist_for_serdes(serializer=DagsterRunSerializer)
class DagsterRun(PipelineRun):
    """Serializable internal representation of a dagster run, as stored in a
    :py:class:`~dagster.core.storage.runs.RunStorage`.

    Subclasses PipelineRun for backcompat purposes. DagsterRun is the actual initialized class used throughout the system.
    """


# DagsterRun is serialized as PipelineRun so that it can be read by older (pre 0.13.x) versions of
# Dagster, but is read back in as a DagsterRun.
register_serdes_tuple_fallbacks({"PipelineRun": DagsterRun})


@whitelist_for_serdes
class PipelineRunsFilter(
        namedtuple(
            "_PipelineRunsFilter",
            "run_ids pipeline_name statuses tags snapshot_id updated_after mode"
        )):
    def __new__(
        cls,
        run_ids=None,
        pipeline_name=None,
        statuses=None,
        tags=None,
        snapshot_id=None,
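
The comment in Example 1 captures the whole backcompat scheme around register_serdes_tuple_fallbacks: the renamed class keeps writing itself out under its old serialized name so older readers still recognize it, while the old name is registered as a fallback so stored payloads deserialize into the new class. Below is a minimal, self-contained sketch of that round trip; serialize_nt, deserialize_nt, register_fallbacks, and the two-field run tuple are hypothetical stand-ins, not the dagster serdes API.

import json
from collections import namedtuple

# Hypothetical registry: serialized class name -> class to instantiate on read.
_DESERIALIZATION_MAP = {}

def register_fallbacks(fallback_map):
    _DESERIALIZATION_MAP.update(fallback_map)

def serialize_nt(obj, storage_name):
    # Write the tuple under its legacy name so pre-rename readers still work.
    return json.dumps({"__class__": storage_name, **obj._asdict()})

def deserialize_nt(blob):
    data = json.loads(blob)
    return _DESERIALIZATION_MAP[data.pop("__class__")](**data)

PipelineRun = namedtuple("PipelineRun", "run_id pipeline_name")

class DagsterRun(PipelineRun):
    """Renamed class; still serialized as 'PipelineRun' for backcompat."""

register_fallbacks({"PipelineRun": DagsterRun})

blob = serialize_nt(DagsterRun("abc123", "my_pipeline"), storage_name="PipelineRun")
run = deserialize_nt(blob)
assert isinstance(run, DagsterRun) and run.pipeline_name == "my_pipeline"
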
Example 2
        if event_specific_data.op in ("GET_ASSET", AssetStoreOperationType.GET_ASSET):
            return (
                DagsterEventType.LOADED_INPUT.value,
                LoadedInputData(
                    event_specific_data.output_name, event_specific_data.asset_store_key
                ),
            )
        if event_specific_data.op in ("SET_ASSET", AssetStoreOperationType.SET_ASSET):
            return (
                DagsterEventType.HANDLED_OUTPUT.value,
                HandledOutputData(
                    event_specific_data.output_name, event_specific_data.asset_store_key, []
                ),
            )

    # previous name for ASSET_MATERIALIZATION was STEP_MATERIALIZATION
    if event_type_value == "STEP_MATERIALIZATION":
        return DagsterEventType.ASSET_MATERIALIZATION.value, event_specific_data
    else:
        return event_type_value, event_specific_data


register_serdes_tuple_fallbacks(
    {
        "PipelineProcessStartedData": None,
        "PipelineProcessExitedData": None,
        "PipelineProcessStartData": None,
    }
)
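
Examples 2 and 5 show the other half of reading old event logs: a translation hook that runs during deserialization and rewrites legacy event type values (and, for ASSET_STORE_OPERATION, their payload classes) into the current ones. Here is a stripped-down sketch of just the type-value translation, with a hypothetical translate_event_type helper rather than dagster's own function:

from enum import Enum

class DagsterEventType(Enum):
    ASSET_MATERIALIZATION = "ASSET_MATERIALIZATION"
    LOADED_INPUT = "LOADED_INPUT"
    HANDLED_OUTPUT = "HANDLED_OUTPUT"

# Legacy event type value -> current value (assumed mapping, mirroring the
# STEP_MATERIALIZATION -> ASSET_MATERIALIZATION rename in the snippet above).
_LEGACY_EVENT_TYPE_VALUES = {
    "STEP_MATERIALIZATION": DagsterEventType.ASSET_MATERIALIZATION.value,
}

def translate_event_type(event_type_value, event_specific_data):
    """Rewrite legacy type values at read time; pass current values through."""
    return (
        _LEGACY_EVENT_TYPE_VALUES.get(event_type_value, event_type_value),
        event_specific_data,
    )

assert translate_event_type("STEP_MATERIALIZATION", None) == ("ASSET_MATERIALIZATION", None)
assert translate_event_type("HANDLED_OUTPUT", None) == ("HANDLED_OUTPUT", None)
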
Example 3
File: log.py Project: zuik/dagster

def construct_json_event_logger(json_path):
    """Record a stream of event records to json"""
    check.str_param(json_path, "json_path")
    return construct_single_handler_logger(
        "json-event-record-logger",
        "debug",
        JsonEventLoggerHandler(
            json_path,
            lambda record: construct_event_record(
                StructuredLoggerMessage(
                    name=record.name,
                    message=record.msg,
                    level=record.levelno,
                    meta=record.dagster_meta,
                    record=record,
                )
            ),
        ),
    )


register_serdes_tuple_fallbacks({
    # These were originally distinguished from each other but ended up being empty subclasses
    # of EventRecord -- instead of using the subclasses we were relying on
    # EventRecord.is_dagster_event to distinguish events that originate in the logging
    # machinery from events that are yielded by user code
    "DagsterEventRecord": EventRecord,
    "LogMessageRecord": EventRecord,
})
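
construct_json_event_logger in Example 3 wires up a handler that turns every log record into a structured event record and appends it to a JSON file. The real JsonEventLoggerHandler also carries dagster_meta; the handler below is a simplified, hypothetical stand-in that shows the shape of that plumbing using only the standard library.

import json
import logging

class JsonLineHandler(logging.Handler):
    """Append one JSON object per log record to a file (simplified sketch)."""

    def __init__(self, json_path, construct_event_record):
        super().__init__()
        self._json_path = json_path
        self._construct_event_record = construct_event_record

    def emit(self, record):
        event_record = self._construct_event_record(record)
        with open(self._json_path, "a", encoding="utf-8") as f:
            f.write(json.dumps(event_record) + "\n")

logger = logging.getLogger("json-event-record-logger")
logger.setLevel(logging.DEBUG)
logger.addHandler(
    JsonLineHandler(
        "events.jsonl",  # hypothetical output path
        lambda record: {
            "name": record.name,
            "message": record.getMessage(),
            "level": record.levelno,
        },
    )
)
logger.debug("engine event")  # appends {"name": ..., "message": "engine event", "level": 10}
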
Example 4
                "external_repository_origin",
                ExternalRepositoryOrigin,
            ),
            check.str_param(instigator_name, "instigator_name"),
        )

    def get_id(self):
        return create_snapshot_id(self)


# ExternalInstigatorOrigin used to be called ExternalJobOrigin, before the concept of "job" was
# introduced in 0.12.0. For clarity, we changed the name of the namedtuple in `0.14.0`, but we
# need to maintain the serialized format in order to avoid changing the origin id that is stored in
# our schedule storage.  This registers the serialized ExternalJobOrigin named tuple class to be
# deserialized as an ExternalInstigatorOrigin, using its corresponding serializer for serdes.
register_serdes_tuple_fallbacks({"ExternalJobOrigin": ExternalInstigatorOrigin})


@whitelist_for_serdes
class ExternalPartitionSetOrigin(
    NamedTuple(
        "_PartitionSetOrigin",
        [("external_repository_origin", ExternalRepositoryOrigin), ("partition_set_name", str)],
    )
):
    """Serializable representation of an ExternalPartitionSet that can be used to
    uniquely identify it or reload it across process boundaries.
    """

    def __new__(cls, external_repository_origin: ExternalRepositoryOrigin, partition_set_name: str):
        return super(ExternalPartitionSetOrigin, cls).__new__(
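
The comment in Example 4 is about id stability: get_id derives the origin id from the serialized representation, so renaming the namedtuple without pinning its serialized class name to ExternalJobOrigin would silently change every id already stored in schedule storage. Below is a self-contained illustration of that constraint, with simplified fields and an assumed hash-of-serialized-payload id scheme (not dagster's actual create_snapshot_id internals).

import hashlib
import json
from typing import NamedTuple

def create_snapshot_id(serialized: str) -> str:
    # Assumption for this sketch: the id is a stable hash of the serialized form.
    return hashlib.sha1(serialized.encode("utf-8")).hexdigest()

class ExternalInstigatorOrigin(NamedTuple):
    repository_name: str   # simplified stand-in for external_repository_origin
    instigator_name: str

    def serialize(self) -> str:
        # Keep writing the legacy class name so ids computed before the rename
        # still match what is stored.
        return json.dumps({"__class__": "ExternalJobOrigin", **self._asdict()}, sort_keys=True)

    def get_id(self) -> str:
        return create_snapshot_id(self.serialize())

origin = ExternalInstigatorOrigin("my_repo", "my_schedule")
legacy_blob = json.dumps(
    {"__class__": "ExternalJobOrigin", "repository_name": "my_repo", "instigator_name": "my_schedule"},
    sort_keys=True,
)
assert origin.get_id() == create_snapshot_id(legacy_blob)
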
Example 5
        return DagsterEventType.ENGINE_EVENT.value, EngineEventData([])
    elif event_type_value == "ASSET_STORE_OPERATION":
        if event_specific_data.op in ("GET_ASSET",
                                      AssetStoreOperationType.GET_ASSET):
            return (
                DagsterEventType.LOADED_INPUT.value,
                LoadedInputData(event_specific_data.output_name,
                                event_specific_data.asset_store_key),
            )
        if event_specific_data.op in ("SET_ASSET",
                                      AssetStoreOperationType.SET_ASSET):
            return (
                DagsterEventType.HANDLED_OUTPUT.value,
                HandledOutputData(event_specific_data.output_name,
                                  event_specific_data.asset_store_key),
            )
    else:
        return event_type_value, event_specific_data


register_serdes_tuple_fallbacks({
    "PipelineProcessStartedData":
    None,
    "PipelineProcessExitedData":
    None,
    "PipelineProcessStartData":
    None,
    "AssetStoreOperationData":
    AssetStoreOperationData,
})
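
Example 5's fallback map mixes two kinds of entries: AssetStoreOperationData still exists and is simply re-registered, while the three PipelineProcess*Data classes map to None, presumably telling the deserializer to drop those legacy payloads rather than fail on them (that reading of None is an assumption here, not documented dagster behavior). A hypothetical sketch of a reader that honors both kinds of entries:

import json

# Hypothetical fallback map: legacy serialized name -> current class, or None
# when the payload type no longer exists and should simply be ignored.
_FALLBACKS = {
    "PipelineProcessStartedData": None,
    "PipelineProcessExitedData": None,
    "PipelineProcessStartData": None,
    "AssetStoreOperationData": dict,  # stand-in for the still-existing class
}

def deserialize_event_payload(blob):
    data = json.loads(blob)
    klass = _FALLBACKS.get(data.pop("__class__"))
    if klass is None:
        return None  # removed payload class: keep reading, drop the payload
    return klass(**data)

assert deserialize_event_payload('{"__class__": "PipelineProcessStartedData", "pid": 42}') is None
assert deserialize_event_payload(
    '{"__class__": "AssetStoreOperationData", "op": "GET_ASSET"}'
) == {"op": "GET_ASSET"}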