def construct_json_event_logger(json_path):
    """Record a stream of event records to json"""
    check.str_param(json_path, "json_path")
    return construct_single_handler_logger(
        "json-event-record-logger",
        "debug",
        JsonEventLoggerHandler(
            json_path,
            lambda record: construct_event_record(
                StructuredLoggerMessage(
                    name=record.name,
                    message=record.msg,
                    level=record.levelno,
                    meta=record.dagster_meta,
                    record=record,
                )
            ),
        ),
    )


register_serdes_tuple_fallbacks(
    {
        # These were originally distinguished from each other but ended up being empty subclasses
        # of EventLogEntry -- instead of using the subclasses we were relying on
        # EventLogEntry.is_dagster_event to distinguish events that originate in the logging
        # machinery from events that are yielded by user code
        "DagsterEventRecord": EventLogEntry,
        "LogMessageRecord": EventLogEntry,
        # renamed EventRecord -> EventLogEntry
        "EventRecord": EventLogEntry,
    }
)
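# ---------------------------------------------------------------------------
# Usage sketch (illustrative aside, not part of the module above). It shows the
# mechanism the registration relies on, using a toy class instead of EventLogEntry:
# serialized namedtuples are tagged with a "__class__" key, and
# register_serdes_tuple_fallbacks tells the deserializer which current class a
# retired name should unpack into. The import path for these private helpers is
# an assumption.
from typing import NamedTuple

from dagster.serdes.serdes import (  # assumed location of the private helpers
    WhitelistMap,
    _deserialize_json,
    _serialize_dagster_namedtuple,
    _whitelist_for_serdes,
    register_serdes_tuple_fallbacks,
)

sketch_map = WhitelistMap.create()


@_whitelist_for_serdes(whitelist_map=sketch_map)
class Renamed(NamedTuple):
    msg: str


# map the retired class name onto the current class, as done above for EventLogEntry
register_serdes_tuple_fallbacks({"RetiredName": Renamed}, whitelist_map=sketch_map)

# fake an old payload by retagging a fresh one with the retired class name
old_payload = _serialize_dagster_namedtuple(Renamed("hi"), sketch_map).replace(
    "Renamed", "RetiredName"
)
assert isinstance(_deserialize_json(old_payload, sketch_map), Renamed)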
        cls,
        last_tick_timestamp: Optional[float] = None,
        last_run_key: Optional[str] = None,
        min_interval: Optional[int] = None,
        cursor: Optional[str] = None,
    ):
        return super(SensorInstigatorData, cls).__new__(
            cls,
            check.opt_float_param(last_tick_timestamp, "last_tick_timestamp"),
            check.opt_str_param(last_run_key, "last_run_key"),
            check.opt_int_param(min_interval, "min_interval"),
            check.opt_str_param(cursor, "cursor"),
        )


register_serdes_tuple_fallbacks({"SensorJobData": SensorInstigatorData})

# for internal backcompat
SensorJobData = SensorInstigatorData


@whitelist_for_serdes
class ScheduleInstigatorData(
    NamedTuple(
        "_ScheduleInstigatorData",
        [("cron_schedule", str), ("start_timestamp", Optional[float])],
    )
):
    # removed scheduler, 1/5/2022 (0.13.13)
    def __new__(cls, cron_schedule: str, start_timestamp: Optional[float] = None):
        return super(ScheduleInstigatorData, cls).__new__(
            cls,
            check.str_param(cron_schedule, "cron_schedule"),
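# ---------------------------------------------------------------------------
# Illustrative aside: two distinct backcompat layers are at work above. The serdes
# fallback rehydrates data persisted under the retired "SensorJobData" name into
# SensorInstigatorData, while the module-level alias keeps old import sites and
# isinstance checks working. The import path below is an assumption.
from dagster.core.scheduler.instigation import (  # assumed path
    SensorInstigatorData,
    SensorJobData,
)

# the alias means code still constructing SensorJobData operates on the current class
assert SensorJobData is SensorInstigatorData
assert isinstance(SensorJobData(min_interval=30), SensorInstigatorData)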
"parent" in dict_repr, "Dict representation of NodeHandle must have a 'parent' key" ) if isinstance(dict_repr["parent"], (list, tuple)): dict_repr["parent"] = NodeHandle.from_dict( { "name": dict_repr["parent"][0], "parent": dict_repr["parent"][1], } ) return NodeHandle(**{k: dict_repr[k] for k in ["name", "parent"]}) # previous name for NodeHandle was SolidHandle register_serdes_tuple_fallbacks({"SolidHandle": NodeHandle}) class SolidInputHandle( NamedTuple("_SolidInputHandle", [("solid", Node), ("input_def", InputDefinition)]) ): def __new__(cls, solid: Node, input_def: InputDefinition): return super(SolidInputHandle, cls).__new__( cls, check.inst_param(solid, "solid", Node), check.inst_param(input_def, "input_def", InputDefinition), ) def _inner_str(self) -> str: return struct_to_string( "SolidInputHandle",
def test_namedtuple_backcompat():
    old_map = WhitelistMap.create()

    @_whitelist_for_serdes(whitelist_map=old_map)
    class OldThing(NamedTuple):
        old_name: str

        def get_id(self):
            json_rep = _serialize_dagster_namedtuple(self, whitelist_map=old_map)
            return hash_str(json_rep)

    # create the old things
    old_thing = OldThing("thing")
    old_thing_id = old_thing.get_id()
    old_thing_serialized = _serialize_dagster_namedtuple(old_thing, old_map)

    new_map = WhitelistMap.create()

    class ThingSerializer(DefaultNamedTupleSerializer):
        @classmethod
        def value_from_storage_dict(
            cls, storage_dict, klass, args_for_class, whitelist_map, descent_path
        ):
            raw_dict = {
                key: unpack_inner_value(value, whitelist_map, f"{descent_path}.{key}")
                for key, value in storage_dict.items()
            }
            # typical pattern is to read the old field's serialized value and pass it in
            # as the new field
            return klass(
                **{key: value for key, value in raw_dict.items() if key in args_for_class},
                new_name=raw_dict.get("old_name"),
            )

        @classmethod
        def value_to_storage_dict(
            cls,
            value,
            whitelist_map,
            descent_path,
        ):
            storage = super().value_to_storage_dict(value, whitelist_map, descent_path)
            name = storage.get("new_name") or storage.get("old_name")
            if "new_name" in storage:
                del storage["new_name"]
            # typical pattern is to write the new field back out under the old field's key
            storage["old_name"] = name
            storage["__class__"] = "OldThing"  # persist using old class name
            return storage

    @_whitelist_for_serdes(whitelist_map=new_map, serializer=ThingSerializer)
    class NewThing(NamedTuple):
        new_name: str

        def get_id(self):
            json_rep = _serialize_dagster_namedtuple(self, whitelist_map=new_map)
            return hash_str(json_rep)

    # exercising the old serialization format
    register_serdes_tuple_fallbacks({"OldThing": NewThing}, whitelist_map=new_map)

    new_thing = NewThing("thing")
    new_thing_id = new_thing.get_id()
    new_thing_serialized = _serialize_dagster_namedtuple(new_thing, new_map)

    assert new_thing_id == old_thing_id
    assert new_thing_serialized == old_thing_serialized

    # ensure that the new serializer can correctly interpret old serialized data
    old_thing_deserialized = _deserialize_json(old_thing_serialized, new_map)
    assert isinstance(old_thing_deserialized, NewThing)
    assert old_thing_deserialized.get_id() == new_thing_id

    # ensure that things serialized by the new code can still be read by old code
    new_thing_deserialized = _deserialize_json(new_thing_serialized, old_map)
    assert isinstance(new_thing_deserialized, OldThing)
    assert new_thing_deserialized.get_id() == old_thing_id
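# Design note on the test above: a field rename needs three coordinated pieces to stay
# compatible in both directions -- value_from_storage_dict remaps the old key onto the
# new field on read, value_to_storage_dict writes the new field back under the old key
# and pins "__class__" to the old name so pre-rename readers still recognize the
# payload, and register_serdes_tuple_fallbacks resolves that pinned old name back to
# the new class on the new read path.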
        try:
            obj = deserialize_json_to_dagster_namedtuple(json_str)
            return isinstance(obj, RunStatusSensorCursor)
        except (JSONDecodeError, DeserializationError):
            return False

    def to_json(self) -> str:
        return serialize_dagster_namedtuple(cast(NamedTuple, self))

    @staticmethod
    def from_json(json_str: str) -> tuple:
        return deserialize_json_to_dagster_namedtuple(json_str)


# handle backcompat
register_serdes_tuple_fallbacks({"PipelineSensorCursor": RunStatusSensorCursor})


class RunStatusSensorContext(
    NamedTuple(
        "_RunStatusSensorContext",
        [
            ("sensor_name", str),
            ("dagster_run", DagsterRun),
            ("dagster_event", DagsterEvent),
            ("instance", DagsterInstance),
        ],
    )
):
    """The ``context`` object available to a decorated function of ``run_status_sensor``.
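# ---------------------------------------------------------------------------
# Usage sketch (illustrative aside). With the fallback registered, a cursor blob
# persisted under the retired "PipelineSensorCursor" name still deserializes through
# from_json. The import path and the constructor fields used here (update_timestamp,
# record_id) are assumptions about the cursor's schema.
from dagster.core.definitions.run_status_sensor_definition import (  # assumed path
    RunStatusSensorCursor,
)

cursor = RunStatusSensorCursor(update_timestamp="2022-01-01T00:00:00+00:00", record_id=1)
blob = cursor.to_json()

# fake a pre-rename blob by retagging the "__class__" entry with the retired name
old_blob = blob.replace("RunStatusSensorCursor", "PipelineSensorCursor")
assert isinstance(RunStatusSensorCursor.from_json(old_blob), RunStatusSensorCursor)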