def __init__(self, dag: DAG):
    """Build a serialized-DAG database record from a ``DAG`` instance.

    Captures the DAG id, its source-file location (plus a hash of that
    location for indexed lookups), the serialized dict form, the time of
    serialization, and a content hash used to detect DAG changes.
    """
    self.dag_id = dag.dag_id
    self.fileloc = dag.full_filepath
    self.fileloc_hash = DagCode.dag_fileloc_hash(self.fileloc)
    self.data = SerializedDAG.to_dict(dag)
    self.last_updated = timezone.utcnow()
    # Hash the canonical (sorted-key) JSON form so the digest is stable
    # across serializations of an unchanged DAG.
    canonical = json.dumps(self.data, sort_keys=True).encode("utf-8")
    self.dag_hash = hashlib.md5(canonical).hexdigest()
def __init__(self, dag: DAG):
    """Build a serialized-DAG database record from a ``DAG`` instance.

    In addition to the serialized DAG payload and its content hash, this
    variant also records the DAG's event dependencies as JSON.
    """
    self.dag_id = dag.dag_id
    self.fileloc = dag.full_filepath
    self.fileloc_hash = DagCode.dag_fileloc_hash(self.fileloc)
    self.data = SerializedDAG.to_dict(dag)
    self.last_updated = timezone.utcnow()
    # Event-driven dependencies are serialized alongside the DAG itself.
    self.event_relationships = DagEventDependencies.to_json(DagEventDependencies(dag))
    # Hash the canonical (sorted-key) JSON form so the digest is stable
    # across serializations of an unchanged DAG.
    canonical = json.dumps(self.data, sort_keys=True).encode("utf-8")
    self.dag_hash = hashlib.md5(canonical).hexdigest()
def __init__(self, dag: DAG):
    """Build a serialized-DAG database record from a ``DAG`` instance.

    Depending on ``COMPRESS_SERIALIZED_DAGS``, the serialized payload is
    stored either as a plain dict (``_data``) or zlib-compressed bytes
    (``_data_compressed``); the unused column is set to ``None``.
    """
    self.dag_id = dag.dag_id
    self.fileloc = dag.fileloc
    self.fileloc_hash = DagCode.dag_fileloc_hash(self.fileloc)
    self.last_updated = timezone.utcnow()

    dag_data = SerializedDAG.to_dict(dag)
    # Canonical (sorted-key) JSON bytes feed both the content hash and,
    # when enabled, the compressed payload.
    encoded = json.dumps(dag_data, sort_keys=True).encode("utf-8")
    self.dag_hash = hashlib.md5(encoded).hexdigest()

    if COMPRESS_SERIALIZED_DAGS:
        self._data = None
        self._data_compressed = zlib.compress(encoded)
    else:
        self._data = dag_data
        self._data_compressed = None

    # Keep the dict form cached so reads of the data field never need to
    # decompress/parse when COMPRESS_SERIALIZED_DAGS is enabled.
    self.__data_cache = dag_data
def to_json(cls, var: Union[DAG, BaseOperator, dict, list, set, tuple]) -> str:
    """Serialize *var* (a DAG/operator or a plain container of them) to a JSON string."""
    as_dict = cls.to_dict(var)
    return json.dumps(as_dict, ensure_ascii=True)
def is_jsonable(x):
    """Return ``True`` if *x* can be serialized to JSON, ``False`` otherwise.

    Uses EAFP: attempt the dump and treat a serialization failure as
    "not jsonable". ``ValueError`` is included in the handled exceptions
    because ``json.dumps`` raises it for self-referential (circular)
    structures; previously such input would propagate instead of
    reporting ``False``.
    """
    try:
        json.dumps(x)
    except (TypeError, OverflowError, ValueError):
        return False
    return True
def _config_to_json(config: Config) -> str:
    """Dump *config* through the schema and return it as an indented JSON string."""
    dumped = config_schema.dump(config)
    return json.dumps(dumped, indent=4)