def dag(self):
    """The DAG deserialized from the ``data`` column."""
    from airflow.serialization import SerializedDAG  # noqa # pylint: disable=redefined-outer-name

    # ``data`` may already be a parsed dict, or a raw JSON string.
    if isinstance(self.data, dict):
        return SerializedDAG.from_dict(self.data)  # type: Any
    # noinspection PyTypeChecker
    return SerializedDAG.from_json(self.data)
def test_deserialization(self):
    """A serialized DAG can be deserialized in another process."""
    result_queue = multiprocessing.Queue()
    worker = multiprocessing.Process(target=serialize_subprocess, args=(result_queue,))
    worker.daemon = True
    worker.start()

    stringified_dags = {}
    # Drain the queue until the producer's ``None`` sentinel arrives.
    for payload in iter(result_queue.get, None):
        deserialized = SerializedDAG.from_json(payload)
        self.assertTrue(isinstance(deserialized, DAG))
        stringified_dags[deserialized.dag_id] = deserialized

    # Every collected DAG must have come back through serialization.
    dags = collect_dags()
    self.assertTrue(set(stringified_dags.keys()) == set(dags.keys()))

    # Spot-check that individual operators survived the round trip.
    example_skip_dag = stringified_dags['example_skip_dag']
    self.validate_deserialized_task(
        example_skip_dag.task_dict['skip_operator_1'],
        'DummySkipOperator', '#e8b7e4', '#000')

    # The deserialized DAG must expose ``full_filepath`` equal to ``fileloc``.
    self.assertTrue(hasattr(example_skip_dag, 'full_filepath'))
    self.assertEqual(example_skip_dag.full_filepath, example_skip_dag.fileloc)

    example_subdag_operator = stringified_dags['example_subdag_operator']
    self.validate_deserialized_task(
        example_subdag_operator.task_dict['section-1'],
        SubDagOperator.__name__,
        SubDagOperator.ui_color,
        SubDagOperator.ui_fgcolor)

    # Operator extra links must also round-trip.
    self.validate_operator_extra_links(
        stringified_dags['simple_dag'].task_dict['custom_task'])