def test_serialize_event_handler(self):
    """Verify a custom event handler survives operator (de)serialization.

    An operator configured with an ``AIFlowHandler`` is serialized and
    deserialized; the restored operator must expose a handler of the exact
    same type.
    """
    from airflow.operators.dummy import DummyOperator
    from airflow.contrib.jobs.event_handlers import AIFlowHandler

    # JSON-encoded MetConfig payload the handler is constructed from; the
    # round trip below must preserve it inside the handler.
    configs = '[{"__af_object_type__": "jsonable", "__class__": "MetConfig", "__module__": "ai_flow.graph.edge", "action": "START", "condition": "NECESSARY", "event_key": "key_1", "event_type": "UNDEFINED", "event_value": "value_1", "life": "ONCE", "namespace": "default", "value_condition": "EQUAL"}]'
    op = DummyOperator(task_id='dummy', event_handler=AIFlowHandler(config=configs))
    encoded_op = SerializedBaseOperator.serialize_operator(op)
    deserialized_op = SerializedBaseOperator.deserialize_operator(encoded_op)
    event_handler = deserialized_op.get_events_handler()
    # `is` keeps the original exact-type semantics (no subclasses accepted)
    # while avoiding the `type(x) == T` anti-pattern.
    assert type(event_handler) is AIFlowHandler
def check_task_group(node):
    """Depth-first check that every task under *node* serializes losslessly."""
    try:
        subnodes = node.children.values()
    except AttributeError:
        # Leaf task: serialize -> deserialize -> serialize must be a fixed
        # point, and the node itself must serialize to that same dict.
        first_pass = SerializedBaseOperator.serialize_operator(dag.get_task(node.task_id))
        restored = SerializedBaseOperator.deserialize_operator(first_pass)
        expected = SerializedBaseOperator.serialize_operator(restored)
        assert node
        assert SerializedBaseOperator.serialize_operator(node) == expected
    else:
        # Task group: recurse into each child.
        for subnode in subnodes:
            check_task_group(subnode)
def test_serialized_external_task_marker(self):
    """ExternalTaskMarker's extra attributes must survive a round trip.

    Serializes and deserializes an ``ExternalTaskMarker`` and checks that
    its task type and the external dag/task references are preserved.
    """
    dag = DAG('test_serialized_external_task_marker', start_date=DEFAULT_DATE)
    task = ExternalTaskMarker(
        task_id="parent_task",
        external_dag_id="external_task_marker_child",
        external_task_id="child_task1",
        dag=dag,
    )

    serialized_op = SerializedBaseOperator.serialize_operator(task)
    deserialized_op = SerializedBaseOperator.deserialize_operator(serialized_op)
    # Bare asserts and direct attribute access for consistency with the
    # other round-trip tests in this file (which do not use unittest's
    # assertEqual or getattr-with-literal-name).
    assert deserialized_op.task_type == 'ExternalTaskMarker'
    assert deserialized_op.external_dag_id == 'external_task_marker_child'
    assert deserialized_op.external_task_id == 'child_task1'
def test_operator_subclass_changing_base_defaults(self):
    """A subclass that flips a base default must keep it after a round trip."""
    assert BaseOperator(task_id='dummy').do_xcom_push is True, \
        "Precondition check! If this fails the test won't make sense"

    class MyOperator(BaseOperator):
        def __init__(self, do_xcom_push=False, **kwargs):
            super().__init__(**kwargs)
            self.do_xcom_push = do_xcom_push

    op = MyOperator(task_id='dummy')
    assert op.do_xcom_push is False

    # Round-trip through the serializer; the overridden default must stick.
    round_tripped = SerializedBaseOperator.deserialize_operator(
        SerializedBaseOperator.serialize_operator(op))
    assert round_tripped.do_xcom_push is False
def test_serialize_sensor(self, mode, expect_custom_deps):
    """Sensor serialization emits a custom "deps" entry only for some modes."""
    from airflow.sensors.base import BaseSensorOperator

    class DummySensor(BaseSensorOperator):
        def poke(self, context):
            return False

    sensor = DummySensor(task_id='dummy', mode=mode, poke_interval=23)

    encoded = SerializedBaseOperator.serialize_operator(sensor)
    # Only non-default sensor modes serialize an explicit "deps" field.
    if expect_custom_deps:
        assert "deps" in encoded
    else:
        assert "deps" not in encoded

    restored = SerializedBaseOperator.deserialize_operator(encoded)
    assert sensor.deps == restored.deps