def test_event_op_dag_read_write(self):
    # Handler that returns START whenever the event condition is met.
    class TestHandler(EventMetHandler):
        def met(self, ti: TaskInstance, ts: TaskState) -> TaskAction:
            return TaskAction.START

    now = timezone.utcnow()
    dag_id = 'test_add_taskstate_0'
    dag = DAG(dag_id=dag_id, start_date=now)
    task0 = DummyOperator(task_id='backfill_task_0', owner='test', dag=dag)
    task0.add_event_dependency('key', "EVENT")
    task0.set_event_met_handler(TestHandler())
    # Round-trip the DAG through the serialized-DAG table and check that the
    # event dependency survives serialization.
    SDM.write_dag(dag)
    with db.create_session() as session:
        sdag = session.query(SDM).first()
        dag = SerializedDAG.from_dict(sdag.data)
    self.assertEqual(dag_id, dag.dag_id)
    self.assertEqual(
        1, len(dag.task_dict["backfill_task_0"].event_dependencies()))
Example #2

# NOTE: the top of this snippet is truncated in the source; the dag1 object,
# the `port` variable, and the head of the first operator are not shown.
# The head of dag1_task1 below is reconstructed from the later
# SendEventOperator calls; its variable name and task_id are assumptions.
dag1_task1 = SendEventOperator(task_id='event_1',
                               dag=dag1,
                               owner='airflow',
                               uri="localhost:{0}".format(port),
                               event=Event(key="key_1", value="value_1"))

dag1_task2 = DummyOperator(task_id='dummy_2', dag=dag1, owner='airflow')

dag1_task3 = SendEventOperator(task_id='event_3',
                               dag=dag1,
                               owner='airflow',
                               uri="localhost:{0}".format(port),
                               event=Event(key="key_1", value="value_2"))

dag1_task4 = DummyOperator(task_id='dummy_4', dag=dag1, owner='airflow')

dag1_task5 = DummyOperator(task_id='dummy_5', dag=dag1, owner='airflow')

dag1_stop_task = SendEventOperator(task_id='event_5',
                                   dag=dag1,
                                   owner='airflow',
                                   uri="localhost:{0}".format(port),
                                   event=StopSchedulerCMDEvent())

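# dag1_task2 waits for an event with key "key_1" (event type "UNDEFINED");
# RestartEventMetHandler presumably restarts the task once that event arrives.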
dag1_task2.add_event_dependency("key_1", "UNDEFINED")
dag1_task2.set_event_met_handler(RestartEventMetHandler())

dag1_task2.set_downstream(dag1_task3)
dag1_task3.set_downstream(dag1_task4)
dag1_task4.set_downstream(dag1_task5)
dag1_stop_task.set_upstream(dag1_task5)
Example #3
# NOTE: the top of this snippet is truncated in the source; the DAG
# constructor is reconstructed here, and its dag_id and start_date are
# assumptions rather than values from the original.
dag = DAG(dag_id='event_dag',
          start_date=timezone.utcnow(),
          schedule_interval="@once")
op_0 = DummyOperator(task_id='0_job', dag=dag)
op_1 = DummyOperator(task_id='1_job', dag=dag)
op_2 = SendEventOperator(task_id='2_job',
                         dag=dag,
                         uri='localhost:50051',
                         event=Event(key='key_1',
                                     value='value_1',
                                     event_type='UNDEFINED'))
op_3 = SendEventOperator(task_id='3_job',
                         dag=dag,
                         uri='localhost:50051',
                         event=Event(key='key_2',
                                     value='value_2',
                                     event_type='UNDEFINED'))
op_4 = DummyOperator(task_id='4_job', dag=dag)
op_5 = SendEventOperator(task_id='5_job',
                         dag=dag,
                         uri='localhost:50051',
                         event=Event(key='key_2',
                                     value='value_2',
                                     event_type='STOP_SCHEDULER_CMD'))
op_2.set_upstream(op_0)
op_2.set_upstream(op_1)
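# op_4 is gated on two events, key_1 and key_2, both of type 'UNDEFINED'.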
op_4.add_event_dependency('key_1', 'UNDEFINED')
op_4.add_event_dependency('key_2', 'UNDEFINED')
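# JSON-serialized MetConfig list (ai_flow.graph.edge) handed to AIFlowMetHandler
# below: each event is a NECESSARY condition matched by EQUAL value, consumed
# ONCE, and triggers a START action when met.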
configs_op_4 = '[{"__af_object_type__": "jsonable", "__class__": "MetConfig", "__module__": "ai_flow.graph.edge", "action": "START", "condition": "NECESSARY", "event_key": "key_1", "event_type": "UNDEFINED", "event_value": "value_1", "life": "ONCE", "value_condition": "EQUAL"}, {"__af_object_type__": "jsonable", "__class__": "MetConfig", "__module__": "ai_flow.graph.edge", "action": "START", "condition": "NECESSARY", "event_key": "key_2", "event_type": "UNDEFINED", "event_value": "value_2", "life": "ONCE", "value_condition": "EQUAL"}]'

op_4.set_event_met_handler(AIFlowMetHandler(configs_op_4))
op_5.set_upstream(op_4)