def run_ai_flow_function(self):
    """Wait for task '1-job-name' of dag 'workflow_1' to start, publish the
    two events its downstream jobs wait on, wait until the workflow has
    three task executions, then stop the scheduler."""
    client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="default",
        sender='1-job-name')
    started = False
    while not started:
        with create_session() as session:
            started = len(session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'workflow_1',
                TaskExecution.task_id == '1-job-name').all()) > 0
        if not started:
            time.sleep(1)
    # Give the first job a moment before firing its downstream events.
    time.sleep(5)
    client.send_event(
        BaseEvent(key='key_1', value='value_1', event_type='UNDEFINED'))
    client.send_event(
        BaseEvent(key='key_2', value='value_2', event_type='UNDEFINED'))
    # Wait until all three jobs of the workflow have task executions.
    while True:
        with create_session() as check_session:
            total = len(check_session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'workflow_1').all())
        if total == 3:
            break
        time.sleep(1)
    time.sleep(3)
    client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_event_task_function(self):
    """Once 'task_1' of 'event_dag' has an execution, publish the 'start'
    event that triggers 'task_2', wait for 'task_2' to get an execution,
    then stop the scheduler."""
    client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="")
    task_1_seen = False
    while not task_1_seen:
        with create_session() as session:
            task_1_seen = len(session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'event_dag',
                TaskExecution.task_id == 'task_1').all()) > 0
        if not task_1_seen:
            time.sleep(1)
    # Let task_1 settle before emitting the event task_2 waits on.
    time.sleep(5)
    client.send_event(
        BaseEvent(key='start', value='', event_type='', namespace=''))
    while True:
        with create_session() as check_session:
            task_2_seen = len(check_session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'event_dag',
                TaskExecution.task_id == 'task_2').all()) > 0
        if task_2_seen:
            break
        time.sleep(1)
    client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_trigger_task_function(self):
    """Schedule dag 'trigger_task'; when 'task_1' has started, manually
    START 'task_2' and wait for its execution, then stop the scheduler."""
    # Wait for the dag file to be parsed before scheduling it.
    time.sleep(5)
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="a")
    scheduler_client = EventSchedulerClient(ns_client=ns_client)
    execution_context = scheduler_client.schedule_dag('trigger_task')
    task_1_seen = False
    while not task_1_seen:
        with create_session() as session:
            task_1_seen = len(session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'trigger_task',
                TaskExecution.task_id == 'task_1').all()) > 0
        if not task_1_seen:
            time.sleep(1)
    scheduler_client.schedule_task('trigger_task', 'task_2',
                                   SchedulingAction.START, execution_context)
    while True:
        with create_session() as check_session:
            task_2_seen = len(check_session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'trigger_task',
                TaskExecution.task_id == 'task_2').all()) > 0
        if task_2_seen:
            break
        time.sleep(1)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_test_fun():
    """Run the module-level *test_function* with a fresh notification
    client, always stopping the scheduler afterwards.

    Fix: the original ``except Exception as e: raise e`` was a no-op
    re-raise; ``try/finally`` alone propagates the exception unchanged
    (and preserves the full traceback).
    """
    # Give the scheduler a moment to come up before connecting.
    time.sleep(3)
    client = NotificationClient(
        server_uri="localhost:{}".format(server_port()),
        default_namespace="test")
    try:
        test_function(client)
    finally:
        # Stop the scheduler whether or not the test body raised.
        client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_periodic_task_function(self):
    """Wait until 'task_1' of dag 'single' has executed more than once
    (i.e. the periodic schedule fired again), then stop the scheduler."""
    fired_twice = False
    while not fired_twice:
        with create_session() as session:
            runs = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'single',
                TaskExecution.task_id == 'task_1').all()
            fired_twice = len(runs) > 1
        if not fired_twice:
            time.sleep(1)
    self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_no_dag_file_function(self):
    """Try to schedule dag id 'no_dag', which has no dag file, print the
    resulting dagrun id, then stop the scheduler.

    Fix: the original bound ``create_session() as session`` but never used
    the session object; the binding is dropped while keeping the context
    manager for its setup/teardown side effects.
    """
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="")
    client = EventSchedulerClient(ns_client=ns_client)
    with create_session():
        client.trigger_parse_dag()
        result = client.schedule_dag('no_dag')
        print('result {}'.format(result.dagrun_id))
    time.sleep(5)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def schedule_with_task_status_change_check(self):
    """Wait until at least two task executions of dag 'schedule_on_state'
    have reached SUCCESS, then stop the scheduler."""
    self.wait_for_running()
    succeeded = 0
    while succeeded < 2:
        with create_session() as session:
            succeeded = len(session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'schedule_on_state',
                TaskExecution.state == State.SUCCESS).all())
        if succeeded < 2:
            time.sleep(1)
    self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_one_task_function(self):
    """Send the same event twice, wait until 'task_1' of dag 'single' has
    at least two executions (one per event), then stop the scheduler."""
    self.wait_for_running()
    self.client.send_event(BaseEvent(key='a', value='a'))
    time.sleep(5)
    self.client.send_event(BaseEvent(key='a', value='a'))
    ran_twice = False
    while not ran_twice:
        with create_session() as session:
            runs = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'single',
                TaskExecution.task_id == 'task_1').all()
            ran_twice = len(runs) >= 2
        if not ran_twice:
            time.sleep(1)
    self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
def schedule_task_function(self):
    """Exercise manual task control on EVENT_BASED_SCHEDULER_DAG.

    Phases: (1) wait until both 'sleep_1000_secs' and 'python_sleep' are
    SCHEDULED; (2) send a 'start' event and wait until both are RUNNING;
    (3) send 'stop' then 'restart' events and wait until
    'sleep_1000_secs' is KILLED while 'python_sleep' is still RUNNING;
    then stop the scheduler.
    """
    stopped = False
    while not stopped:
        with create_session() as session:
            ti_sleep_1000_secs = self._get_task_instance(
                EVENT_BASED_SCHEDULER_DAG, 'sleep_1000_secs', session)
            ti_python_sleep = self._get_task_instance(
                EVENT_BASED_SCHEDULER_DAG, 'python_sleep', session)
            if ti_sleep_1000_secs and ti_sleep_1000_secs.state == State.SCHEDULED and \
                    ti_python_sleep and ti_python_sleep.state == State.SCHEDULED:
                # Both tasks are scheduled: publish the event that starts them.
                self.client.send_event(
                    BaseEvent(key='start', value='',
                              event_type='', namespace='test_namespace'))
                # Wait until both task instances report RUNNING.
                while not stopped:
                    ti_sleep_1000_secs.refresh_from_db()
                    ti_python_sleep.refresh_from_db()
                    if ti_sleep_1000_secs and ti_sleep_1000_secs.state == State.RUNNING and \
                            ti_python_sleep and ti_python_sleep.state == State.RUNNING:
                        # Let them run for a while before interfering.
                        time.sleep(10)
                        break
                    else:
                        time.sleep(1)
                # Ask the scheduler to stop, then restart, the tasks.
                self.client.send_event(
                    BaseEvent(key='stop', value='',
                              event_type=UNDEFINED_EVENT_TYPE, namespace='test_namespace'))
                self.client.send_event(
                    BaseEvent(key='restart', value='',
                              event_type=UNDEFINED_EVENT_TYPE, namespace='test_namespace'))
                # Done once the long sleeper is KILLED while the other
                # task keeps RUNNING.
                while not stopped:
                    ti_sleep_1000_secs.refresh_from_db()
                    ti_python_sleep.refresh_from_db()
                    if ti_sleep_1000_secs and ti_sleep_1000_secs.state == State.KILLED and \
                            ti_python_sleep and ti_python_sleep.state == State.RUNNING:
                        stopped = True
                    else:
                        time.sleep(1)
            else:
                time.sleep(1)
    self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_trigger_dag_function(self):
    """Repeatedly parse and schedule 'trigger_dag' until its 'task_1' has
    a recorded execution, then stop the scheduler."""
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="")
    scheduler_client = EventSchedulerClient(ns_client=ns_client)
    while True:
        with create_session() as session:
            task_1_seen = len(session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'trigger_dag',
                TaskExecution.task_id == 'task_1').all()) > 0
        if task_1_seen:
            break
        # Not started yet: re-trigger parsing and schedule the dag again.
        scheduler_client.trigger_parse_dag()
        result = scheduler_client.schedule_dag('trigger_dag')
        print('result {}'.format(result.dagrun_id))
        time.sleep(5)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def stop_dag_function(self):
    """Once 'sleep_to_be_stopped' has exactly one execution, send a
    StopDagEvent for the dag, wait for the task to reach KILLED, then
    stop the scheduler."""
    while True:
        executions = self.get_task_execution(EVENT_BASED_SCHEDULER_DAG,
                                             'sleep_to_be_stopped')
        if not (executions and len(executions) == 1):
            time.sleep(1)
            continue
        self.client.send_event(
            StopDagEvent(EVENT_BASED_SCHEDULER_DAG).to_event())
        # Poll until the running task has actually been killed.
        while True:
            latest = self.get_task_execution(EVENT_BASED_SCHEDULER_DAG,
                                             'sleep_to_be_stopped')
            if latest[0].state == State.KILLED:
                time.sleep(5)
                break
            time.sleep(1)
        break
    self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_airflow_dag_function(self):
    """Define a one-task workflow, deploy it to airflow as 'test_dag_111',
    run it via the airflow scheduler, then stop the scheduler."""
    # waiting parsed dag file done
    from datetime import datetime
    ns_client = NotificationClient(server_uri='localhost:50051')
    # NOTE(review): the original formatting was lost; deploy/run are
    # reconstructed as nested under the global config context — confirm
    # against the project's other ai-flow tests.
    with af.global_config_file(test_util.get_workflow_config_file()):
        with af.config('task_1'):
            # Single shell task; output_num=0 means no data outputs.
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(cmd_line=['echo "hello world!"']))
        af.deploy_to_airflow(test_util.get_project_path(),
                             dag_id='test_dag_111',
                             default_args={
                                 'schedule_interval': None,
                                 'start_date': datetime(2025, 12, 1),
                             })
        context = af.run(project_path=test_util.get_project_path(),
                         dag_id='test_dag_111',
                         scheduler_type=SchedulerType.AIRFLOW)
        print(context.dagrun_id)
    time.sleep(5)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def stop(self) -> None:
    """Request scheduler shutdown by posting a StopSchedulerEvent to the
    scheduler's mailbox."""
    self.mailbox.send_message(StopSchedulerEvent(self.id).to_event())
    self.log.info("Send stop event to the scheduler.")