def _execute(self):
    """
    Run the event scheduler's main lifecycle:

    1. Init the DagRun route.
    2. Start the executor.
    3. Option of start the notification master.
    4. Create the notification client.
    5. Start the DagTrigger.
    6. Run the scheduler event loop.
    :return:
    """
    # Created lazily below; checked in ``finally`` so teardown is safe even
    # when startup fails before the client exists.
    notification_client = None
    try:
        self._init_route()
        # Tell the executor to report state via notification events rather
        # than the in-process event buffer.
        self.executor.set_use_nf(True)
        self.executor.start()
        self.dag_trigger = DagTrigger(
            subdir=self.subdir,
            mailbox=self.mail_box,
            run_duration=self.run_duration,
            using_sqlite=self.using_sqlite,
            num_runs=self.num_runs,
            processor_poll_interval=self._processor_poll_interval)
        if self.use_local_nf:
            # Embedded notification master: start it in-process and connect
            # a client to the local port.
            self.notification_master \
                = NotificationMaster(service=NotificationService(EventModelStorage()),
                                     port=self.nf_port)
            self.notification_master.run()
            self.log.info("start notification service {0}".format(
                self.nf_port))
            notification_client = NotificationClient(
                server_uri="localhost:{0}".format(self.nf_port))
        else:
            # External notification service: connect to the configured host.
            notification_client \
                = NotificationClient(server_uri="{0}:{1}".format(self.nf_host, self.nf_port))
        # Route incoming notification events into the scheduler mailbox.
        notification_client.start_listen_events(
            watcher=SCEventWatcher(self.mail_box))
        self.dag_trigger.start()
        self._start_executor_heartbeat()
        # Blocks until the scheduler is asked to stop.
        self._run_event_loop()
    except Exception as e:
        self.log.exception("Exception when executing _execute {0}".format(
            str(e)))
    finally:
        self.running = False
        # NOTE(review): spelled "heartheat" here — presumably matches a typo
        # in the method's declaration elsewhere; confirm before renaming.
        self._stop_executor_heartheat()
        if self.dag_trigger is not None:
            self.dag_trigger.stop()
        if notification_client is not None:
            notification_client.stop_listen_events()
        if self.notification_master is not None:
            self.notification_master.stop()
        self.executor.end()
        self.log.info("Exited execute event scheduler")
def __init__(self, store_uri, server_uri, notification_uri=None):
    """Set up the model-repo store backend and the notification client.

    :param store_uri: database URI for model metadata (MongoDB or SQLAlchemy).
    :param server_uri: notification server URI, used when no dedicated
                       notification URI is supplied.
    :param notification_uri: optional dedicated notification service URI.
    """
    engine = extract_db_engine_from_uri(store_uri)
    if DBType.value_of(engine) == DBType.MONGODB:
        # Mongo URIs embed credentials and the target database; unpack them.
        user, pwd, host, port, db = parse_mongo_uri(store_uri)
        self.model_repo_store = MongoStore(host=host,
                                           port=int(port),
                                           username=user,
                                           password=pwd,
                                           db=db)
    else:
        self.model_repo_store = SqlAlchemyStore(store_uri)
    # Prefer a dedicated notification URI; fall back to the server URI.
    uri = notification_uri if notification_uri is not None else server_uri
    self.notification_client = NotificationClient(uri)
def __init__(self, dag_directory, server_uri=None, max_runs=-1,
             refresh_dag_dir_interval=conf.getint(
                 'scheduler', 'refresh_dag_dir_interval', fallback=30),
             *args, **kwargs):
    """Wire together the mailbox, DAG trigger, event manager, notification
    client, and event-based scheduler for this job.

    :param dag_directory: directory containing the DAG files to watch.
    :param server_uri: URI of the notification server (None for none).
    :param max_runs: maximum number of scheduler runs; -1 means unlimited.
    :param refresh_dag_dir_interval: seconds between DAG-dir refreshes.
    """
    super().__init__(*args, **kwargs)
    # One mailbox is shared by the trigger, event manager, executor,
    # and scheduler — it is the central event queue.
    mailbox = Mailbox()
    self.mailbox: Mailbox = mailbox
    self.dag_trigger: DagTrigger = DagTrigger(
        dag_directory=dag_directory,
        max_runs=max_runs,
        dag_ids=None,
        pickle_dags=False,
        mailbox=mailbox,
        refresh_dag_dir_interval=refresh_dag_dir_interval,
        notification_service_uri=server_uri)
    self.task_event_manager = DagRunEventManager(mailbox)
    self.executor.set_mailbox(mailbox)
    self.notification_client: NotificationClient = NotificationClient(
        server_uri=server_uri, default_namespace=SCHEDULER_NAMESPACE)
    self.scheduler: EventBasedScheduler = EventBasedScheduler(
        self.id,
        mailbox,
        self.task_event_manager,
        self.executor,
        self.notification_client)
    self.last_scheduling_id = self._last_scheduler_job_id()
def run_event_task_function(self):
    """Drive the 'event_dag' test DAG: once task_1 has executed, publish the
    'start' event that unblocks task_2, wait for task_2, then stop the
    scheduler."""
    client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="")

    def has_executed(task_id):
        # Poll the metadata DB for a TaskExecution row of the given task.
        with create_session() as session:
            rows = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'event_dag',
                TaskExecution.task_id == task_id).all()
            return len(rows) > 0

    while not has_executed('task_1'):
        time.sleep(1)
    # Give task_1 a moment to settle before triggering task_2.
    time.sleep(5)
    client.send_event(
        BaseEvent(key='start', value='', event_type='', namespace=''))
    while not has_executed('task_2'):
        time.sleep(1)
    client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_trigger_task_function(self):
    """Schedule the 'trigger_task' DAG, manually START task_2 once task_1 has
    executed, wait for task_2, then stop the scheduler."""
    # Wait for the DAG file to be parsed before scheduling.
    time.sleep(5)
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="a")
    scheduler_client = EventSchedulerClient(ns_client=ns_client)
    execution_context = scheduler_client.schedule_dag('trigger_task')

    def has_executed(task_id):
        # Poll the metadata DB for a TaskExecution row of the given task.
        with create_session() as session:
            rows = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'trigger_task',
                TaskExecution.task_id == task_id).all()
            return len(rows) > 0

    while not has_executed('task_1'):
        time.sleep(1)
    scheduler_client.schedule_task('trigger_task', 'task_2',
                                   SchedulingAction.START,
                                   execution_context)
    while not has_executed('task_2'):
        time.sleep(1)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_ai_flow_function(self):
    """Drive 'workflow_1': once the '1-job-name' task has executed, publish
    two events, wait until the workflow has three task executions, then stop
    the scheduler."""
    client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="default",
        sender='1-job-name')

    def execution_count(task_id=None):
        # Count TaskExecution rows for workflow_1, optionally for one task.
        with create_session() as session:
            query = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'workflow_1')
            if task_id is not None:
                query = query.filter(TaskExecution.task_id == task_id)
            return len(query.all())

    while execution_count('1-job-name') == 0:
        time.sleep(1)
    # Let the first job settle before publishing the downstream events.
    time.sleep(5)
    client.send_event(
        BaseEvent(key='key_1', value='value_1', event_type='UNDEFINED'))
    client.send_event(
        BaseEvent(key='key_2', value='value_2', event_type='UNDEFINED'))
    while execution_count() != 3:
        time.sleep(1)
    time.sleep(3)
    client.send_event(StopSchedulerEvent(job_id=0).to_event())
def setUpClass(cls):
    """Start a notification master backed by MongoDB and connect a client."""
    cls.storage = MongoEventStorage(host="127.0.0.1",
                                    port=27017,
                                    db="test")
    cls.master = NotificationMaster(NotificationService(cls.storage))
    cls.master.run()
    cls.client = NotificationClient(server_uri="localhost:50051")
def _send_task_status_change_event(self):
    """Publish a TaskStateChangedEvent describing this job's task instance."""
    ti = self.task_instance
    event = TaskStateChangedEvent(ti.task_id,
                                  ti.dag_id,
                                  ti.execution_date,
                                  ti.state).to_event()
    # The client adopts the event's own namespace and sender so the event
    # is attributed correctly on the notification server.
    client = NotificationClient(self.server_uri,
                                default_namespace=event.namespace,
                                sender=event.sender)
    self.log.info("LocalTaskJob sending event: {}".format(event))
    client.send_event(event)
def run_test_fun():
    """Run the module's ``test_function`` against a fresh notification client,
    always sending a StopSchedulerEvent afterwards so the scheduler exits.

    Fix: the original ``except Exception as e: raise e`` was a useless
    re-raise that rebinds the exception and obscures the traceback origin;
    ``try/finally`` alone gives identical propagation with a clean traceback.
    """
    # Give the scheduler time to come up before connecting.
    time.sleep(3)
    client = NotificationClient(
        server_uri="localhost:{}".format(server_port()),
        default_namespace="test")
    try:
        test_function(client)
    finally:
        # Always stop the scheduler, even when the test body raised.
        client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_no_dag_file_function(self):
    """Attempt to schedule a DAG with no backing file, print the result,
    then stop the scheduler."""
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="")
    scheduler_client = EventSchedulerClient(ns_client=ns_client)
    with create_session() as session:
        scheduler_client.trigger_parse_dag()
        response = scheduler_client.schedule_dag('no_dag')
        print('result {}'.format(response.dagrun_id))
        time.sleep(5)
        ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def wait_for_master_started(cls, server_uri="localhost:50051"):
    """Retry connecting an HA-enabled client until the master is reachable.

    :param server_uri: URI of the notification server to connect to.
    :return: a connected ``NotificationClient``.
    :raises Exception: when the server stays unavailable after 100 attempts.
    """
    last_exception = None
    for _ in range(100):
        try:
            return NotificationClient(server_uri=server_uri, enable_ha=True)
        except Exception as e:
            # Remember the failure and back off before the next attempt.
            last_exception = e
            time.sleep(10)
    raise Exception("The server %s is unavailable." % server_uri) from last_exception
def execute(self, function_context: FunctionContext, input_list: List) -> List:
    """Publish a single configured event and return an empty output list.

    :param function_context: AIFlow function context (unused here).
    :param input_list: upstream inputs (unused here).
    :return: an empty list — this executor produces no data outputs.
    """
    from notification_service.client import NotificationClient
    NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="default",
        sender=self.sender
    ).send_event(BaseEvent(key=self.key,
                           value=self.value,
                           event_type=self.event_type))
    return []
def change_state(self, key, state):
    """Record a task-instance state change, either by publishing a
    TaskStatusEvent (notification mode) or via the local event buffer.

    :param key: task instance key tuple (dag_id, task_id, execution_date,
                try_number).
    :param state: the new task state.
    """
    self.log.debug("Changing state: %s %s", key, state)
    self.running.pop(key, None)
    if not self.use_nf:
        # Legacy path: surface the state through the in-process buffer.
        self.event_buffer[key] = state
        return
    if self.client is None:
        # Lazily connect on first use.
        self.client: NotificationClient = NotificationClient(
            server_uri="{0}:{1}".format(self.nf_host, self.nf_port))
    dag_id, task_id, execution_date, try_number = key
    self.client.send_event(TaskStatusEvent(
        task_instance_key=TaskInstanceHelper.to_task_key(
            dag_id, task_id, execution_date),
        status=TaskInstanceHelper.to_event_value(state, try_number)))
def _send_request_and_receive_response(self, server_uri, file_path):
    """Send a PARSE_DAG_REQUEST event and assert the matching response
    echoes the same key and file path.

    :param server_uri: URI of the notification server.
    :param file_path: DAG file path used as the request payload.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` (removed
    in Python 3.12's unittest); use the canonical name.
    """
    # A timestamped key ties this request to its response event.
    key = '{}_{}'.format(file_path, time.time_ns())
    client = NotificationClient(server_uri=server_uri,
                                default_namespace=SCHEDULER_NAMESPACE)
    event = BaseEvent(key=key,
                      event_type=SchedulerInnerEventType.PARSE_DAG_REQUEST.value,
                      value=file_path)
    client.send_event(event)
    watcher: ResponseWatcher = ResponseWatcher()
    # Listen only for the response event matching our unique key.
    client.start_listen_event(key=key,
                              event_type=SchedulerInnerEventType.PARSE_DAG_RESPONSE.value,
                              watcher=watcher)
    res: BaseEvent = watcher.get_result()
    self.assertEqual(event.key, res.key)
    self.assertEqual(event.value, file_path)
def stop_workflow(self, workflow_name) -> bool:
    """
    Stop the workflow. No more workflow execution(Airflow dag_run) would be
    scheduled and all running jobs would be stopped.
    :param workflow_name: workflow name
    :return: True if succeed
    """
    # TODO For now, simply return True as long as message is sent successfully,
    # actually we need a response from the scheduler.
    try:
        NotificationClient(self.server_uri, SCHEDULER_NAMESPACE).send_event(
            StopDagEvent(workflow_name).to_event())
        return True
    except Exception:
        # Best-effort delivery: any failure to send reports False.
        return False
def setUp(self):
    """Wipe all scheduler tables, start an in-memory notification master,
    and connect a test client."""
    for cleaner in (db.clear_db_jobs,
                    db.clear_db_dags,
                    db.clear_db_serialized_dags,
                    db.clear_db_runs,
                    db.clear_db_task_execution,
                    db.clear_db_message):
        cleaner()
    self.scheduler = None
    self.port = 50102
    self.storage = MemoryEventStorage()
    self.master = NotificationMaster(NotificationService(self.storage),
                                     self.port)
    self.master.run()
    self.client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="test_namespace")
    # Give the master a moment to start serving.
    time.sleep(1)
def run_trigger_dag_function(self):
    """Repeatedly parse and schedule 'trigger_dag' until its task_1 has a
    TaskExecution row, then stop the scheduler."""
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="")
    scheduler_client = EventSchedulerClient(ns_client=ns_client)
    while True:
        with create_session() as session:
            executions = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'trigger_dag',
                TaskExecution.task_id == 'task_1').all()
            if len(executions) > 0:
                break
            # Not executed yet: re-trigger parsing and scheduling, then wait.
            scheduler_client.trigger_parse_dag()
            result = scheduler_client.schedule_dag('trigger_dag')
            print('result {}'.format(result.dagrun_id))
            time.sleep(5)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def run_airflow_dag_function(self):
    # Deploy a one-task workflow to Airflow via AIFlow, run it, then ask the
    # scheduler to stop.
    # waiting parsed dag file done
    from datetime import datetime
    ns_client = NotificationClient(server_uri='localhost:50051')
    with af.global_config_file(test_util.get_workflow_config_file()):
        with af.config('task_1'):
            # Single shell job; output_num=0 — presumably no downstream data
            # edges are produced (confirm against af.user_define_operation docs).
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(cmd_line=['echo "hello world!"']))
        af.deploy_to_airflow(test_util.get_project_path(),
                             dag_id='test_dag_111',
                             default_args={
                                 'schedule_interval': None,
                                 'start_date': datetime(2025, 12, 1),
                             })
        # Runs under the Airflow scheduler rather than AIFlow's own.
        context = af.run(project_path=test_util.get_project_path(),
                         dag_id='test_dag_111',
                         scheduler_type=SchedulerType.AIRFLOW)
        print(context.dagrun_id)
    # Give the run a moment to start before stopping the scheduler.
    time.sleep(5)
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
def test_send_listening_on_different_server(self):
    # Verifies cross-server delivery in an HA cluster: a watcher attached via
    # self.client receives events that were sent through a client connected
    # to a different server.
    event_list = []

    class TestWatch(EventWatcher):
        # Collects every delivered event into the shared event_list.
        def __init__(self, event_list) -> None:
            super().__init__()
            self.event_list = event_list

        def process(self, events: List[BaseEvent]):
            self.event_list.extend(events)

    # Bring up a second server and wait until cluster membership is visible.
    self.master2 = self.start_master("localhost", "50052")
    self.wait_for_new_members_detected("localhost:50052")
    another_client = NotificationClient(server_uri="localhost:50052")
    try:
        event1 = another_client.send_event(BaseEvent(key="key1", value="value1"))
        # Start listening from event1's version, so only later events arrive.
        self.client.start_listen_events(watcher=TestWatch(event_list),
                                        version=event1.version)
        another_client.send_event(BaseEvent(key="key2", value="value2"))
        another_client.send_event(BaseEvent(key="key3", value="value3"))
    finally:
        self.client.stop_listen_events()
    # Only key2 and key3 should have reached the watcher.
    self.assertEqual(2, len(event_list))
def setUp(self):
    """Reset the backing event storage and connect a fresh client before
    each test."""
    self.storage.clean_up()
    self.client = NotificationClient(server_uri="localhost:50051")
def execute(self, context):
    """Send this operator's configured event through a client connected to
    ``self.uri``.

    :param context: Airflow task context (unused).
    """
    NotificationClient(server_uri=self.uri).send_event(event=self.event)
def setUp(self):
    """Clean storage, start one master on port 50051, and connect an
    HA-enabled client to it."""
    self.storage.clean_up()
    self.master1 = self.start_master("localhost", "50051")
    self.client = NotificationClient(server_uri="localhost:50051",
                                     enable_ha=True)
def setUpClass(cls):
    """Start a notification master backed by in-memory storage and connect
    a shared client for the test class."""
    cls.storage = MemoryEventStorage()
    cls.master = NotificationMaster(NotificationService(cls.storage))
    cls.master.run()
    cls.client = NotificationClient(server_uri="localhost:50051")
def setUpClass(cls):
    """Start a notification master backed by the event-model storage and
    connect a shared client for the test class."""
    cls.master = NotificationMaster(
        NotificationService(EventModelStorage()))
    cls.master.run()
    cls.client = NotificationClient(server_uri="localhost:50051")
def __init__(self, server_uri=None, namespace=None, ns_client=None):
    """Wrap an existing notification client, or create one from the given
    server URI and namespace.

    :param server_uri: notification server URI (used only when ``ns_client``
                       is not supplied).
    :param namespace: default namespace for a newly created client.
    :param ns_client: pre-built client to reuse instead of creating one.
    """
    # The conditional expression only constructs a client when none is given.
    self.ns_client = (ns_client if ns_client is not None
                      else NotificationClient(server_uri, namespace))
def test_start_with_multiple_servers(self):
    """An HA client given a dead server plus a live one should settle on the
    live server's URI.

    Fix: the original ``assertTrue(self.client.current_uri, "localhost:50051")``
    treated the second argument as a failure *message*, so the assertion
    passed whenever ``current_uri`` was truthy; the intent is an equality
    check against the live server.
    """
    self.client.disable_high_availability()
    # 55001 has no server; the client must fail over to 50051.
    self.client = NotificationClient(
        server_uri="localhost:55001,localhost:50051",
        enable_ha=True)
    self.assertEqual("localhost:50051", self.client.current_uri)
def __init__(self, store_uri, server_uri, notification_uri=None):
    """Create the SQLAlchemy-backed model repo store and the notification
    client.

    :param store_uri: SQLAlchemy database URI for model metadata.
    :param server_uri: notification server URI, used when no dedicated
                       notification URI is supplied.
    :param notification_uri: optional dedicated notification service URI.
    """
    self.model_repo_store = SqlAlchemyStore(store_uri)
    # Prefer a dedicated notification URI; fall back to the server URI.
    uri = notification_uri if notification_uri is not None else server_uri
    self.notification_client = NotificationClient(uri)
def setUp(self):
    """Clean storage and build an HA client with short membership-listing
    and retry timeouts, pointed at port 50052."""
    self.storage.clean_up()
    self.client = NotificationClient(server_uri="localhost:50052",
                                     enable_ha=True,
                                     list_member_interval_ms=1000,
                                     retry_timeout_ms=10000)
def setUp(self):
    """Reset the event-model tables, start a notification master, and
    connect a client before each test."""
    clear_db_event_model()
    self.master = NotificationMaster(
        service=NotificationService(EventModelStorage()))
    self.master.run()
    self.client = NotificationClient(server_uri="localhost:50051")