Example #1
# NOTE: the import paths below are best-effort assumptions for the
# event-based scheduler fork of Airflow (those for EventSchedulerClient and
# SchedulingAction match the other examples); adjust them to your installation.
import time

from airflow.contrib.jobs.scheduler_client import EventSchedulerClient
from airflow.events.scheduler_events import StopSchedulerEvent
from airflow.executors.scheduling_action import SchedulingAction
from airflow.models.taskexecution import TaskExecution
from airflow.utils.session import create_session
from notification_service.client import NotificationClient


# A method of a scheduler test case: ``self.port`` is assumed to be the port
# of the notification server started by the test.
def run_trigger_task_function(self):
    # Wait for the DAG file to be parsed before scheduling anything.
    time.sleep(5)
    ns_client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="a")
    client = EventSchedulerClient(ns_client=ns_client)
    # Trigger a run of the 'trigger_task' DAG.
    execution_context = client.schedule_dag('trigger_task')
    while True:
        with create_session() as session:
            tes = session.query(TaskExecution).filter(
                TaskExecution.dag_id == 'trigger_task',
                TaskExecution.task_id == 'task_1').all()
            if len(tes) > 0:
                # task_1 has run; ask the scheduler to start task_2.
                client.schedule_task('trigger_task', 'task_2',
                                     SchedulingAction.START,
                                     execution_context)
                while True:
                    with create_session() as session_2:
                        tes_2 = session_2.query(TaskExecution).filter(
                            TaskExecution.dag_id == 'trigger_task',
                            TaskExecution.task_id == 'task_2').all()
                        if len(tes_2) > 0:
                            break
                        else:
                            time.sleep(1)
                break
            else:
                time.sleep(1)
    # Shut down the event-based scheduler.
    ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
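The two busy-wait loops above poll the TaskExecution table in the same way, so the test reads more easily with a small polling helper. The sketch below uses only calls already present in the example; the helper's name and its poll_interval/timeout parameters are additions for illustration, not part of the original test.

def wait_for_task_execution(dag_id, task_id, poll_interval=1, timeout=60):
    """Poll the TaskExecution table until a matching record appears or the timeout expires."""
    waited = 0
    while waited < timeout:
        with create_session() as session:
            count = session.query(TaskExecution).filter(
                TaskExecution.dag_id == dag_id,
                TaskExecution.task_id == task_id).count()
        if count > 0:
            return True
        time.sleep(poll_interval)
        waited += poll_interval
    return False

With that helper, the body of run_trigger_task_function reduces to scheduling the DAG, waiting for task_1, scheduling task_2 with SchedulingAction.START, waiting for task_2, and finally sending the StopSchedulerEvent.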
Example #2
class AirflowOperation(object):
    def __init__(self,
                 notification_server_uri=None,
                 ns_client: NotificationClient = None):
        self.server_uri = notification_server_uri
        from airflow.contrib.jobs.event_based_scheduler_job import SCHEDULER_NAMESPACE
        self.namespace = SCHEDULER_NAMESPACE
        if ns_client is not None:
            from airflow.contrib.jobs.scheduler_client import EventSchedulerClient
            self.airflow_client = EventSchedulerClient(
                ns_client=ns_client, namespace=SCHEDULER_NAMESPACE)
        elif notification_server_uri is not None:
            from airflow.contrib.jobs.scheduler_client import EventSchedulerClient
            self.airflow_client = EventSchedulerClient(
                server_uri=notification_server_uri,
                namespace=SCHEDULER_NAMESPACE)
        else:
            raise Exception(
                'notification_server_uri and ns_client cannot both be None!')

    def stop_workflow(self, workflow_name) -> bool:
        """
        Stop the workflow. No more workflow executions (Airflow dag_runs) will be scheduled, and all running jobs will be stopped.

        :param workflow_name: workflow name
        :return: True if successful
        """
        # TODO For now, simply return True as long as the message is sent successfully;
        #  actually we need a response from the scheduler.
        try:
            from airflow.contrib.jobs.event_based_scheduler_job import SCHEDULER_NAMESPACE
            notification_client = NotificationClient(self.server_uri,
                                                     SCHEDULER_NAMESPACE)
            from airflow.events.scheduler_events import StopDagEvent
            notification_client.send_event(
                StopDagEvent(workflow_name).to_event())
            return True
        except Exception:
            return False

    def suspend_workflow(self, workflow_name) -> bool:
        """
        Suspend the workflow. No more dag_runs will be scheduled.

        :param workflow_name: workflow name
        :return: True if successful
        """
        pass

    def resume_workflow(self, workflow_name) -> bool:
        """
        Resume a stopped workflow.

        :param workflow_name: workflow name
        :return: True if successful
        """
        pass

    def trigger_workflow_execution(self, project_desc, workflow_name):
        """
        Trigger a new execution of the workflow immediately.

        :param project_desc: project descriptor; provides the Airflow deploy path
        :param workflow_name: workflow name
        :return: the ExecutionContext of the new workflow execution
        """
        deploy_path = project_desc.project_config.get_airflow_deploy_path()
        if deploy_path is None:
            raise Exception("airflow_deploy_path config not set!")
        airflow_file_path = deploy_path + '/' + workflow_name + '.py'
        self.airflow_client.trigger_parse_dag(airflow_file_path)
        return self.airflow_client.schedule_dag(workflow_name)

    def stop_workflow_execution(self, workflow_name, context) -> bool:
        """
        Stop the specified workflow execution (Airflow dag_run).
        """
        result = self.airflow_client.stop_dag_run(dag_id=workflow_name,
                                                  context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def start_task_instance(self, workflow_name, job_name, context) -> bool:
        """
        Force start a task. If it is already running, do nothing.

        :param workflow_name: workflow name
        :param job_name: job name
        :param context: context of workflow instance
        :return: True if the task is started
        """
        from airflow.executors.scheduling_action import SchedulingAction

        result = self.airflow_client.schedule_task(
            dag_id=workflow_name,
            task_id=job_name,
            action=SchedulingAction.START,
            context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def stop_task_instance(self, workflow_name, job_name, context) -> bool:
        """
        Force stop a running task

        :param workflow_name: workflow name
        :param job_name: job name
        :param context: context of workflow instance
        :return: True if the task is stopped
        """
        from airflow.executors.scheduling_action import SchedulingAction

        result = self.airflow_client.schedule_task(
            dag_id=workflow_name,
            task_id=job_name,
            action=SchedulingAction.STOP,
            context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def restart_task_instance(self, workflow_name, job_name, context) -> bool:
        """
        Force restart a task

        :param workflow_name: workflow name
        :param job_name: job name
        :param context: context of workflow instance
        :return: True if the task is restarted
        """
        from airflow.executors.scheduling_action import SchedulingAction
        result = self.airflow_client.schedule_task(
            dag_id=workflow_name,
            task_id=job_name,
            action=SchedulingAction.RESTART,
            context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def trigger_parse_dag(self, file_path):
        """Trigger a dag parse of specific file. """
        return self.airflow_client.trigger_parse_dag(file_path)
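A possible way to drive the class above, sketched with placeholder values: 'localhost:50051' stands in for the notification server URI, 'my_workflow' and 'job_1' for real workflow and job names, and project_desc is assumed to be an AI Flow project descriptor whose project_config provides get_airflow_deploy_path(), as trigger_workflow_execution expects.

# Hypothetical usage of AirflowOperation; the URI, names, and project_desc are placeholders.
op = AirflowOperation(notification_server_uri='localhost:50051')

# Parse the deployed DAG file and schedule a new run of it.
context = op.trigger_workflow_execution(project_desc, 'my_workflow')

# Force-start one job of that run, then stop the whole run.
op.start_task_instance('my_workflow', 'job_1', context)
op.stop_workflow_execution('my_workflow', context)

# Finally, stop the workflow so no further runs are scheduled.
op.stop_workflow('my_workflow')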
Example #3
# NOTE: this variant assumes the imports below at module level; the paths for
# NotificationClient and ExecutionContext are assumptions, the rest match the
# inline imports shown in Example #2.
from airflow.contrib.jobs.event_based_scheduler_job import SCHEDULER_NAMESPACE
from airflow.contrib.jobs.scheduler_client import EventSchedulerClient, ExecutionContext
from airflow.events.scheduler_events import StopDagEvent
from airflow.executors.scheduling_action import SchedulingAction
from notification_service.client import NotificationClient


class AirflowOperation(object):
    def __init__(self,
                 notification_server_uri=None,
                 ns_client: NotificationClient = None):
        self.server_uri = notification_server_uri
        self.namespace = SCHEDULER_NAMESPACE
        if ns_client is not None:
            self.airflow_client = EventSchedulerClient(
                ns_client=ns_client, namespace=SCHEDULER_NAMESPACE)
        elif notification_server_uri is not None:
            self.airflow_client = EventSchedulerClient(
                server_uri=notification_server_uri,
                namespace=SCHEDULER_NAMESPACE)
        else:
            raise Exception(
                'notification_server_uri and ns_client cannot both be None!')

    def stop_workflow(self, workflow_name) -> bool:
        """
        Stop the workflow. No more workflow executions (Airflow dag_runs) will be scheduled, and all running jobs will be stopped.

        :param workflow_name: workflow name
        :return: True if successful
        """
        # TODO For now, simply return True as long as the message is sent successfully;
        #  actually we need a response from the scheduler.
        try:
            notification_client = NotificationClient(self.server_uri,
                                                     SCHEDULER_NAMESPACE)
            notification_client.send_event(
                StopDagEvent(workflow_name).to_event())
            return True
        except Exception:
            return False

    def suspend_workflow(self, workflow_name) -> bool:
        """
        Suspend the workflow. No more dag_runs will be scheduled.

        :param workflow_name: workflow name
        :return: True if successful
        """
        pass

    def resume_workflow(self, workflow_name) -> bool:
        """
        Resume a stopped workflow.

        :param workflow_name: workflow name
        :return: True if successful
        """
        pass

    def trigger_workflow_execution(self, workflow_name) -> ExecutionContext:
        """
        Trigger a new execution of the workflow immediately.

        :param workflow_name: workflow name
        :return: the ExecutionContext of the new workflow execution
        """
        self.airflow_client.trigger_parse_dag()
        return self.airflow_client.schedule_dag(workflow_name)

    def stop_workflow_execution(self, workflow_name, context) -> bool:
        """
        Stop the specified workflow execution (Airflow dag_run).
        """
        result = self.airflow_client.stop_dag_run(dag_id=workflow_name,
                                                  context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def start_task_instance(self, workflow_name, job_name,
                            context: ExecutionContext) -> bool:
        """
        Force start a task. If it is already running, do nothing.

        :param workflow_name: workflow name
        :param job_name: job name
        :param context: context of workflow instance
        :return: True if the task is started
        """
        result = self.airflow_client.schedule_task(
            dag_id=workflow_name,
            task_id=job_name,
            action=SchedulingAction.START,
            context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def stop_task_instance(self, workflow_name, job_name,
                           context: ExecutionContext) -> bool:
        """
        Force stop a running task

        :param workflow_name: workflow name
        :param job_name: job name
        :param context: context of workflow instance
        :return: True if the task is stopped
        """
        result = self.airflow_client.schedule_task(
            dag_id=workflow_name,
            task_id=job_name,
            action=SchedulingAction.STOP,
            context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def restart_task_instance(self, workflow_name, job_name,
                              context: ExecutionContext) -> bool:
        """
        Force restart a task

        :param workflow_name: workflow name
        :param job_name: job name
        :param context: context of workflow instance
        :return: True if the task is restarted
        """
        result = self.airflow_client.schedule_task(
            dag_id=workflow_name,
            task_id=job_name,
            action=SchedulingAction.RESTART,
            context=context)
        if result and result.dagrun_id == context.dagrun_id:
            return True
        else:
            return False

    def trigger_parse_dag(self):
        """Trigger a dag parse. """
        return self.airflow_client.trigger_parse_dag()
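This variant can also be built on an existing NotificationClient rather than a server URI. A brief sketch with a placeholder URI and placeholder workflow/job names; the NotificationClient constructor arguments follow the usage shown in Example #1.

# Hypothetical usage; the URI and workflow/job names are placeholders.
ns_client = NotificationClient(server_uri='localhost:50051',
                               default_namespace=SCHEDULER_NAMESPACE)
op = AirflowOperation(ns_client=ns_client)

# Trigger one workflow execution and control its tasks via the returned context.
context = op.trigger_workflow_execution('my_workflow')
op.restart_task_instance('my_workflow', 'job_1', context)
op.stop_workflow_execution('my_workflow', context)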