def test_list_workflow_executions(self):
    with mock.patch(
            'ai_flow.test.scheduler.test_scheduling_service.MockScheduler'
    ) as mockScheduler:
        instance = mockScheduler.return_value
        self.server.scheduling_service._scheduler = instance

        instance.list_workflow_executions.return_value \
            = [WorkflowExecutionInfo(execution_id='id_1', state=State.INIT),
               WorkflowExecutionInfo(execution_id='id_2', state=State.INIT)]
        client = SchedulingClient("localhost:{}".format(_PORT))
        workflow_execution_list = client.list_workflow_executions(
            namespace='namespace', workflow_name='test_workflow')
        self.assertEqual(2, len(workflow_execution_list))
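# Note on the test pattern used throughout this listing: mock.patch
# swaps out MockScheduler inside the test module, the patched class's
# return_value is installed as the SchedulingService's _scheduler,
# one scheduler method is stubbed per test, and the assertions run
# against whatever the gRPC SchedulingClient returns. The surrounding
# test class (which starts the server and defines _PORT) is assumed
# here; it is not part of these snippets.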
def restart_job(self, job_name: Text,
                execution_id: Text) -> Optional[JobInfo]:
    """Restart one task of a running dag run; None if run or task is missing."""
    with create_session() as session:
        dag_run = session.query(DagRun).filter(
            DagRun.run_id == execution_id).first()
        if dag_run is None:
            return None
        if dag_run.state != State.RUNNING:
            raise Exception(
                'Execution {} in state {} cannot restart a job.'.format(
                    execution_id, dag_run.state))
        task = dag_run.get_task_instance(job_name, session)
        if task is None:
            return None
        self.airflow_client.schedule_task(
            dag_id=dag_run.dag_id,
            task_id=job_name,
            action=SchedulingAction.RESTART,
            context=ExecutionContext(dagrun_id=dag_run.run_id))
        project_name, workflow_name = self.dag_id_to_namespace_workflow(
            dag_run.dag_id)
        return JobInfo(
            job_name=job_name,
            state=self.airflow_state_to_state(task.state),
            workflow_execution=WorkflowExecutionInfo(
                workflow_info=WorkflowInfo(namespace=project_name,
                                           workflow_name=workflow_name),
                execution_id=dag_run.run_id,
                state=self.airflow_state_to_state(dag_run.state)))
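# Minimal usage sketch for restart_job; `scheduler` stands in for the
# Airflow-backed scheduler instance these methods belong to, and both
# ids are placeholders (assumptions, not part of the source):
job = scheduler.restart_job(job_name='train_job', execution_id='run_id_1')
print('not found' if job is None else (job.job_name, job.state))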
def list_jobs(self, execution_id: Text) -> List[JobInfo]:
    """List the jobs (task instances) of one workflow execution."""
    with create_session() as session:
        dag_run = session.query(DagRun).filter(
            DagRun.run_id == execution_id).first()
        if dag_run is None:
            # Return an empty list to match the declared List[JobInfo] type.
            return []
        task_list = session.query(TaskInstance).filter(
            TaskInstance.dag_id == dag_run.dag_id,
            TaskInstance.execution_date == dag_run.execution_date).all()
        # Query.all() returns a (possibly empty) list, never None.
        if not task_list:
            return []
        result = []
        project_name, workflow_name = self.dag_id_to_namespace_workflow(
            dag_run.dag_id)
        for task in task_list:
            job = JobInfo(
                job_name=task.task_id,
                state=self.airflow_state_to_state(task.state),
                workflow_execution=WorkflowExecutionInfo(
                    workflow_info=WorkflowInfo(
                        namespace=project_name,
                        workflow_name=workflow_name),
                    execution_id=dag_run.run_id,
                    state=self.airflow_state_to_state(dag_run.state)))
            result.append(job)
        return result
def test_list_jobs(self):
    with mock.patch(
            'ai_flow.test.scheduler.test_scheduling_service.MockScheduler'
    ) as mockScheduler:
        instance = mockScheduler.return_value
        self.server.scheduling_service._scheduler = instance

        instance.list_jobs.return_value \
            = [JobInfo(job_name='job_name_1',
                       state=State.RUNNING,
                       workflow_execution=WorkflowExecutionInfo(
                           execution_id='id', state=State.INIT)),
               JobInfo(job_name='job_name_2',
                       state=State.RUNNING,
                       workflow_execution=WorkflowExecutionInfo(
                           execution_id='id', state=State.INIT))]
        client = SchedulingClient("localhost:{}".format(_PORT))
        job_list = client.list_jobs(execution_id='id')
        self.assertEqual(2, len(job_list))
def proto_to_workflow_execution(
        proto: WorkflowExecutionProto) -> WorkflowExecutionInfo:
    if proto is None:
        return None
    else:
        return WorkflowExecutionInfo(
            execution_id=proto.execution_id,
            state=ProtoToMeta.proto_to_state(proto.execution_state),
            workflow_info=proto_to_workflow(proto.workflow))
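# Conversion sketch; assumes WorkflowExecutionProto accepts these
# fields as constructor kwargs, matching the fields the function reads
# (the values are placeholders):
proto = WorkflowExecutionProto(execution_id='id_1',
                               execution_state=StateProto.INIT)
info = proto_to_workflow_execution(proto)
assert info.execution_id == 'id_1'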
def start_new_workflow_execution(
        self, project_name: Text,
        workflow_name: Text) -> Optional[WorkflowExecutionInfo]:
    """Trigger a new dag run; None if the DAG has not been deployed."""
    dag_id = self.airflow_dag_id(project_name, workflow_name)
    deploy_path = self.config.properties().get('airflow_deploy_path')
    if deploy_path is None:
        raise Exception("airflow_deploy_path config not set!")
    if not self.dag_exist(dag_id):
        return None
    context: ExecutionContext = self.airflow_client.schedule_dag(dag_id)
    return WorkflowExecutionInfo(
        workflow_info=WorkflowInfo(namespace=project_name,
                                   workflow_name=workflow_name),
        execution_id=context.dagrun_id,
        state=job_meta.State.INIT)
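# Usage sketch: start a run, falling back when the DAG file has not
# been deployed yet; `scheduler` and the project/workflow names are
# assumptions:
execution = scheduler.start_new_workflow_execution(
    project_name='demo_project', workflow_name='demo_workflow')
if execution is None:
    print('DAG not found under airflow_deploy_path')
else:
    print(execution.execution_id, execution.state)  # State.INIT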
def test_get_workflow_execution(self):
    with mock.patch(
            'ai_flow.test.scheduler.test_scheduling_service.MockScheduler'
    ) as mockScheduler:
        instance = mockScheduler.return_value
        self.server.scheduling_service._scheduler = instance

        instance.get_workflow_execution.return_value \
            = WorkflowExecutionInfo(execution_id='id', state=State.INIT)
        client = SchedulingClient("localhost:{}".format(_PORT))
        workflow_execution = client.get_workflow_execution(
            execution_id='id')
        self.assertEqual('id', workflow_execution.execution_id)
        self.assertEqual(StateProto.INIT,
                         workflow_execution.execution_state)
def get_workflow_execution(
        self, execution_id: Text) -> Optional[WorkflowExecutionInfo]:
    """Look up one workflow execution by its Airflow run_id."""
    with create_session() as session:
        dag_run = session.query(DagRun).filter(
            DagRun.run_id == execution_id).first()
        if dag_run is None:
            return None
        state = self.airflow_state_to_state(dag_run.state)
        project_name, workflow_name = self.dag_id_to_namespace_workflow(
            dag_run.dag_id)
        return WorkflowExecutionInfo(
            workflow_info=WorkflowInfo(namespace=project_name,
                                       workflow_name=workflow_name),
            execution_id=dag_run.run_id,
            state=state)
def kill_workflow_execution(
        self, execution_id: Text) -> Optional[WorkflowExecutionInfo]:
    """Ask Airflow to stop a dag run; the returned state is KILLING."""
    with create_session() as session:
        dag_run = session.query(DagRun).filter(
            DagRun.run_id == execution_id).first()
        if dag_run is None:
            return None
        project_name, workflow_name = self.dag_id_to_namespace_workflow(
            dag_run.dag_id)
        # Keyword argument used for consistency with the other
        # ExecutionContext construction sites in this listing.
        context: ExecutionContext = ExecutionContext(dagrun_id=execution_id)
        current_context = self.airflow_client.stop_dag_run(
            dag_run.dag_id, context)
        return WorkflowExecutionInfo(
            workflow_info=WorkflowInfo(namespace=project_name,
                                       workflow_name=workflow_name),
            execution_id=current_context.dagrun_id,
            state=job_meta.State.KILLING)
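# Usage sketch: stopping a dag run is asynchronous, so the returned
# info reports KILLING rather than a terminal state (`scheduler` and
# the run id are assumptions):
execution = scheduler.kill_workflow_execution('run_id_1')
if execution is not None:
    assert execution.state == job_meta.State.KILLING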
def test_get_job(self):
    with mock.patch(
            'ai_flow.test.scheduler.test_scheduling_service.MockScheduler'
    ) as mockScheduler:
        instance = mockScheduler.return_value
        self.server.scheduling_service._scheduler = instance

        instance.get_job.return_value \
            = JobInfo(job_name='job_name',
                      state=State.RUNNING,
                      workflow_execution=WorkflowExecutionInfo(
                          execution_id='id', state=State.INIT))
        client = SchedulingClient("localhost:{}".format(_PORT))
        job = client.get_job(job_name='job_name', execution_id='id')
        self.assertEqual('job_name', job.name)
        self.assertEqual(StateProto.RUNNING, job.job_state)
        self.assertEqual('id', job.workflow_execution.execution_id)
        self.assertEqual(StateProto.INIT,
                         job.workflow_execution.execution_state)
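# Note: the assertions above read proto-level fields (job.name,
# job.job_state, StateProto values), which suggests SchedulingClient
# returns protobuf messages rather than JobInfo objects; this is an
# inference from the tests, not a documented guarantee.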
def list_workflow_executions(
        self, project_name: Text,
        workflow_name: Text) -> List[WorkflowExecutionInfo]:
    """List every execution (dag run) of one workflow."""
    dag_id = self.airflow_dag_id(project_name, workflow_name)
    with create_session() as session:
        dagrun_list = session.query(DagRun).filter(
            DagRun.dag_id == dag_id).all()
        # Query.all() returns a (possibly empty) list, never None.
        if not dagrun_list:
            return []
        result = []
        for dagrun in dagrun_list:
            state = self.airflow_state_to_state(dagrun.state)
            result.append(WorkflowExecutionInfo(
                workflow_info=WorkflowInfo(namespace=project_name,
                                           workflow_name=workflow_name),
                execution_id=dagrun.run_id,
                state=state))
        return result
def get_job(self, job_name: Text, execution_id: Text) -> Optional[JobInfo]:
    """Look up one job (task instance) inside a workflow execution."""
    with create_session() as session:
        dag_run = session.query(DagRun).filter(
            DagRun.run_id == execution_id).first()
        if dag_run is None:
            return None
        task = session.query(TaskInstance).filter(
            TaskInstance.dag_id == dag_run.dag_id,
            TaskInstance.execution_date == dag_run.execution_date,
            TaskInstance.task_id == job_name).first()
        if task is None:
            return None
        project_name, workflow_name = self.dag_id_to_namespace_workflow(
            dag_run.dag_id)
        return JobInfo(
            job_name=job_name,
            state=self.airflow_state_to_state(task.state),
            workflow_execution=WorkflowExecutionInfo(
                workflow_info=WorkflowInfo(namespace=project_name,
                                           workflow_name=workflow_name),
                execution_id=dag_run.run_id,
                state=self.airflow_state_to_state(dag_run.state)))
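# Usage sketch combining get_job and list_jobs (same assumptions as
# the earlier sketches; ids and job names are placeholders):
job = scheduler.get_job(job_name='train_job', execution_id='run_id_1')
print(None if job is None else job.state)
for job in scheduler.list_jobs(execution_id='run_id_1'):
    print(job.job_name, job.state)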