def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_1'):
                    cmd_executor = af.user_define_operation(
                        output_num=0,
                        executor=CmdExecutor(
                            cmd_line='echo "hello world" && sleep 30'.format(
                                1)))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            we = af.workflow_operation.start_new_workflow_execution(
                workflow_name)
            # Poll the Airflow metadata DB: stop the job once it starts
            # running, then exit the loop after it reaches KILLED.
            while True:
                with create_session() as session:
                    ti = session.query(TaskInstance) \
                        .filter(TaskInstance.dag_id == 'test_project.test_workflow').first()
                    if ti is not None and ti.state == State.RUNNING:
                        af.workflow_operation.stop_job('task_1',
                                                       we.execution_id)
                    elif ti is not None and ti.state == State.KILLED:
                        break
                    else:
                        time.sleep(1)
            job_info = af.workflow_operation.get_job('task_1', we.execution_id)
            self.assertEqual('task_1', job_info.job_name)
            self.assertEqual(
                project_name,
                job_info.workflow_execution.workflow_info.namespace)
            self.assertEqual(
                workflow_name,
                job_info.workflow_execution.workflow_info.workflow_name)
            job_info_list = af.workflow_operation.list_jobs(we.execution_id)
            self.assertEqual(1, len(job_info_list))
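The polling loop above can be factored into a reusable helper. A minimal sketch, assuming the same create_session, TaskInstance, and State imports the test already uses (wait_for_task_state is a hypothetical name, not part of the test suite):

def wait_for_task_state(dag_id, expected_states, poll_interval=1):
    # Hypothetical helper: poll the Airflow metadata DB until the first
    # task instance of dag_id reaches one of expected_states.
    while True:
        with create_session() as session:
            ti = session.query(TaskInstance) \
                .filter(TaskInstance.dag_id == dag_id).first()
            if ti is not None and ti.state in expected_states:
                return ti
        time.sleep(poll_interval)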
Example #2
 def test_user_define_control_dependency(self):
     print(sys._getframe().f_code.co_name)
     trigger = af.external_trigger(name='stream_trigger')
     job_config = af.BaseJobConfig('local', 'cmd_line')
     job_config.job_name = 'test_cmd'
     with af.config(job_config):
         cmd_executor = af.user_define_operation(
             output_num=0,
             executor=CmdExecutor(
                 cmd_line="echo 'hello world' && sleep {}".format(1)))
     af.user_define_control_dependency(
         src=cmd_executor,
         dependency=trigger,
         event_key='key',
         event_value='value',
         event_type='name',
         condition=MetCondition.NECESSARY,
         action=TaskAction.START,
         life=EventLife.ONCE,
         value_condition=MetValueCondition.UPDATE)
     workflow_id = af.submit_ai_flow()
     af.get_ai_flow_client().publish_event('key', 'value1', 'name')
     time.sleep(5)
     af.get_ai_flow_client().publish_event('key', 'value2', 'name')
     time.sleep(10)
     af.stop_execution_by_id(workflow_id)
     res = af.get_ai_flow_client().list_job(5, 0)
     self.assertEqual(3, len(res))
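The publish-then-wait pattern above repeats; a hypothetical wrapper keeps it in one place (publish_and_wait is not part of the AIFlow API, only a local convenience):

def publish_and_wait(key, value, event_type, wait_seconds):
    # Hypothetical convenience wrapper: publish an event and give the
    # scheduler time to react, as the test does inline above.
    af.get_ai_flow_client().publish_event(key, value, event_type)
    time.sleep(wait_seconds)

publish_and_wait('key', 'value1', 'name', 5)
publish_and_wait('key', 'value2', 'name', 10)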
Example #3
 def build_and_submit_ai_flow():
     with af.global_config_file(workflow_config_file()):
         with af.config('task_1'):
             cmd_executor = af.user_define_operation(
                 output_num=0,
                 executor=CmdExecutor(
                     cmd_line='echo "hello world"'.format(1)))
         workflow_info = af.workflow_operation.submit_workflow(
             'test_workflow')
     return workflow_info.workflow_name
Example #4
 def build_ai_graph(sleep_time: int):
     with af.engine('cmd_line'):
         p_list = []
         for i in range(3):
             p = af.user_define_operation(executor=CmdExecutor(
                 cmd_line="echo 'hello_{}' && sleep {}".format(
                     i, sleep_time)))
             p_list.append(p)
         af.stop_before_control_dependency(p_list[0], p_list[1])
         af.stop_before_control_dependency(p_list[0], p_list[2])
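A possible driver for this graph, sketched from calls that appear in Example #2 (submit_ai_flow, stop_execution_by_id); the final sleep is an arbitrary grace period, not taken from the source:

# Sketch: build the three-job graph, submit it, then stop the execution.
build_ai_graph(sleep_time=3)
workflow_id = af.submit_ai_flow()
time.sleep(10)
af.stop_execution_by_id(workflow_id)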
Example #5
        def build_ai_graph() -> AIGraph:
            with af.engine('cmd_line'):
                p_list = []
                for i in range(3):
                    p = af.user_define_operation(
                        executor=CmdExecutor(cmd_line="echo 'hello_{}' && sleep 3".format(i)))
                    p_list.append(p)
                af.stop_before_control_dependency(p_list[0], p_list[1])
                af.stop_before_control_dependency(p_list[0], p_list[2])

            return af.default_graph()
Example #6
 def test_k8s_cmd(self):
     print(sys._getframe().f_code.co_name)
     project_path = os.path.dirname(__file__) + '/../'
     job_config = af.KubernetesCMDJobConfig()
     job_config.job_name = 'test_cmd'
     with af.config(job_config):
         cmd_executor = af.user_define_operation(
             output_num=0,
             executor=CmdExecutor(
                 cmd_line="echo 'hello world' && sleep {}".format(1)))
     code_text = af.generate_airflow_file_text(project_path, "hh")
     print(code_text)
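The generated text is only printed above; for Airflow to pick it up, it would need to land in the DAGs folder. A minimal sketch, assuming the default ~/airflow/dags location (the file name is arbitrary and hypothetical):

# Hypothetical: write the generated DAG definition where the Airflow
# scheduler looks for DAG files (~/airflow/dags is the default).
dag_file = os.path.join(os.path.expanduser('~/airflow/dags'), 'hh.py')
with open(dag_file, 'w') as f:
    f.write(code_text)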
Example #7
 def run_task_function(client: NotificationClient):
     with af.global_config_file(workflow_config_file()):
         with af.config('task_1'):
             cmd_executor = af.user_define_operation(
                 output_num=0,
                 executor=CmdExecutor(
                     cmd_line='echo "hello world" && sleep 30'.format(
                         1)))
         workflow_info = af.workflow_operation.submit_workflow(
             workflow_name)
         self.assertFalse(is_paused())
         af.workflow_operation.pause_workflow_scheduling(workflow_name)
         self.assertTrue(is_paused())
         af.workflow_operation.resume_workflow_scheduling(workflow_name)
         self.assertFalse(is_paused())
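is_paused() is not shown in the snippet. One plausible implementation, assuming Airflow's DagModel and the test_project namespace used elsewhere in these examples:

from airflow.models import DagModel

def is_paused():
    # Assumed implementation: read the pause flag for this workflow's
    # DAG from the Airflow metadata DB.
    with create_session() as session:
        dag = session.query(DagModel) \
            .filter(DagModel.dag_id == 'test_project.{}'.format(workflow_name)) \
            .first()
        return dag is not None and dag.is_paused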
Example #8
    def test_stream_with_external_trigger_with_model_control(self):
        print(sys._getframe().f_code.co_name)
        model_name = 'test_create_model_version'
        model_desc = 'test create model version'
        response = af.register_model(model_name=model_name,
                                     model_type=af.ModelType.CHECKPOINT,
                                     model_desc=model_desc)

        trigger = af.external_trigger(name='stream_trigger')
        job_config = af.BaseJobConfig('local', 'cmd_line')
        job_config.job_name = 'test_cmd'
        with af.config(job_config):
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(
                    cmd_line="echo 'hello world' && sleep {}".format(1)))
        af.model_version_control_dependency(
            src=cmd_executor,
            dependency=trigger,
            model_name=model_name,
            model_version_event_type='MODEL_DEPLOYED')
        workflow_id = af.submit_ai_flow()

        model_path1 = 'fs://source1.pkl'
        model_metric1 = 'http://metric1'
        model_flavor1 = '{"flavor.version":1}'
        version_desc1 = 'test create model version1'
        time.sleep(1)
        response = af.register_model_version(
            model=model_name,
            model_path=model_path1,
            model_metric=model_metric1,
            model_flavor=model_flavor1,
            version_desc=version_desc1,
            current_stage=af.ModelVersionStage.DEPLOYED)
        time.sleep(5)
        response = af.register_model_version(
            model=model_name,
            model_path=model_path1,
            model_metric=model_metric1,
            model_flavor=model_flavor1,
            version_desc=version_desc1,
            current_stage=af.ModelVersionStage.DEPLOYED)
        time.sleep(10)
        af.stop_execution_by_id(workflow_id)
        res = af.get_ai_flow_client().list_job(5, 0)
        self.assertEqual(3, len(res))
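Example #8 registers the identical version payload twice; a hypothetical helper removes the duplication:

def deploy_model_version(name, path, metric, flavor, desc):
    # Hypothetical wrapper around the register_model_version call the
    # test repeats verbatim above.
    return af.register_model_version(
        model=name,
        model_path=path,
        model_metric=metric,
        model_flavor=flavor,
        version_desc=desc,
        current_stage=af.ModelVersionStage.DEPLOYED)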
Example #9
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_1'):
                    cmd_executor = af.user_define_operation(
                        output_num=0,
                        executor=CmdExecutor(
                            cmd_line='echo "hello world"'.format(1)))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            while True:
                with create_session() as session:
                    dag_run = session.query(DagRun)\
                        .filter(DagRun.dag_id == 'test_project.{}'.format(workflow_name)).first()
                    if dag_run is not None and dag_run.state == State.SUCCESS:
                        break
                    else:
                        time.sleep(1)
Example #10
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_1'):
                    cmd_executor = af.user_define_operation(
                        output_num=0,
                        executor=CmdExecutor(
                            cmd_line='echo "hello world"'.format(1)))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            workflow_info = af.workflow_operation.get_workflow(workflow_name)
            self.assertEqual(workflow_name, workflow_info.workflow_name)
            self.assertEqual(project_name, workflow_info.namespace)

            workflow_info_list = af.workflow_operation.list_workflows()
            self.assertEqual(1, len(workflow_info_list))
            workflow_info = af.workflow_operation.delete_workflow(
                workflow_name)
            with self.assertRaises(Exception) as context:
                workflow_info = af.workflow_operation.get_workflow(
                    workflow_name)
Example #11
 def run_airflow_dag_function(self):
     # wait until the Airflow scheduler has parsed the generated DAG file
     from datetime import datetime
     ns_client = NotificationClient(server_uri='localhost:50051')
     with af.global_config_file(test_util.get_workflow_config_file()):
         with af.config('task_1'):
             cmd_executor = af.user_define_operation(
                 output_num=0,
                 executor=CmdExecutor(cmd_line=['echo "hello world!"']))
     af.deploy_to_airflow(test_util.get_project_path(),
                          dag_id='test_dag_111',
                          default_args={
                              'schedule_interval': None,
                              'start_date': datetime(2025, 12, 1),
                          })
     context = af.run(project_path=test_util.get_project_path(),
                      dag_id='test_dag_111',
                      scheduler_type=SchedulerType.AIRFLOW)
     print(context.dagrun_id)
     time.sleep(5)
     ns_client.send_event(StopSchedulerEvent(job_id=0).to_event())
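The fixed time.sleep(5) races against the DAG run; a sketch that instead polls for a terminal state, reusing the DagRun pattern from Example #9 (create_session, DagRun, and State as imported there):

# Sketch: wait for the triggered run to finish before stopping the
# scheduler, instead of sleeping a fixed five seconds.
while True:
    with create_session() as session:
        dag_run = session.query(DagRun) \
            .filter(DagRun.dag_id == 'test_dag_111').first()
        if dag_run is not None and dag_run.state in (State.SUCCESS, State.FAILED):
            break
    time.sleep(1)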