Example #1
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(
                            SendExecutor(sender='task_2',
                                         key='key_1',
                                         value='value_1',
                                         event_type='UNDEFINED',
                                         port=server_port())))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                # task_5 (executor_2) may only start after an event with
                # key 'key_1' and value 'value_1' has been received from task_2.
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  event_key='key_1',
                                                  event_value='value_1')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            # Poll the Airflow DagRun until the workflow execution reaches a
            # terminal state.
            while True:
                with create_session() as session:

                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
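
SendExecutor is defined outside this snippet. The following is only a rough sketch of what it plausibly looks like, assuming the classes wrapped by af.PythonObjectExecutor expose an execute(function_context, input_list) hook and that the event is sent through the same notification_service client used elsewhere in these examples; the base class, import paths, and server URI format are assumptions, not taken from the snippet.

from notification_service.base_notification import BaseEvent
from notification_service.client import NotificationClient


class SendExecutor:
    """Assumed shape of the executor used by task_2 above.

    In the real tests this class presumably subclasses ai_flow's Python
    executor base class, which is not shown in these snippets.
    """

    def __init__(self, sender, key, value, event_type, port):
        self.sender = sender
        self.key = key
        self.value = value
        self.event_type = event_type
        self.port = port

    def execute(self, function_context, input_list):
        # Send the event that the control dependency declared above
        # (event_key='key_1', event_value='value_1') is waiting for.
        client = NotificationClient('localhost:{}'.format(self.port))
        client.send_event(BaseEvent(key=self.key,
                                    value=self.value,
                                    event_type=self.event_type))
        return []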
Example #2
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  namespace='test',
                                                  event_key='key_1',
                                                  event_value='value_1',
                                                  sender='*')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            flag = True
            while True:
                with create_session() as session:
                    tes = session.query(TaskExecution).filter(
                        TaskExecution.dag_id == 'test_project.test_workflow',
                        TaskExecution.task_id == 'task_2').all()
                    # Once task_2 has a TaskExecution, send the event that the
                    # control dependency on task_5 is waiting for (only once).
                    if len(tes) == 1 and flag:
                        client.send_event(
                            BaseEvent(key='key_1', value='value_1'))
                        flag = False
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
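
SimpleExecutor, used throughout these examples, is likewise defined elsewhere; from how it is used it appears to be a do-nothing executor that lets a task start and finish. A hypothetical minimal form, under the same assumed execute() hook as above:

class SimpleExecutor:
    """Hypothetical stand-in: performs no work, so the task simply runs and
    completes (or, as with task_5 here, waits to be triggered and then
    finishes immediately)."""

    def execute(self, function_context, input_list):
        return []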
Example #3
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(StreamPrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_1', value='value_1')))

        with af.config('job_3'):
            op_3 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_2', value='value_2')))

    af.user_define_control_dependency(op_1,
                                      op_2,
                                      event_key='key_1',
                                      event_type='UNDEFINED',
                                      event_value="value_1")

    af.user_define_control_dependency(op_1,
                                      op_3,
                                      event_key='key_2',
                                      event_type='UNDEFINED',
                                      event_value="value_2")
Example #4
def build_workflow():
    with af.global_config_file(project_path + '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            af.user_define_operation(af.PythonObjectExecutor(StreamPrintEventExecutor('job_1')))

        with af.config('job_2'):
            af.user_define_operation(af.PythonObjectExecutor(
                SendEventExecutor(key='key_1', value='value_1', num=5, post_time=5)))
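
SendEventExecutor, used in Examples #3 and #4, is also not shown here. The num and post_time arguments suggest it sends the same event repeatedly with a delay between sends; the sketch below is a guess under that assumption, and the client address and parameter semantics are assumptions as well.

import time

from notification_service.base_notification import BaseEvent
from notification_service.client import NotificationClient


class SendEventExecutor:
    """Assumed behaviour: emit the configured event `num` times, sleeping
    `post_time` seconds before each send."""

    def __init__(self, key, value, num=1, post_time=0):
        self.key = key
        self.value = value
        self.num = num
        self.post_time = post_time

    def execute(self, function_context, input_list):
        client = NotificationClient('localhost:50051')  # assumed server URI
        for _ in range(self.num):
            time.sleep(self.post_time)
            client.send_event(BaseEvent(key=self.key, value=self.value))
        return []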
Example #5
def build_workflow(workflow_config_path):
    with ai_flow.global_config_file(workflow_config_path):
        with ai_flow.config('job_1'):
            op_1 = ai_flow.user_define_operation(
                ai_flow.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with ai_flow.config('job_2'):
            op_2 = ai_flow.user_define_operation(
                ai_flow.PythonObjectExecutor(PrintHelloExecutor('job_2')))

        ai_flow.stop_before_control_dependency(op_2, op_1)
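
PrintHelloExecutor, which appears in this and several of the following examples, is not defined in these snippets either; judging by its name and constructor argument it just prints a greeting for the job it belongs to. A hypothetical minimal version:

class PrintHelloExecutor:
    """Hypothetical stand-in that only logs which job is running."""

    def __init__(self, job_name):
        self.job_name = job_name

    def execute(self, function_context, input_list):
        print('hello world, {}'.format(self.job_name))
        return []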
Example #6
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_2')))

        with af.config('job_3'):
            op_3 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_3')))

    af.stop_before_control_dependency(op_3, op_1)
    af.stop_before_control_dependency(op_3, op_2)
Example #7
    def test_deploy_airflow(self):
        airflow_path = af.project_config().get_airflow_deploy_path()
        if not os.path.exists(airflow_path):
            os.makedirs(airflow_path)
        with af.config(LocalPythonJobConfig(job_name="simple")):
            op = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        # Submit the project to the scheduler and block until the workflow
        # execution finishes.
        res = af.run(test_util.get_project_path())
        af.wait_workflow_execution_finished(res)
Example #8
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_1',
                                      value='value_1',
                                      num=3,
                                      post_time=20)))

    # With action=TaskAction.RESTART, job_1 is restarted on each matching
    # event from job_2, rather than only being started once.
    af.user_define_control_dependency(op_1,
                                      op_2,
                                      event_key='key_1',
                                      event_type='UNDEFINED',
                                      event_value="value_1",
                                      action=TaskAction.RESTART)
Example #9
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                with af.config('task_3'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                # task_3 (executor_2) must wait for a MODEL_GENERATED event
                # for 'model_1' before it is allowed to start.
                af.model_version_control_dependency(
                    src=executor_2,
                    dependency=executor_1,
                    model_name='model_1',
                    model_version_event_type=ModelVersionEventType.MODEL_GENERATED)
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            r_flag = True
            while True:
                with create_session() as session:
                    tes2 = session.query(TaskExecution).filter(
                        TaskExecution.dag_id == 'test_project.test_workflow',
                        TaskExecution.task_id == 'task_2').all()
                    if len(tes2) == 1 and r_flag:
                        # Registering a model version produces the
                        # MODEL_GENERATED event that task_3 is waiting for
                        # (done only once).
                        af.register_model_version(
                            model='model_1',
                            model_path='/tmp/model/v1',
                            current_stage=af.ModelVersionStage.GENERATED)
                        r_flag = False

                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Example #10
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            while True:
                with create_session() as session:
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)