Example #1
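# job_2 and job_3 each send an event ('key_1', 'key_2'); op_1 (job_1) is wired to both
# events through user-defined control dependencies.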
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(StreamPrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_1', value='value_1')))

        with af.config('job_3'):
            op_3 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_2', value='value_2')))

    af.user_define_control_dependency(op_1,
                                      op_2,
                                      event_key='key_1',
                                      event_type='UNDEFINED',
                                      event_value="value_1")

    af.user_define_control_dependency(op_1,
                                      op_3,
                                      event_key='key_2',
                                      event_type='UNDEFINED',
                                      event_value="value_2")
Example #2
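        # Test helper: task_5 is made dependent on event 'key_1' (any sender); once task_2 has a
        # TaskExecution record, the client publishes that event, then the loop waits for the DagRun to finish.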
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  namespace='test',
                                                  event_key='key_1',
                                                  event_value='value_1',
                                                  sender='*')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            flag = True
            while True:
                with create_session() as session:
                    tes = session.query(TaskExecution).filter(
                        TaskExecution.dag_id == 'test_project.test_workflow',
                        TaskExecution.task_id == 'task_2').all()
                    if 1 == len(tes) and flag:
                        client.send_event(
                            BaseEvent(key='key_1', value='value_1'))
                        flag = False
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Example #3
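        # Variant of the previous test: task_2 itself sends the 'key_1' event via SendExecutor,
        # and task_5 depends on it; the loop simply waits for the DagRun to finish.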
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(
                            SendExecutor(sender='task_2',
                                         key='key_1',
                                         value='value_1',
                                         event_type='UNDEFINED',
                                         port=server_port())))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  event_key='key_1',
                                                  event_value='value_1')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            while True:
                with create_session() as session:

                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Example #4
def build_workflow():
    with af.global_config_file(project_path + '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            af.user_define_operation(af.PythonObjectExecutor(StreamPrintEventExecutor('job_1')))

        with af.config('job_2'):
            af.user_define_operation(af.PythonObjectExecutor(
                SendEventExecutor(key='key_1', value='value_1', num=5, post_time=5)))
Example #5
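# Two PrintHello jobs; the stop_before control dependency makes job_2 start only after job_1 finishes.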
def build_workflow(workflow_config_path):
    with ai_flow.global_config_file(workflow_config_path):
        with ai_flow.config('job_1'):
            op_1 = ai_flow.user_define_operation(
                ai_flow.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with ai_flow.config('job_2'):
            op_2 = ai_flow.user_define_operation(
                ai_flow.PythonObjectExecutor(PrintHelloExecutor('job_2')))

        ai_flow.stop_before_control_dependency(op_2, op_1)
Example #6
def run_project(project_root_path):

    af.set_project_config_file(project_root_path + '/project.yaml')
    # Configure the command-line job; we set the platform to local and the engine to cmd_line here
    cmd_job_config = af.BaseJobConfig(platform=LocalPlatform.platform(), engine=CMDEngine().engine())
    with af.config(cmd_job_config):
        # Command line job executor
        cmd_job = af.user_define_operation(executor=CmdExecutor(cmd_line="echo Start AI flow"))

    # Configure the python job; we set the platform to local and the engine to python here
    python_job_config = af.BaseJobConfig(platform=LocalPlatform.platform(), engine=PythonEngine.engine())

    # Set the execution mode of this python job to BATCH,
    # which indicates that jobs with this config run as batch jobs.
    python_job_config.exec_mode = af.ExecutionMode.BATCH

    with af.config(python_job_config):
        # Path of the source data (under the '..../simple_transform_airflow' dir)
        source_path = os.path.dirname(os.path.abspath(__file__)) + '/source_data.csv'
        # Path of Sink data
        sink_path = os.path.dirname(os.path.abspath(__file__)) + '/sink_data.csv'

        # To make the project replaceable, we register the example in the metadata service
        read_example_meta = af.register_example(name='read_example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                                data_format='csv', data_type='pandas', batch_uri=source_path)

        # Read training example using af.read_example()
        # example_info is the meta information of the example
        read_example_channel = af.read_example(example_info=read_example_meta, exec_args=ExecuteArgs(
            batch_properties=Args(header=None, names=["a", "b", "c"])))

        # Transform examples using af.transform()
        transform_channel = af.transform(input_data_list=[read_example_channel],
                                         executor=PythonObjectExecutor(python_object=SimpleTransform()))

        write_example_meta = af.register_example(name='write_example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                                 data_format='csv', data_type='pandas', batch_uri=sink_path)

        # Write example to specific path
        write = af.write_example(input_data=transform_channel, example_info=write_example_meta,
                                 exec_args=ExecuteArgs(batch_properties=Args(sep=',', header=False, index=False)))

    # Add a control dependency, which means the read_example job will start right after the command line job finishes.
    af.stop_before_control_dependency(read_example_channel, cmd_job)

    transform_dag = 'simple_transform'
    af.deploy_to_airflow(project_root_path, dag_id=transform_dag)
    context = af.run(project_path=project_root_path,
                     dag_id=transform_dag,
                     scheduler_type=SchedulerType.AIRFLOW)
Example #7
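 # Verifies that per-job settings (engine and periodic config) are loaded from workflow.yaml:
 # task_1 uses an interval config, task_2 a cron expression.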
 def test_context_with_yaml_file(self):
     config_file = path_util.get_file_dir(__file__) + "/workflow.yaml"
     with af.global_config_file(config_path=config_file) as g_config:
         with af.config('task_1') as config_1:
             self.assertEqual('task_1', config_1.job_name)
             self.assertEqual('cmd_line', config_1.engine)
             self.assertEqual('interval',
                              config_1.periodic_config.periodic_type)
             self.assertEqual(20, config_1.periodic_config.args['seconds'])
         with af.config('task_2') as config_2:
             self.assertEqual('task_2', config_2.job_name)
             self.assertEqual('cmd_line', config_2.engine)
             self.assertEqual('cron',
                              config_2.periodic_config.periodic_type)
             self.assertEqual('* * * * *', config_2.periodic_config.args)
Example #8
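    # Batch training with one extra output channel: reads a batch example, trains the MNIST model,
    # writes the output channel to an npz example, then runs the workflow and waits for it to finish.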
    def test_batch_train_component_with_an_output(self):
        input_example_meta = af.register_example(
            name='batch_train_example',
            support_type=ExampleSupportType.EXAMPLE_BATCH)
        model_meta = af.register_model(model_name='mnist_model',
                                       model_type=ModelType.SAVED_MODEL)

        example_meta = af.register_example(
            name='output_example',
            support_type=ExampleSupportType.EXAMPLE_BATCH,
            data_type='numpy',
            data_format='npz',
            batch_uri=os.path.abspath(
                os.path.dirname(__file__) + '/numpy_output.npz'))
        with af.config(
                af.BaseJobConfig(platform='local',
                                 engine='python',
                                 job_name='batch_train')):
            input_example = af.read_example(
                example_info=input_example_meta,
                executor=PythonObjectExecutor(
                    python_object=ReadBatchExample()))
            train_channel = af.train(
                input_data_list=[input_example],
                executor=PythonObjectExecutor(
                    python_object=TrainBatchMnistModelWithOutput()),
                model_info=model_meta,
                output_num=1)
            af.write_example(input_data=train_channel,
                             example_info=example_meta)
        workflow_id = af.run(test_util.get_project_path())
        res = af.wait_workflow_execution_finished(workflow_id)
        self.assertEqual(0, res)
Example #9
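# Submits the 'vvp_python_job' Flink/VVP job and runs it through the Airflow scheduler.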
def run_flink_python_job():
    with af.global_config_file(test_util.get_job_config_file()):
        with af.config('vvp_python_job'):
            faf.vvp_job()
    workflow_id = af.run(test_util.get_project_path(),
                         dag_id='wordcount_vvp_python',
                         scheduler_type=af.SchedulerType.AIRFLOW)
Example #10
def run_flink_job():
    with af.global_config_file(test_util.get_job_config_file()):
        with af.config('vvp_job'):
            faf.vvp_job()
    workflow_id = af.run(test_util.get_project_path())
    res = af.wait_workflow_execution_finished(workflow_id)
    print(res)
Example #11
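    # Stream transform pipeline: reads test1.csv as a stream, transforms it, writes the result to an
    # output CSV, then runs the workflow and waits for it to finish.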
    def test_stream_transform_component(self):
        file = get_file_dir(__file__) + '/test1.csv'
        input_example_meta = af.register_example(
            name='test_example',
            support_type=ExampleSupportType.EXAMPLE_BOTH,
            stream_uri=file)
        output_file = get_file_dir(
            __file__) + "/output_transform_stream_test1.csv"
        output_example_meta = af.register_example(
            name='test_example_output',
            support_type=ExampleSupportType.EXAMPLE_BOTH,
            stream_uri=output_file)
        with af.config(
                af.BaseJobConfig(platform='local',
                                 engine='python',
                                 job_name='stream_transform')):
            input_example = af.read_example(
                example_info=input_example_meta,
                executor=PythonObjectExecutor(
                    python_object=ReadStreamExample()))
            transform_example = af.transform(
                input_data_list=[input_example],
                executor=PythonObjectExecutor(
                    python_object=TransformStreamData()))

            af.write_example(input_data=transform_example,
                             example_info=output_example_meta.name,
                             executor=PythonObjectExecutor(
                                 python_object=WriteStreamExample()))
        workflow_id = af.run(test_util.get_project_path())
        res = af.wait_workflow_execution_finished(workflow_id)
        self.assertEqual(0, res)
Example #12
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_1'):
                    cmd_executor = af.user_define_operation(
                        output_num=0,
                        executor=CmdExecutor(
                            cmd_line='echo "hello world" && sleep 30'.format(
                                1)))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            we = af.workflow_operation.start_new_workflow_execution(
                workflow_name)
            while True:
                with create_session() as session:
                    ti = session.query(TaskInstance) \
                        .filter(TaskInstance.dag_id == 'test_project.test_workflow').first()
                    if ti is not None and ti.state == State.RUNNING:
                        af.workflow_operation.stop_job('task_1',
                                                       we.execution_id)
                    elif ti is not None and ti.state == State.KILLED:
                        break
                    else:
                        time.sleep(1)
            job_info = af.workflow_operation.get_job('task_1', we.execution_id)
            self.assertEqual('task_1', job_info.job_name)
            self.assertEqual(
                project_name,
                job_info.workflow_execution.workflow_info.namespace)
            self.assertEqual(
                workflow_name,
                job_info.workflow_execution.workflow_info.workflow_name)
            job_info_list = af.workflow_operation.list_jobs(we.execution_id)
            self.assertEqual(1, len(job_info_list))
Example #13
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config(task_config):
                    input_example = af.read_example(
                        example_info=example_1,
                        executor=faf.flink_executor.FlinkPythonExecutor(
                            python_object=Source()))
                    processed = af.transform(
                        input_data_list=[input_example],
                        executor=faf.flink_executor.FlinkPythonExecutor(
                            python_object=Transformer()))

                    af.write_example(
                        input_data=processed,
                        example_info=example_2,
                        executor=faf.flink_executor.FlinkPythonExecutor(
                            python_object=Sink()))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            while True:
                with create_session() as session:
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Example #14
 def test_read_example_with_numpy_npy(self):
     npy_name = 'test.npy'
     np.save(file=npy_name, arr=np.arange(10))
     input_example_meta = af.register_example(
         name='input_numpy_example',
         data_type='numpy',
         data_format='npy',
         support_type=ExampleSupportType.EXAMPLE_BATCH,
         batch_uri=os.path.abspath(
             os.path.dirname(__file__) + "/" + npy_name))
     output_example_meta = af.register_example(
          name='output_numpy_example',
         data_type='numpy',
         data_format='npy',
         support_type=ExampleSupportType.EXAMPLE_BATCH,
         batch_uri=os.path.abspath(
             os.path.dirname(__file__) + '/numpy_output.npy'))
     with af.config(
             af.BaseJobConfig(platform='local',
                              engine='python',
                              job_name='test_npy')):
         example_channel = af.read_example(example_info=input_example_meta)
         af.write_example(input_data=example_channel,
                          example_info=output_example_meta)
     workflow_id = af.run(test_util.get_project_path())
     res = af.wait_workflow_execution_finished(workflow_id)
     self.assertEqual(0, res)
Example #15
 def test_read_example_with_pandas(self):
     input_example_meta = af.register_example(
         name='input_pandas_example',
         data_type='pandas',
         data_format='csv',
         support_type=ExampleSupportType.EXAMPLE_BATCH,
         batch_uri=os.path.abspath(
             os.path.dirname(__file__) + '/test1.csv'))
     output_example_meta = af.register_example(
          name='output_pandas_example',
         data_type='pandas',
         data_format='csv',
         support_type=ExampleSupportType.EXAMPLE_BATCH,
         batch_uri=os.path.abspath(
             os.path.dirname(__file__) + '/pandas_output.csv'))
     with af.config(
             af.BaseJobConfig(platform='local',
                              engine='python',
                              job_name='test_csv')):
         example_channel = af.read_example(example_info=input_example_meta)
         af.write_example(input_data=example_channel,
                          example_info=output_example_meta)
     workflow_id = af.run(test_util.get_project_path())
     res = af.wait_workflow_execution_finished(workflow_id)
     self.assertEqual(0, res)
Example #16
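 # Wires a cmd_line job to an external trigger through a user-defined control dependency,
 # publishes two events on the watched key, then stops the execution and checks the job list.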
 def test_user_define_control_dependency(self):
     print(sys._getframe().f_code.co_name)
     trigger = af.external_trigger(name='stream_trigger')
     job_config = af.BaseJobConfig('local', 'cmd_line')
     job_config.job_name = 'test_cmd'
     with af.config(job_config):
         cmd_executor = af.user_define_operation(
             output_num=0,
             executor=CmdExecutor(
                 cmd_line="echo 'hello world' && sleep {}".format(1)))
     af.user_define_control_dependency(
         src=cmd_executor,
         dependency=trigger,
         event_key='key',
         event_value='value',
         event_type='name',
         condition=MetCondition.NECESSARY,
         action=TaskAction.START,
         life=EventLife.ONCE,
         value_condition=MetValueCondition.UPDATE)
     workflow_id = af.submit_ai_flow()
     af.get_ai_flow_client().publish_event('key', 'value1', 'name')
     time.sleep(5)
     af.get_ai_flow_client().publish_event('key', 'value2', 'name')
     time.sleep(10)
     af.stop_execution_by_id(workflow_id)
     res = af.get_ai_flow_client().list_job(5, 0)
     self.assertEqual(3, len(res))
Example #17
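 # Trains the MNIST model in batch and in streaming mode; the stop_before control dependency
 # makes the stream training job start only after the batch training job finishes.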
 def test_stream_train_component(self):
     batch_input_example_meta = af.register_example(
         name='stream_train_example',
         support_type=ExampleSupportType.EXAMPLE_BOTH)
     model_meta = af.register_model(model_name='mnist_model',
                                    model_type=ModelType.SAVED_MODEL)
     stream_input_example_meta = af.register_example(
         name='stream_train_example',
         support_type=ExampleSupportType.EXAMPLE_BOTH)
     with af.config(
             af.BaseJobConfig(platform='local',
                              engine='python',
                              job_name='stream_train')):
         batch_input_example = af.read_example(
             example_info=batch_input_example_meta,
             executor=PythonObjectExecutor(
                 python_object=ReadBatchExample()))
         batch_train = af.train(input_data_list=[batch_input_example],
                                executor=PythonObjectExecutor(
                                    python_object=TrainBatchMnistModel()),
                                model_info=model_meta)
         stream_input_example = af.read_example(
             example_info=stream_input_example_meta,
             executor=PythonObjectExecutor(
                 python_object=ReadStreamExample()))
         stream_train = af.train(input_data_list=[stream_input_example],
                                 executor=PythonObjectExecutor(
                                     python_object=TrainStreamMnistModel()),
                                 model_info=model_meta)
     af.stop_before_control_dependency(stream_train, batch_train)
     workflow_id = af.run(test_util.get_project_path())
     res = af.wait_workflow_execution_finished(workflow_id)
     self.assertEqual(0, res)
Example #18
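# Flink job defined through SQL DDL tables: reads a CSV source, transforms it with a Java Flink
# executor, writes to a CSV sink, then compiles the workflow and prints the serialized job.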
def run_flink_job():
    input_file = "/test1.csv"
    output_file ="/output_test1.csv"
    if os.path.exists(output_file):
        os.remove(output_file)

    example_1 = af.create_example(name="example_1",
                                  support_type=af.ExampleSupportType.EXAMPLE_BOTH,
                                  batch_uri=input_file,
                                  stream_uri=input_file,
                                  data_format="csv")

    example_2 = af.create_example(name="example_2",
                                  support_type=af.ExampleSupportType.EXAMPLE_BOTH,
                                  batch_uri=output_file,
                                  stream_uri=output_file,
                                  data_format="csv")
    flink_config = faf.LocalFlinkJobConfig()
    flink_config.flink_home = "/Users/chenwuchao/soft/apache/flink-1.10.0"
    with af.config(flink_config):
        batch_args_1: Properties = {}
        ddl = """CREATE TABLE input_table (a STRING, b STRING, c STRING) WITH ('connector' = 'filesystem',
                'path' = 'INPUT',
                'format' = 'csv'
                )"""
        table_name = "input_table"
        batch_args_1['ddl'] = ddl
        batch_args_1['table_name'] = table_name

        stream_args_1 = batch_args_1

        batch_args_2: Properties = {}
        ddl = """CREATE TABLE output_table (aa STRING, bb STRING) WITH ('connector' = 'filesystem',
                'path' = 'OUTPUT',
                'format' = 'csv'
                )"""
        table_name = "output_table"
        batch_args_2['ddl'] = ddl
        batch_args_2['table_name'] = table_name
        stream_args_2 = batch_args_2

        input_example = af.read_example(example_info=example_1,
                                        exec_args=ExecuteProperties(
                                            batch_properties=batch_args_1,
                                            stream_properties=stream_args_1)
                                        )
        processed = af.transform(input_data_list=[input_example],
                                 executor=faf.FlinkJavaExecutor(
                                     java_class="com.apache.flink.ai.flow.TestTransformer"))

        af.write_example(input_data=processed,
                         example_info=example_2,
                         exec_args=ExecuteProperties(
                             batch_properties=batch_args_2,
                             stream_properties=stream_args_2)
                         )

    workflow = af.compile_workflow(test_util.get_project_path())
    print(json_utils.dumps(list(workflow.jobs.values())[0]))
Example #19
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_2')))

        with af.config('job_3'):
            op_3 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_3')))

    af.stop_before_control_dependency(op_3, op_1)
    af.stop_before_control_dependency(op_3, op_2)
Example #20
def run_flink_spec_job():
    with af.global_config_file(test_util.get_job_config_file()):
        with af.config('vvp_spec_job'):
            faf.vvp_job()
    workflow_id = af.run(test_util.get_project_path(),
                         dag_id='wordcount_vvp_python',
                         scheduler_type=af.SchedulerType.AIFLOW)
    res = af.wait_workflow_execution_finished(workflow_id)
    print(res)
Example #21
 def test_deploy_airflow(self):
     airflow_path = af.project_config().get_airflow_deploy_path()
     if not os.path.exists(airflow_path):
         os.makedirs(airflow_path)
     with af.config(LocalPythonJobConfig(job_name="simple")):
         op = af.user_define_operation(
             af.PythonObjectExecutor(SimpleExecutor()))
     res = af.run(test_util.get_project_path())
     af.wait_workflow_execution_finished(res)
Example #22
 def build_and_submit_ai_flow():
     with af.global_config_file(workflow_config_file()):
         with af.config('task_1'):
             cmd_executor = af.user_define_operation(
                 output_num=0,
                 executor=CmdExecutor(
                     cmd_line='echo "hello world"'.format(1)))
         workflow_info = af.workflow_operation.submit_workflow(
             'test_workflow')
     return workflow_info.workflow_name
Example #23
 def test_k8s_cmd(self):
     print(sys._getframe().f_code.co_name)
     project_path = os.path.dirname(__file__) + '/../'
     job_config = af.KubernetesCMDJobConfig()
     job_config.job_name = 'test_cmd'
     with af.config(job_config):
         cmd_executor = af.user_define_operation(
             output_num=0,
             executor=CmdExecutor(
                 cmd_line="echo 'hello world' && sleep {}".format(1)))
     code_text = af.generate_airflow_file_text(project_path, "hh")
     print(code_text)
Example #24
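# job_2 sends the 'key_1' event three times; op_1's control dependency on it is declared with
# action=TaskAction.RESTART.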
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_1',
                                      value='value_1',
                                      num=3,
                                      post_time=20)))

    af.user_define_control_dependency(op_1,
                                      op_2,
                                      event_key='key_1',
                                      event_type='UNDEFINED',
                                      event_value="value_1",
                                      action=TaskAction.RESTART)
Example #25
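 # Declares a cmd_line job with an interval periodic config (every 5 seconds),
 # lets it run for 10 seconds, then stops the execution.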
 def test_periodic_job(self):
     print(sys._getframe().f_code.co_name)
     periodic_config = PeriodicConfig(periodic_type='interval',
                                      args={'seconds': 5})
     job_config = af.BaseJobConfig(platform='local', engine='cmd_line')
     job_config.job_name = 'test_periodic'
     job_config.periodic_config = periodic_config
     with af.config(job_config):
         af.user_define_operation(executor=af.CmdExecutor(
             cmd_line="echo 'hello world!'"))
     workflow_id = af.submit_ai_flow()
     time.sleep(10)
     af.stop_execution_by_id(workflow_id)
Example #26
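        # task_3 depends on task_2 through a model-version control dependency; once task_2 has
        # executed, the test registers a GENERATED model version and waits for the DagRun to finish.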
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                with af.config('task_3'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                af.model_version_control_dependency(
                    src=executor_2,
                    dependency=executor_1,
                    model_name='model_1',
                    model_version_event_type=ModelVersionEventType.MODEL_GENERATED)
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            r_flag = True
            while True:
                with create_session() as session:
                    tes2 = session.query(TaskExecution).filter(
                        TaskExecution.dag_id == 'test_project.test_workflow',
                        TaskExecution.task_id == 'task_2').all()
                    if len(tes2) == 1 and r_flag:
                        af.register_model_version(
                            model='model_1',
                            model_path='/tmp/model/v1',
                            current_stage=af.ModelVersionStage.GENERATED)
                        r_flag = False

                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Example #27
    def test_stream_evaluate_component(self):
        input_example_meta = af.register_example(
            name='batch_train_example',
            support_type=ExampleSupportType.EXAMPLE_BATCH)
        model_meta = af.register_model(model_name='mnist_model',
                                       model_type=ModelType.SAVED_MODEL)
        stream_evaluate_example_meta = af.register_example(
            name='stream_evaluate_example',
            support_type=ExampleSupportType.EXAMPLE_STREAM)
        stream_output_file = get_file_dir(__file__) + '/stream_evaluate'
        evaluate_output = af.register_artifact(name='stream_evaluate',
                                               stream_uri=stream_output_file)
        stream_evaluate_result_example_meta = af.register_example(
            name='stream_evaluate_result_example',
            support_type=ExampleSupportType.EXAMPLE_STREAM,
            stream_uri=stream_output_file)
        if os.path.exists(stream_output_file):
            os.remove(stream_output_file)
        with af.config(
                af.BaseJobConfig(platform='local',
                                 engine='python',
                                 job_name='stream_evaluate')):
            input_example = af.read_example(
                example_info=input_example_meta,
                executor=PythonObjectExecutor(
                    python_object=ReadBatchExample()))

            batch_train = af.train(input_data_list=[input_example],
                                   executor=PythonObjectExecutor(
                                       python_object=TrainBatchMnistModel()),
                                   model_info=model_meta)
            stream_evaluate_example = af.read_example(
                example_info=stream_evaluate_example_meta,
                executor=PythonObjectExecutor(
                    python_object=ReadStreamExample()))
            stream_evaluate = af.evaluate(
                input_data_list=[stream_evaluate_example],
                model_info=model_meta,
                executor=PythonObjectExecutor(
                    python_object=EvaluateStreamMnistModel()),
                output_num=1)
            af.write_example(input_data=stream_evaluate,
                             example_info=stream_evaluate_result_example_meta,
                             executor=PythonObjectExecutor(
                                 python_object=WriteStreamExample()))
        af.stop_before_control_dependency(stream_evaluate, batch_train)
        workflow_id = af.run(test_util.get_project_path())
        res = af.wait_workflow_execution_finished(workflow_id)
        self.assertEqual(0, res)
Example #28
 def run_task_function(client: NotificationClient):
     with af.global_config_file(workflow_config_file()):
         with af.config('task_1'):
             cmd_executor = af.user_define_operation(
                 output_num=0,
                 executor=CmdExecutor(
                     cmd_line='echo "hello world" && sleep 30'.format(
                         1)))
         workflow_info = af.workflow_operation.submit_workflow(
             workflow_name)
         self.assertFalse(is_paused())
         af.workflow_operation.pause_workflow_scheduling(workflow_name)
         self.assertTrue(is_paused())
         af.workflow_operation.resume_workflow_scheduling(workflow_name)
         self.assertFalse(is_paused())
Example #29
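 # Checks config nesting: a job config declared inside a global config keeps its own engine,
 # platform, and properties while still inheriting the global properties.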
 def test_context(self):
     global_config = af.BaseJobConfig(platform='a',
                                      engine='b',
                                      properties={'c': 'c'})
     job_config = af.BaseJobConfig(platform='aa',
                                   engine='bb',
                                   properties={'cc': 'cc'})
     with af.global_config(global_config):
         with af.config(job_config):
             af.user_define_operation(executor=None)
     node_list = list(_default_ai_graph.nodes.values())
     self.assertEqual('bb', node_list[0].properties[ENGINE_NAME])
     self.assertEqual('cc', node_list[0].config.properties["cc"])
     self.assertEqual('c', node_list[0].config.properties["c"])
     self.assertEqual('bb', node_list[0].config.engine)
     self.assertEqual('aa', node_list[0].config.platform)
Example #30
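    # Drives an external trigger with model-version events: the cmd job depends on MODEL_DEPLOYED
    # versions of the registered model, and two DEPLOYED versions are registered before the
    # execution is stopped.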
    def test_stream_with_external_trigger_with_model_control(self):
        print(sys._getframe().f_code.co_name)
        model_name = 'test_create_model_version'
        model_desc = 'test create model version'
        response = af.register_model(model_name=model_name,
                                     model_type=af.ModelType.CHECKPOINT,
                                     model_desc=model_desc)

        trigger = af.external_trigger(name='stream_trigger')
        job_config = af.BaseJobConfig('local', 'cmd_line')
        job_config.job_name = 'test_cmd'
        with af.config(job_config):
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(
                    cmd_line="echo 'hello world' && sleep {}".format(1)))
        af.model_version_control_dependency(
            src=cmd_executor,
            dependency=trigger,
            model_name=model_name,
            model_version_event_type='MODEL_DEPLOYED')
        workflow_id = af.submit_ai_flow()

        model_path1 = 'fs://source1.pkl'
        model_metric1 = 'http://metric1'
        model_flavor1 = '{"flavor.version":1}'
        version_desc1 = 'test create model version1'
        time.sleep(1)
        response = af.register_model_version(
            model=model_name,
            model_path=model_path1,
            model_metric=model_metric1,
            model_flavor=model_flavor1,
            version_desc=version_desc1,
            current_stage=af.ModelVersionStage.DEPLOYED)
        time.sleep(5)
        response = af.register_model_version(
            model=model_name,
            model_path=model_path1,
            model_metric=model_metric1,
            model_flavor=model_flavor1,
            version_desc=version_desc1,
            current_stage=af.ModelVersionStage.DEPLOYED)
        time.sleep(10)
        af.stop_execution_by_id(workflow_id)
        res = af.get_ai_flow_client().list_job(5, 0)
        self.assertEqual(3, len(res))