Code Example #1
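Submits a Flink batch job packaged as a jar: the test stages Flink's bundled WordCount.jar under the project's dependencies/jar directory, runs it through flink.FlinkJavaProcessor, asserts the job execution reaches Status.FINISHED, and cleans up the dependencies directory afterwards.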
    def test_cluster_flink_java_task(self):
        flink_home = os.environ.get('FLINK_HOME')
        word_count_jar = os.path.join(flink_home, 'examples', 'batch',
                                      'WordCount.jar')
        output_file = os.path.join(flink_home, 'log', 'output')
        if os.path.exists(output_file):
            os.remove(output_file)
        jar_dir = os.path.join(project_path, 'dependencies', 'jar')
        if not os.path.exists(jar_dir):
            os.makedirs(jar_dir)
        # Copy the jar unconditionally so the dependency is present even when
        # the directory was left behind by a previous run.
        shutil.copy(word_count_jar, jar_dir)

        args = [
            '--input',
            os.path.join(flink_home, 'conf', 'flink-conf.yaml'), '--output',
            output_file
        ]
        with af.job_config('task_2'):
            af.user_define_operation(processor=flink.FlinkJavaProcessor(
                entry_class=None, main_jar_file='WordCount.jar', args=args))
        w = af.workflow_operation.submit_workflow(
            workflow_name=af.current_workflow_config().workflow_name)
        je = af.workflow_operation.start_job_execution(job_name='task_2',
                                                       execution_id='1')
        je = af.workflow_operation.get_job_execution(job_name='task_2',
                                                     execution_id='1')
        self.assertEqual(Status.FINISHED, je.status)
        dep_dir = os.path.join(project_path, 'dependencies')
        if os.path.exists(dep_dir):
            shutil.rmtree(dep_dir)
Code Example #2
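Checks DAG generation for a single-task workflow: after submission, the generated code stored in the workflow properties should instantiate an AIFlowOperator.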
 def test_one_task(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue('op_0 = AIFlowOperator' in code)
Code Example #3
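Wires an event-driven dependency between two tasks: task_2 emits an event through SendExecutor, task_5 starts when the matching key_1/value_1 event arrives, and the test polls Airflow's DagRun table until the run reaches a finished state.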
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(
                            SendExecutor(sender='task_2',
                                         key='key_1',
                                         value='value_1',
                                         event_type='UNDEFINED',
                                         port=server_port())))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  event_key='key_1',
                                                  event_value='value_1')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            while True:
                with create_session() as session:
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Code Example #4
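Builds a workflow in which a streaming print job (job_1) is driven by events: job_2 and job_3 each send one event, and two control dependencies subscribe job_1 to them.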
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(StreamPrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_1', value='value_1')))

        with af.config('job_3'):
            op_3 = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendEventExecutor(key='key_2', value='value_2')))

    af.user_define_control_dependency(op_1,
                                      op_2,
                                      event_key='key_1',
                                      event_type='UNDEFINED',
                                      event_value="value_1")

    af.user_define_control_dependency(op_1,
                                      op_3,
                                      event_key='key_2',
                                      event_type='UNDEFINED',
                                      event_value="value_2")
Code Example #5
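A control dependency with an explicit namespace and a wildcard sender. The test itself publishes the matching event through the NotificationClient once task_2 has its first task execution recorded, then waits for the DAG run to finish.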
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  namespace='test',
                                                  event_key='key_1',
                                                  event_value='value_1',
                                                  sender='*')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            flag = True
            while True:
                with create_session() as session:
                    tes = session.query(TaskExecution).filter(
                        TaskExecution.dag_id == 'test_project.test_workflow',
                        TaskExecution.task_id == 'task_2').all()
                    if len(tes) == 1 and flag:
                        client.send_event(
                            BaseEvent(key='key_1', value='value_1'))
                        flag = False
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
Code Example #6
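A streaming job that prints incoming events while job_2 sends events with key key_1; the num=5, post_time=5 arguments presumably send five events at five-second intervals.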
def build_workflow():
    with af.global_config_file(project_path + '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            af.user_define_operation(af.PythonObjectExecutor(StreamPrintEventExecutor('job_1')))

        with af.config('job_2'):
            af.user_define_operation(af.PythonObjectExecutor(
                SendEventExecutor(key='key_1', value='value_1', num=5, post_time=5)))
Code Example #7
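The minimal bash task round trip: define a BashProcessor, submit the workflow, start a job execution, and assert it finishes.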
 def test_bash_task(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=bash.BashProcessor(
             bash_command='echo "Xiao ming hello world!"'))
     w = af.workflow_operation.submit_workflow(workflow_name='test_bash')
     je = af.workflow_operation.start_job_execution(job_name='task_1',
                                                    execution_id='1')
     je = af.workflow_operation.get_job_execution(job_name='task_1',
                                                  execution_id='1')
     self.assertEqual(Status.FINISHED, je.status)
Code Example #8
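The same round trip for a Python processor, with the workflow name taken from the current workflow config.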
 def test_python_task(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=PyProcessor1())
     w = af.workflow_operation.submit_workflow(
         workflow_name=af.current_workflow_config().workflow_name)
     je = af.workflow_operation.start_job_execution(job_name='task_1',
                                                    execution_id='1')
     je = af.workflow_operation.get_job_execution(job_name='task_1',
                                                  execution_id='1')
     self.assertEqual(Status.FINISHED, je.status)
Code Example #9
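Uses stop_before_control_dependency so that job_2 only starts after job_1 has stopped.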
def build_workflow(workflow_config_path):
    with ai_flow.global_config_file(workflow_config_path):
        with ai_flow.config('job_1'):
            op_1 = ai_flow.user_define_operation(
                ai_flow.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with ai_flow.config('job_2'):
            op_2 = ai_flow.user_define_operation(
                ai_flow.PythonObjectExecutor(PrintHelloExecutor('job_2')))

        ai_flow.stop_before_control_dependency(op_2, op_1)
Code Example #10
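A workflow entry point rather than a test: it initializes the AI Flow context, chains task_2 after task_1 via action_on_job_status (relying on the default status and action), stops any running executions, then submits and starts the workflow.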
def main():
    af.init_ai_flow_context()
    with af.job_config('task_1'):
        af.user_define_operation(BashProcessor("echo hello"))
    with af.job_config('task_2'):
        af.user_define_operation(BashProcessor("echo hello"))

    af.action_on_job_status('task_2', 'task_1')

    workflow_name = af.current_workflow_config().workflow_name
    stop_workflow_executions(workflow_name)
    af.workflow_operation.submit_workflow(workflow_name)
    af.workflow_operation.start_new_workflow_execution(workflow_name)
Code Example #11
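A three-stage Flink job (source, transform, sink) defined inside a single job config and run on a cluster; the test asserts the execution finishes.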
 def test_cluster_flink_task(self):
     with af.job_config('task_2'):
         input_example = af.user_define_operation(processor=Source())
         processed = af.transform(input=[input_example],
                                  transform_processor=Transformer())
         af.user_define_operation(input=[processed], processor=Sink())
     w = af.workflow_operation.submit_workflow(
         workflow_name=af.current_workflow_config().workflow_name)
     je = af.workflow_operation.start_job_execution(job_name='task_2',
                                                    execution_id='1')
     je = af.workflow_operation.get_job_execution(job_name='task_2',
                                                  execution_id='1')
     self.assertEqual(Status.FINISHED, je.status)
Code Example #12
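A periodic job configured with an interval PeriodicConfig of five seconds; the test lets it run for ten seconds and then stops the execution.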
 def test_periodic_job(self):
     print(sys._getframe().f_code.co_name)
     periodic_config = PeriodicConfig(periodic_type='interval',
                                      args={'seconds': 5})
     job_config = af.BaseJobConfig(platform='local', engine='cmd_line')
     job_config.job_name = 'test_periodic'
     job_config.periodic_config = periodic_config
     with af.config(job_config):
         af.user_define_operation(executor=af.CmdExecutor(
             cmd_line="echo 'hello world!'"))
     workflow_id = af.submit_ai_flow()
     time.sleep(10)
     af.stop_execution_by_id(workflow_id)
Code Example #13
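Stops a long-running bash task (sleep 10) mid-flight and asserts the execution ends up FAILED with an 'err' entry in its properties.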
 def test_stop_bash_task(self):
     time.sleep(1)
     with af.job_config('task_1'):
         af.user_define_operation(processor=bash.BashProcessor(
             bash_command='sleep 10'))
     w = af.workflow_operation.submit_workflow(workflow_name='test_bash')
     je = af.workflow_operation.start_job_execution(job_name='task_1',
                                                    execution_id='1')
     af.workflow_operation.stop_job_execution(job_name='task_1',
                                              execution_id='1')
     je = af.workflow_operation.get_job_execution(job_name='task_1',
                                                  execution_id='1')
     self.assertEqual(Status.FAILED, je.status)
     self.assertTrue('err' in je.properties)
Code Example #14
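The same stop-and-verify flow for a Python task.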
 def test_stop_python_task(self):
     time.sleep(1)
     with af.job_config('task_1'):
         af.user_define_operation(processor=PyProcessor2())
     w = af.workflow_operation.submit_workflow(workflow_name='test_python')
     je = af.workflow_operation.start_job_execution(job_name='task_1',
                                                    execution_id='1')
     time.sleep(2)
     af.workflow_operation.stop_job_execution(job_name='task_1',
                                              execution_id='1')
     je = af.workflow_operation.get_job_execution(job_name='task_1',
                                                  execution_id='1')
     self.assertEqual(Status.FAILED, je.status)
     self.assertTrue('err' in je.properties)
Code Example #15
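Configures a periodic workflow through trigger_config (the comma-separated strings appear to encode a start date with timezone and an interval) and asserts that the generated DAG code contains the expected scheduling constructs: datetime, schedule_interval, and timedelta.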
 def test_periodic_interval_workflow(self):
     workflow_config_ = af.current_workflow_config()
     workflow_config_.periodic_config = PeriodicConfig(trigger_config={
         'start_date': "2020,1,1,,,,Asia/Chongqing",
         'interval': "1,1,1,"
     })
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue('op_0 = AIFlowOperator' in code)
     self.assertTrue('datetime' in code)
     self.assertTrue('schedule_interval' in code)
     self.assertTrue('timedelta' in code)
Code Example #16
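The full control-dependency surface: a cmd-line job is bound to an external trigger with explicit condition, action, life, and value_condition settings. The test publishes two events with different values (with MetValueCondition.UPDATE, the changed value presumably fires the job a second time) and then expects three job records.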
 def test_user_define_control_dependency(self):
     print(sys._getframe().f_code.co_name)
     trigger = af.external_trigger(name='stream_trigger')
     job_config = af.BaseJobConfig('local', 'cmd_line')
     job_config.job_name = 'test_cmd'
     with af.config(job_config):
         cmd_executor = af.user_define_operation(
             output_num=0,
             executor=CmdExecutor(
                 cmd_line="echo 'hello world' && sleep {}".format(1)))
     af.user_define_control_dependency(
         src=cmd_executor,
         dependency=trigger,
         event_key='key',
         event_value='value',
         event_type='name',
         condition=MetCondition.NECESSARY,
         action=TaskAction.START,
         life=EventLife.ONCE,
         value_condition=MetValueCondition.UPDATE)
     workflow_id = af.submit_ai_flow()
     af.get_ai_flow_client().publish_event('key', 'value1', 'name')
     time.sleep(5)
     af.get_ai_flow_client().publish_event('key', 'value2', 'name')
     time.sleep(10)
     af.stop_execution_by_id(workflow_id)
     res = af.get_ai_flow_client().list_job(5, 0)
     self.assertEqual(3, len(res))
Code Example #17
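Stops a running job through workflow_operation.stop_job, waits for Airflow to mark the task instance KILLED, and then verifies the job metadata via get_job and list_jobs.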
        def run_task_function(client: NotificationClient):
            with af.global_config_file(workflow_config_file()):
                with af.config('task_1'):
                    cmd_executor = af.user_define_operation(
                        output_num=0,
                        executor=CmdExecutor(
                            cmd_line='echo "hello world" && sleep 30'))
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            we = af.workflow_operation.start_new_workflow_execution(
                workflow_name)
            while True:
                with create_session() as session:
                    ti = session.query(TaskInstance) \
                        .filter(TaskInstance.dag_id == 'test_project.test_workflow').first()
                    if ti is not None and ti.state == State.RUNNING:
                        af.workflow_operation.stop_job('task_1',
                                                       we.execution_id)
                    elif ti is not None and ti.state == State.KILLED:
                        break
                    else:
                        time.sleep(1)
            job_info = af.workflow_operation.get_job('task_1', we.execution_id)
            self.assertEqual('task_1', job_info.job_name)
            self.assertEqual(
                project_name,
                job_info.workflow_execution.workflow_info.namespace)
            self.assertEqual(
                workflow_name,
                job_info.workflow_execution.workflow_info.workflow_name)
            job_info_list = af.workflow_operation.list_jobs(we.execution_id)
            self.assertEqual(1, len(job_info_list))
Code Example #18
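Shows how nested config contexts merge: the job-level config overrides platform and engine from the global config, while the properties of both levels remain visible on the resulting graph node.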
 def test_context(self):
     global_config = af.BaseJobConfig(platform='a',
                                      engine='b',
                                      properties={'c': 'c'})
     job_config = af.BaseJobConfig(platform='aa',
                                   engine='bb',
                                   properties={'cc': 'cc'})
     with af.global_config(global_config):
         with af.config(job_config):
             af.user_define_operation(executor=None)
     node_list = list(_default_ai_graph.nodes.values())
     self.assertEqual('bb', node_list[0].properties[ENGINE_NAME])
     self.assertEqual('cc', node_list[0].config.properties["cc"])
     self.assertEqual('c', node_list[0].config.properties["c"])
     self.assertEqual('bb', node_list[0].config.engine)
     self.assertEqual('aa', node_list[0].config.platform)
Code Example #19
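Stops a locally running Flink source/transform/sink job and asserts the execution is reported as FAILED with an 'err' property.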
 def test_stop_local_flink_task(self):
     with af.job_config('task_1'):
         input_example = af.user_define_operation(processor=Source())
         processed = af.transform(input=[input_example],
                                  transform_processor=Transformer2())
         af.user_define_operation(input=[processed], processor=Sink())
     w = af.workflow_operation.submit_workflow(workflow_name='test_python')
     je = af.workflow_operation.start_job_execution(job_name='task_1',
                                                    execution_id='1')
     time.sleep(2)
     af.workflow_operation.stop_job_execution(job_name='task_1',
                                              execution_id='1')
     je = af.workflow_operation.get_job_execution(job_name='task_1',
                                                  execution_id='1')
     self.assertEqual(Status.FAILED, je.status)
     self.assertTrue('err' in je.properties)
Code Example #20
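Builds three print jobs and applies stop_before_control_dependency twice so that job_3 only starts after both job_1 and job_2 have stopped.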
def build_workflow():
    with af.global_config_file(project_path +
                               '/resources/workflow_config.yaml'):
        with af.config('job_1'):
            op_1 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_1')))

        with af.config('job_2'):
            op_2 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_2')))

        with af.config('job_3'):
            op_3 = af.user_define_operation(
                af.PythonObjectExecutor(PrintHelloExecutor('job_3')))

    af.stop_before_control_dependency(op_3, op_1)
    af.stop_before_control_dependency(op_3, op_2)
Code Example #21
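Runs a single Python task workflow, records the workflow execution info, and polls the DagRun table until the run succeeds.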
 def run_workflow(client: NotificationClient):
     with af.job_config('task_1'):
         af.user_define_operation(processor=PyProcessor1())
     w = af.workflow_operation.submit_workflow(
         workflow_name=af.current_workflow_config().workflow_name)
     wei = af.workflow_operation.start_new_workflow_execution(
         workflow_name=af.current_workflow_config().workflow_name)
     set_workflow_execution_info(wei)
     while True:
         with create_session() as session:
             dag_run = session.query(DagRun) \
                 .filter(DagRun.dag_id == 'test_project.{}'
                         .format(af.current_workflow_config().workflow_name)).first()
             if dag_run is not None and dag_run.state == State.SUCCESS:
                 break
             else:
                 time.sleep(1)
Code Example #22
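Asserts that action_on_event is reflected in the generated DAG code: the downstream operator subscribes to the event and installs an AIFlowHandler.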
 def test_two_task(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     with af.job_config('task_2'):
         af.user_define_operation(processor=None)
     af.action_on_event(job_name='task_2',
                        event_key='a',
                        event_type='a',
                        event_value='a',
                        sender='task_1')
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue(
         "op_1.subscribe_event('a', 'a', 'default', 'task_1')" in code)
     self.assertTrue(
         "op_1.set_events_handler(AIFlowHandler(configs_op_1))" in code)
Code Example #23
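Registers two status-based rules on the same pair of jobs (START when the upstream is RUNNING, STOP when it is FINISHED) and checks that both event values appear in the generated code.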
 def test_action_on_job_status_two_status(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     with af.job_config('task_2'):
         af.user_define_operation(processor=None)
     af.action_on_job_status(job_name='task_2',
                             upstream_job_name='task_1',
                             upstream_job_status=Status.RUNNING,
                             action=JobAction.START)
     af.action_on_job_status(job_name='task_2',
                             upstream_job_name='task_1',
                             upstream_job_status=Status.FINISHED,
                             action=JobAction.STOP)
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue('"event_value": "RUNNING"' in code)
     self.assertTrue('"event_value": "FINISHED"' in code)
Code Example #24
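Runs a bash task and waits until Airflow records two task executions for it; whatever triggers the second run is outside this snippet.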
        def run_workflow(client: NotificationClient):
            with af.job_config(task_name):
                af.user_define_operation(processor=bash.BashProcessor(bash_command='echo "Xiao ming hello world!"'))

            workflow_info = af.workflow_operation.submit_workflow(
                workflow_name=af.current_workflow_config().workflow_name)
            workflow_execution = af.workflow_operation.start_new_workflow_execution(
                workflow_name=af.current_workflow_config().workflow_name)
            while True:
                with create_session() as session:
                    tes = session.query(TaskExecution)\
                        .filter(TaskExecution.dag_id == 'test_project.{}'
                                .format(af.current_workflow_config().workflow_name),
                                TaskExecution.task_id == task_name).all()
                    if len(tes) == 2:
                        break
                    else:
                        time.sleep(1)
Code Example #25
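Deploys to Airflow: ensures the deploy path exists, runs the project, and blocks until the workflow execution finishes.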
 def test_deploy_airflow(self):
     airflow_path = af.project_config().get_airflow_deploy_path()
     if not os.path.exists(airflow_path):
         os.makedirs(airflow_path)
     with af.config(LocalPythonJobConfig(job_name="simple")):
         op = af.user_define_operation(
             af.PythonObjectExecutor(SimpleExecutor()))
     res = af.run(test_util.get_project_path())
     af.wait_workflow_execution_finished(res)
Code Example #26
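A small helper that defines one cmd-line task, submits it as 'test_workflow', and returns the resulting workflow name.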
 def build_and_submit_ai_flow():
     with af.global_config_file(workflow_config_file()):
         with af.config('task_1'):
             cmd_executor = af.user_define_operation(
                 output_num=0,
                 executor=CmdExecutor(
                     cmd_line='echo "hello world"'))
         workflow_info = af.workflow_operation.submit_workflow(
             'test_workflow')
     return workflow_info.workflow_name
Code Example #27
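Builds a graph of three cmd-line jobs where the first job waits for the other two to stop before it runs.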
 def build_ai_graph(sleep_time: int):
     with af.engine('cmd_line'):
         p_list = []
         for i in range(3):
             p = af.user_define_operation(executor=CmdExecutor(
                 cmd_line="echo 'hello_{}' && sleep {}".format(
                     i, sleep_time)))
             p_list.append(p)
         af.stop_before_control_dependency(p_list[0], p_list[1])
         af.stop_before_control_dependency(p_list[0], p_list[2])
Code Example #28
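The same three-job graph as a function that wires the dependencies and returns the default AIGraph.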
        def build_ai_graph() -> AIGraph:
            with af.engine('cmd_line'):
                p_list = []
                for i in range(3):
                    p = af.user_define_operation(
                        executor=CmdExecutor(cmd_line="echo 'hello_{}' && sleep 3".format(i)))
                    p_list.append(p)
                af.stop_before_control_dependency(p_list[0], p_list[1])
                af.stop_before_control_dependency(p_list[0], p_list[2])

            return af.default_graph()
Code Example #29
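Configures a Kubernetes cmd-line job via KubernetesCMDJobConfig and prints the Airflow DAG file text generated for the project.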
 def test_k8s_cmd(self):
     print(sys._getframe().f_code.co_name)
     project_path = os.path.dirname(__file__) + '/../'
     job_config = af.KubernetesCMDJobConfig()
     job_config.job_name = 'test_cmd'
     with af.config(job_config):
         cmd_executor = af.user_define_operation(
             output_num=0,
             executor=CmdExecutor(
                 cmd_line="echo 'hello world' && sleep {}".format(1)))
     code_text = af.generate_airflow_file_text(project_path, "hh")
     print(code_text)
Code Example #30
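Chains two bash tasks so that task_2 starts when task_1 finishes, then polls task_2's TaskInstance until it reaches State.SUCCESS.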
 def run_workflow(client: NotificationClient):
     with af.job_config('task_1'):
         af.user_define_operation(processor=bash.BashProcessor(bash_command='echo "Xiao ming hello world!"'))
     with af.job_config('task_2'):
         af.user_define_operation(processor=bash.BashProcessor(bash_command='echo "Xiao li hello world!"'))
     af.action_on_job_status('task_2', 'task_1', Status.FINISHED, JobAction.START)
     workflow_info = af.workflow_operation.submit_workflow(
         workflow_name=af.current_workflow_config().workflow_name)
     workflow_execution = af.workflow_operation.start_new_workflow_execution(
         workflow_name=af.current_workflow_config().workflow_name)
     while True:
         with create_session() as session:
             ti = session.query(TaskInstance)\
                 .filter(TaskInstance.dag_id ==
                         'test_project.{}'.format(af.current_workflow_config().workflow_name),
                         TaskInstance.task_id == 'task_2')\
                 .first()
             if ti is not None and ti.state == State.SUCCESS:
                 break
             else:
                 time.sleep(1)