def run_task_function(client: NotificationClient):
    """Run a workflow where task_5 is gated on a user-defined event.

    Once task_2 has one TaskExecution row, the test client sends the
    unblocking event exactly once, then polls until the DagRun finishes.
    """
    with af.global_config_file(workflow_config_file()):
        with af.config('task_2'):
            upstream_job = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        with af.config('task_5'):
            gated_job = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        af.user_define_control_dependency(src=gated_job,
                                          dependency=upstream_job,
                                          namespace='test',
                                          event_key='key_1',
                                          event_value='value_1',
                                          sender='*')
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        af.workflow_operation.start_new_workflow_execution(workflow_name)
        event_pending = True
        while True:
            with create_session() as session:
                executions = session.query(TaskExecution).filter(
                    TaskExecution.dag_id == 'test_project.test_workflow',
                    TaskExecution.task_id == 'task_2').all()
                if event_pending and len(executions) == 1:
                    # task_2 has run once; release task_5 by sending the
                    # event it depends on (send only once).
                    client.send_event(BaseEvent(key='key_1', value='value_1'))
                    event_pending = False
                dag_run = session.query(DagRun).filter(
                    DagRun.dag_id == 'test_project.test_workflow').first()
                if dag_run is not None and dag_run.state in State.finished:
                    break
                time.sleep(1)
def run_task_function(client: NotificationClient):
    """Start a long-running cmd job, stop it while RUNNING, and verify it
    ends KILLED and that job-info queries report the expected metadata.

    Fixes: the original ``elif ti.state == State.KILLED`` branch raised
    AttributeError when ``ti`` was still None (the None guard only covered
    the first branch). Also drops a no-op ``.format(1)`` on a placeholder-free
    string.
    """
    with af.global_config_file(workflow_config_file()):
        with af.config('task_1'):
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(
                    # sleep keeps the task RUNNING long enough to stop it
                    cmd_line='echo "hello world" && sleep 30'))
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        we = af.workflow_operation.start_new_workflow_execution(workflow_name)
        while True:
            with create_session() as session:
                ti = session.query(TaskInstance) \
                    .filter(TaskInstance.dag_id == 'test_project.test_workflow').first()
                if ti is None:
                    # Task instance not created yet; poll again.
                    time.sleep(1)
                elif ti.state == State.RUNNING:
                    af.workflow_operation.stop_job('task_1', we.execution_id)
                elif ti.state == State.KILLED:
                    break
                else:
                    time.sleep(1)
        job_info = af.workflow_operation.get_job('task_1', we.execution_id)
        self.assertEqual('task_1', job_info.job_name)
        self.assertEqual(
            project_name, job_info.workflow_execution.workflow_info.namespace)
        self.assertEqual(
            workflow_name,
            job_info.workflow_execution.workflow_info.workflow_name)
        job_info_list = af.workflow_operation.list_jobs(we.execution_id)
        self.assertEqual(1, len(job_info_list))
def run_task_function(client: NotificationClient):
    """Run a workflow where task_2 itself emits an UNDEFINED-type event
    (via SendExecutor) that task_5's control dependency waits on, then poll
    until the DagRun reaches a finished state."""
    with af.global_config_file(workflow_config_file()):
        with af.config('task_2'):
            sender_job = af.user_define_operation(
                af.PythonObjectExecutor(
                    SendExecutor(sender='task_2',
                                 key='key_1',
                                 value='value_1',
                                 event_type='UNDEFINED',
                                 port=server_port())))
        with af.config('task_5'):
            receiver_job = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        af.user_define_control_dependency(src=receiver_job,
                                          dependency=sender_job,
                                          event_key='key_1',
                                          event_value='value_1')
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        af.workflow_operation.start_new_workflow_execution(workflow_name)
        while True:
            with create_session() as session:
                dag_run = session.query(DagRun).filter(
                    DagRun.dag_id == 'test_project.test_workflow').first()
                if dag_run is not None and dag_run.state in State.finished:
                    break
                time.sleep(1)
def run_task_function(client: NotificationClient):
    """Build a Flink source -> transform -> sink pipeline in one job config,
    run the workflow, and wait for the DagRun to finish."""
    with af.global_config_file(workflow_config_file()):
        with af.config(task_config):
            source_data = af.read_example(
                example_info=example_1,
                executor=faf.flink_executor.FlinkPythonExecutor(
                    python_object=Source()))
            transformed = af.transform(
                input_data_list=[source_data],
                executor=faf.flink_executor.FlinkPythonExecutor(
                    python_object=Transformer()))
            af.write_example(
                input_data=transformed,
                example_info=example_2,
                executor=faf.flink_executor.FlinkPythonExecutor(
                    python_object=Sink()))
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        af.workflow_operation.start_new_workflow_execution(workflow_name)
        while True:
            with create_session() as session:
                dag_run = session.query(DagRun).filter(
                    DagRun.dag_id == 'test_project.test_workflow').first()
                if dag_run is not None and dag_run.state in State.finished:
                    break
                time.sleep(1)
def run_task_function(client: NotificationClient):
    """Submit (but do not run) a workflow and verify that pausing and
    resuming scheduling flips the paused flag as expected.

    Fixes: drops a no-op ``.format(1)`` call on a cmd string that contains
    no replacement fields.
    """
    with af.global_config_file(workflow_config_file()):
        with af.config('task_1'):
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(
                    cmd_line='echo "hello world" && sleep 30'))
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        # Freshly submitted workflows start unpaused.
        self.assertFalse(is_paused())
        af.workflow_operation.pause_workflow_scheduling(workflow_name)
        self.assertTrue(is_paused())
        af.workflow_operation.resume_workflow_scheduling(workflow_name)
        self.assertFalse(is_paused())
def run_task_function(client: NotificationClient):
    """Run a workflow with a single SimpleExecutor job and block until the
    corresponding DagRun reaches a finished state."""
    with af.global_config_file(workflow_config_file()):
        with af.config('task_2'):
            executor = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        af.workflow_operation.start_new_workflow_execution(workflow_name)
        finished = False
        while not finished:
            with create_session() as session:
                dag_run = session.query(DagRun).filter(
                    DagRun.dag_id == 'test_project.test_workflow').first()
                if dag_run is not None and dag_run.state in State.finished:
                    finished = True
                else:
                    time.sleep(1)
def run_task_function(client: NotificationClient):
    """Run a single echo job and poll until its DagRun reaches SUCCESS.

    Fixes: drops a no-op ``.format(1)`` call on a cmd string that contains
    no replacement fields.
    """
    with af.global_config_file(workflow_config_file()):
        with af.config('task_1'):
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(cmd_line='echo "hello world"'))
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        af.workflow_operation.start_new_workflow_execution(workflow_name)
        while True:
            with create_session() as session:
                dag_run = session.query(DagRun)\
                    .filter(DagRun.dag_id == 'test_project.{}'.format(workflow_name)).first()
                if dag_run is not None and dag_run.state == State.SUCCESS:
                    break
                else:
                    time.sleep(1)
def run_task_function(client: NotificationClient):
    """Exercise workflow CRUD: submit, get, list, delete, and verify that
    getting a deleted workflow raises.

    Fixes: drops a no-op ``.format(1)`` call on a cmd string that contains
    no replacement fields.
    """
    with af.global_config_file(workflow_config_file()):
        with af.config('task_1'):
            cmd_executor = af.user_define_operation(
                output_num=0,
                executor=CmdExecutor(cmd_line='echo "hello world"'))
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        workflow_info = af.workflow_operation.get_workflow(workflow_name)
        self.assertEqual(workflow_name, workflow_info.workflow_name)
        self.assertEqual(project_name, workflow_info.namespace)
        workflow_info_list = af.workflow_operation.list_workflows()
        self.assertEqual(1, len(workflow_info_list))
        workflow_info = af.workflow_operation.delete_workflow(workflow_name)
        # After deletion, lookup must fail.
        with self.assertRaises(Exception) as context:
            workflow_info = af.workflow_operation.get_workflow(workflow_name)
def run_task_function(client: NotificationClient):
    """Run a workflow where task_3 depends on a MODEL_GENERATED event for
    'model_1'; register a model version once task_2 has executed, then wait
    for the DagRun to finish."""
    with af.global_config_file(workflow_config_file()):
        with af.config('task_2'):
            producer_job = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        with af.config('task_3'):
            consumer_job = af.user_define_operation(
                af.PythonObjectExecutor(SimpleExecutor()))
        af.model_version_control_dependency(
            src=consumer_job,
            dependency=producer_job,
            model_name='model_1',
            model_version_event_type=ModelVersionEventType.MODEL_GENERATED)
        workflow_info = af.workflow_operation.submit_workflow(workflow_name)
        af.workflow_operation.start_new_workflow_execution(workflow_name)
        registration_pending = True
        while True:
            with create_session() as session:
                task2_executions = session.query(TaskExecution).filter(
                    TaskExecution.dag_id == 'test_project.test_workflow',
                    TaskExecution.task_id == 'task_2').all()
                if registration_pending and len(task2_executions) == 1:
                    # task_2 ran; registering the model version fires the
                    # MODEL_GENERATED event task_3 is waiting on (once).
                    af.register_model_version(
                        model='model_1',
                        model_path='/tmp/model/v1',
                        current_stage=af.ModelVersionStage.GENERATED)
                    registration_pending = False
                dag_run = session.query(DagRun).filter(
                    DagRun.dag_id == 'test_project.test_workflow').first()
                if dag_run is not None and dag_run.state in State.finished:
                    break
                time.sleep(1)