Example #1
 def test_three_task(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     with af.job_config('task_2'):
         af.user_define_operation(processor=None)
     with af.job_config('task_3'):
         af.user_define_operation(processor=None)
     af.action_on_event(job_name='task_3',
                        event_key='a',
                        event_type='a',
                        event_value='a',
                        sender='task_1')
     af.action_on_job_status(job_name='task_3',
                             upstream_job_name='task_2',
                             upstream_job_status=Status.FINISHED,
                             action=JobAction.START)
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue(
         ".subscribe_event('a', 'a', 'default', 'task_1')" in code)
     # Event subscriptions with event_type JOB_STATUS_CHANGED are not supported yet,
     # so the corresponding assertion stays commented out.
     # self.assertTrue(".subscribe_event('test_dag_generator', 'JOB_STATUS_CHANGED', 'test_project', 'task_2')" in code)
     self.assertTrue(
         ".set_events_handler(AIFlowHandler(configs_op_" in code)
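These snippets are lifted from AIFlow's tests and examples, so their import headers are omitted. A minimal header that makes the names below resolve, assuming AIFlow's usual top-level re-exports (exact module paths can vary between AIFlow versions):

import time

import ai_flow as af
# Assumed re-exports; otherwise ai_flow.workflow.status.Status and
# ai_flow.workflow.control_edge.JobAction.
from ai_flow import Status, JobAction
from ai_flow_plugins.job_plugins import bash
from ai_flow_plugins.job_plugins.bash import BashProcessor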
Example #2
def main():
    af.init_ai_flow_context()
    with af.job_config('task_1'):
        af.user_define_operation(BashProcessor("echo hello"))
    with af.job_config('task_2'):
        af.user_define_operation(BashProcessor("echo hello"))

    af.action_on_job_status('task_2', 'task_1')

    workflow_name = af.current_workflow_config().workflow_name
    stop_workflow_executions(workflow_name)
    af.workflow_operation.submit_workflow(workflow_name)
    af.workflow_operation.start_new_workflow_execution(workflow_name)
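stop_workflow_executions is a helper defined alongside this example rather than part of the af API shown here. A plausible sketch, assuming the list/stop operations exposed under af.workflow_operation:

def stop_workflow_executions(workflow_name):
    # Stop any still-running executions so that the start_new_workflow_execution
    # call below produces the only live execution of this workflow.
    workflow_executions = af.workflow_operation.list_workflow_executions(workflow_name)
    for workflow_execution in workflow_executions:
        af.workflow_operation.stop_workflow_execution(workflow_execution.workflow_execution_id)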
Example #3
def main():
    af.init_ai_flow_context()
    with af.job_config('task_1'):
        af.user_define_operation(BashProcessor("sleep 30"))
    with af.job_config('task_2'):
        af.user_define_operation(BashProcessor("sleep 60"))
    with af.job_config('task_3'):
        af.user_define_operation(BashProcessor("echo hello"))

    af.action_on_job_status('task_2',
                            'task_1',
                            upstream_job_status=Status.RUNNING,
                            action=JobAction.START)
    af.action_on_job_status('task_2',
                            'task_1',
                            upstream_job_status=Status.FINISHED,
                            action=JobAction.STOP)

    af.action_on_job_status('task_3',
                            'task_1',
                            upstream_job_status=Status.RUNNING,
                            action=JobAction.START)
    af.action_on_job_status('task_3',
                            'task_2',
                            upstream_job_status=Status.KILLED,
                            action=JobAction.RESTART)

    workflow_name = af.current_workflow_config().workflow_name
    stop_workflow_executions(workflow_name)
    af.workflow_operation.submit_workflow(workflow_name)
    af.workflow_operation.start_new_workflow_execution(workflow_name)
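This workflow attaches two rules to each downstream job: task_2 starts as soon as task_1 is RUNNING and is stopped once task_1 is FINISHED, while task_3 likewise starts with task_1 and is restarted should task_2 end up KILLED.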
Example #4
 def test_action_on_job_status_two_status(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     with af.job_config('task_2'):
         af.user_define_operation(processor=None)
     af.action_on_job_status(job_name='task_2',
                             upstream_job_name='task_1',
                             upstream_job_status=Status.RUNNING,
                             action=JobAction.START)
     af.action_on_job_status(job_name='task_2',
                             upstream_job_name='task_1',
                             upstream_job_status=Status.FINISHED,
                             action=JobAction.STOP)
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue('"event_value": "RUNNING"' in code)
     self.assertTrue('"event_value": "FINISHED"' in code)
Example #5
 def run_workflow(client: NotificationClient):
     with af.job_config('task_1'):
         af.user_define_operation(processor=bash.BashProcessor(bash_command='echo "Xiao ming hello world!"'))
     with af.job_config('task_2'):
         af.user_define_operation(processor=bash.BashProcessor(bash_command='echo "Xiao li hello world!"'))
     af.action_on_job_status('task_2', 'task_1', Status.FINISHED, JobAction.START)
     workflow_info = af.workflow_operation.submit_workflow(
         workflow_name=af.current_workflow_config().workflow_name)
     workflow_execution = af.workflow_operation.start_new_workflow_execution(
         workflow_name=af.current_workflow_config().workflow_name)
     while True:
         with create_session() as session:
             ti = session.query(TaskInstance)\
                 .filter(TaskInstance.dag_id ==
                         'test_project.{}'.format(af.current_workflow_config().workflow_name),
                         TaskInstance.task_id == 'task_2')\
                 .first()
             if ti is not None and ti.state == State.SUCCESS:
                 break
             else:
                 time.sleep(1)
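The polling loop queries the scheduler's metadata database directly through Airflow's ORM (AIFlow schedules workflows on a patched Airflow). It assumes imports along these lines, though the exact paths depend on the Airflow version bundled with AIFlow:

import time

from airflow.models import TaskInstance
from airflow.utils.session import create_session
from airflow.utils.state import State
from notification_service.client import NotificationClient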
Example #6
 def test_action_on_job_status(self):
     with af.job_config('task_1'):
         af.user_define_operation(processor=None)
     with af.job_config('task_2'):
         af.user_define_operation(processor=None)
     with af.job_config('task_3'):
         af.user_define_operation(processor=None)
     af.action_on_job_status(job_name='task_2', upstream_job_name='task_1')
     af.action_on_job_status(job_name='task_3',
                             upstream_job_name='task_2',
                             upstream_job_status=Status.RUNNING,
                             action=JobAction.START)
     w = af.workflow_operation.submit_workflow(
         workflow_name='test_dag_generator')
     code = w.properties.get('code')
     self.assertTrue(
         "op_1.subscribe_event('test_dag_generator.task_1', 'TASK_STATUS_CHANGED', 'test_project', 'task_1')"
         in code)
     self.assertTrue(
         "op_2.subscribe_event('test_dag_generator.task_2', 'TASK_STATUS_CHANGED', 'test_project', 'task_2')"
         in code)
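The first rule relies on the defaults of action_on_job_status (upstream_job_status=Status.FINISHED and action=JobAction.START in AIFlow), which is why both generated operators subscribe to TASK_STATUS_CHANGED events from their respective upstream tasks.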
Example #7
def run_workflow():
    af.init_ai_flow_context()
    artifact_prefix = af.current_project_config().get_project_name() + "."
    with af.job_config('train'):
        # Model training.
        # Register metadata for the raw training dataset, then read it.
        train_dataset = af.register_dataset(name=artifact_prefix +
                                            'train_dataset',
                                            uri=DATASET_URI.format('train'))
        train_read_dataset = af.read_dataset(
            dataset_info=train_dataset, read_dataset_processor=DatasetReader())

        # Transform (preprocess) the dataset.
        train_transform = af.transform(
            input=[train_read_dataset],
            transform_processor=DatasetTransformer())

        # Register model metadata and train model
        train_model = af.register_model(model_name=artifact_prefix +
                                        'logistic-regression',
                                        model_desc='logistic regression model')
        train_channel = af.train(input=[train_transform],
                                 training_processor=ModelTrainer(),
                                 model_info=train_model)

    with af.job_config('evaluate'):
        # Evaluation of model
        evaluate_dataset = af.register_dataset(
            name=artifact_prefix + 'evaluate_dataset',
            uri=DATASET_URI.format('evaluate'))
        evaluate_read_dataset = af.read_dataset(
            dataset_info=evaluate_dataset,
            read_dataset_processor=EvaluateDatasetReader())
        evaluate_transform = af.transform(
            input=[evaluate_read_dataset],
            transform_processor=EvaluateTransformer())
        # Register the disk path used to save the evaluation result.
        evaluate_artifact_name = artifact_prefix + 'evaluate_artifact'
        evaluate_artifact = af.register_artifact(name=evaluate_artifact_name,
                                                 uri=get_file_dir(__file__) +
                                                 '/evaluate_result')
        # Evaluate model
        evaluate_channel = af.evaluate(
            input=[evaluate_transform],
            model_info=train_model,
            evaluation_processor=ModelEvaluator(evaluate_artifact_name))

    with af.job_config('validate'):
        # Model validation.
        # Read the validation dataset and validate the model before it is
        # used for prediction.

        validate_dataset = af.register_dataset(
            name=artifact_prefix + 'validate_dataset',
            uri=DATASET_URI.format('evaluate'))
        validate_read_dataset = af.read_dataset(
            dataset_info=validate_dataset,
            read_dataset_processor=ValidateDatasetReader())
        validate_transform = af.transform(
            input=[validate_read_dataset],
            transform_processor=ValidateTransformer())
        validate_artifact_name = artifact_prefix + 'validate_artifact'
        validate_artifact = af.register_artifact(name=validate_artifact_name,
                                                 uri=get_file_dir(__file__) +
                                                 '/validate_result')
        validate_channel = af.model_validate(
            input=[validate_transform],
            model_info=train_model,
            model_validation_processor=ModelValidator(validate_artifact_name))
    with af.job_config('push'):
        # Push model to serving
        # Register metadata of pushed model
        push_model_artifact_name = artifact_prefix + 'push_model_artifact'
        push_model_artifact = af.register_artifact(
            name=push_model_artifact_name,
            uri=get_file_dir(__file__) + '/pushed_model')
        af.push_model(
            model_info=train_model,
            pushing_model_processor=ModelPusher(push_model_artifact_name))

    with af.job_config('predict'):
        # Prediction (inference).
        predict_dataset = af.register_dataset(
            name=artifact_prefix + 'predict_dataset',
            uri=DATASET_URI.format('predict'))
        predict_read_dataset = af.read_dataset(
            dataset_info=predict_dataset,
            read_dataset_processor=PredictDatasetReader())
        predict_transform = af.transform(
            input=[predict_read_dataset],
            transform_processor=PredictTransformer())
        predict_channel = af.predict(input=[predict_transform],
                                     model_info=train_model,
                                     prediction_processor=ModelPredictor())
        # Save prediction result
        write_dataset = af.register_dataset(
            name=artifact_prefix + 'write_dataset',
            uri=get_file_dir(__file__) + '/predict_result')
        af.write_dataset(input=predict_channel,
                         dataset_info=write_dataset,
                         write_dataset_processor=DatasetWriter())

    # Define the relation graph connected by control edges:
    # train -> evaluate -> validate -> push -> predict.
    # Control dependencies apply to the whole workflow, so they are declared
    # outside the job_config blocks.
    af.action_on_job_status('evaluate', 'train')
    af.action_on_job_status('validate', 'evaluate')
    af.action_on_job_status('push', 'validate')
    af.action_on_job_status('predict', 'push')

    # Run workflow
    af.workflow_operation.submit_workflow(
        af.current_workflow_config().workflow_name)
    af.workflow_operation.start_new_workflow_execution(
        af.current_workflow_config().workflow_name)
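The processor classes referenced throughout this example (DatasetReader, ModelTrainer, ModelEvaluator, ModelValidator, ModelPusher, ModelPredictor, DatasetWriter) are defined alongside the workflow and are not shown here. As a purely illustrative sketch of their shape, assuming AIFlow's Python job plugin interface:

from typing import List

# Assumed plugin path; it may differ between AIFlow versions.
from ai_flow_plugins.job_plugins.python import PythonProcessor


class DatasetReader(PythonProcessor):
    """Hypothetical reader for the dataset registered via af.register_dataset()."""

    def process(self, execution_context, input_list: List) -> List:
        # A real implementation would load the data from the registered
        # dataset URI and return it as this node's output channel.
        train_data = ...  # e.g. pandas.read_csv(DATASET_URI.format('train'))
        return [train_data]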