def submit_workflow(workflow_name: Text = None) -> WorkflowInfo:
    """
    Submits the user-defined workflow to the scheduler with the given name of workflow.
    Before the submission of workflow in Scheduler Service, the metadata of workflow will be
    registered in Metadata Service.

    The submission of workflow translates the current :class:`~ai_flow.ai_graph.ai_graph.AIGraph`,
    uploads the project package, registers the metadata of the specified workflow and submits
    the workflow by Scheduler Service which delegates the submission to the
    :class:`~ai_flow.plugin_interface.scheduler_interface.Scheduler`.

    :param workflow_name: The name of the workflow.
    :return: The :class:`~ai_flow.plugin_interface.scheduler_interface.WorkflowInfo` which
             contains the information about the submitted workflow.
    """
    # Guard: an empty graph has nothing to translate or submit.
    if current_graph().is_empty():
        raise EmptyGraphException("Cannot submit empty graph")
    # Resolve the module that serves as this workflow's entry point within the project.
    entry_module_path = current_project_context().get_workflow_entry_module(workflow_name=workflow_name)
    namespace = current_project_config().get_project_name()
    # Translate the in-memory AIGraph into a Workflow object.
    translator = get_translator()
    workflow = translator.translate(graph=current_graph(),
                                    project_context=current_project_context())
    _apply_full_info_to_workflow(workflow, entry_module_path)
    # The graph has been consumed by translation; clear it so subsequent
    # workflow definitions start from a clean slate.
    current_graph().clear_graph()
    # Register the workflow's metadata on first submission only; subsequent
    # submissions of the same name reuse the existing metadata record.
    workflow_meta = get_ai_flow_client().get_workflow_by_name(project_name=current_project_config().get_project_name(),
                                                             workflow_name=workflow_name)
    if workflow_meta is None:
        get_ai_flow_client().register_workflow(name=workflow_name,
                                               project_id=int(current_project_config().get_project_uuid()))
    # Hand the serialized workflow to the Scheduler Service and convert the
    # returned proto into a WorkflowInfo for the caller.
    return proto_to_workflow(get_ai_flow_client()
                             .submit_workflow_to_scheduler(namespace=namespace,
                                                           workflow_json=json_utils.dumps(workflow),
                                                           workflow_name=workflow_name,
                                                           args={}))
def test_transform(self):
    """A read -> transform -> write chain yields three nodes joined by two edges."""
    with job_config('task_1'):
        source = ops.read_dataset(read_dataset_processor=None,
                                  dataset_info=DatasetMeta(name='dataset'))
        transformed = ops.transform(input=source, transform_processor=None)
        ops.write_dataset(input=transformed, dataset_info=DatasetMeta(name='dataset'))
    graph = current_graph()
    self.assertEqual(3, len(graph.nodes))
    self.assertEqual(2, len(graph.edges))
def test_user_define_operation(self):
    """Chained user-defined operations record their kwargs and inherit the job type."""
    with job_config('task_1'):
        upstream = ops.user_define_operation(processor=None, a='a', name='1')
        ops.user_define_operation(input=upstream, b='b', name='2')
    graph = current_graph()
    self.assertEqual(2, len(graph.nodes))
    self.assertEqual(1, len(graph.edges))
    first_node, second_node = list(graph.nodes.values())[:2]
    self.assertEqual('mock', first_node.config.job_type)
    self.assertEqual('mock', second_node.config.job_type)
    # Extra keyword arguments are captured in each node's node_config.
    self.assertEqual('a', self.get_node_by_name('1').node_config.get('a'))
    self.assertEqual('b', self.get_node_by_name('2').node_config.get('b'))
def test_model_validate(self):
    """model_validate adds a node wired to its input and stores the model metadata."""
    with job_config('task_1'):
        dataset = ops.read_dataset(read_dataset_processor=None,
                                   dataset_info=DatasetMeta(name='dataset'))
        ops.model_validate(input=dataset,
                           model_validation_processor=None,
                           model_info=ModelMeta(name='model'),
                           name='a')
    self.assertEqual(2, len(current_graph().nodes))
    self.assertEqual(1, len(current_graph().edges))
    validate_node = self.get_node_by_name('a')
    self.assertEqual('model', validate_node.node_config.get('model_info').name)
def test_add_ai_node_to_graph(self):
    """Nodes are only connected when inputs are supplied; two channels into one
    destination collapse into a single edge entry."""
    upstream_a = AINode(processor=None, arg1='arg1_1', arg2='arg2_1')
    add_ai_node_to_graph(upstream_a, inputs=None)
    upstream_b = AINode(processor=None, arg1='arg1_2', arg2='arg2_2')
    add_ai_node_to_graph(upstream_b, inputs=None)
    downstream = AINode(processor=None, arg1='arg1_3', arg2='arg2_3')
    add_ai_node_to_graph(downstream,
                         inputs=[Channel(upstream_a.node_id, 0),
                                 Channel(upstream_b.node_id, 0)])
    graph = current_graph()
    self.assertEqual(3, len(graph.nodes))
    self.assertEqual(1, len(graph.edges))
def test_read_write_dataset(self):
    """read_dataset/write_dataset create typed nodes holding their dataset metadata."""
    with job_config('task_1'):
        source = ops.read_dataset(read_dataset_processor=None,
                                  dataset_info=DatasetMeta(name='source'))
        ops.write_dataset(input=source, dataset_info=DatasetMeta(name='sink'))
    graph = current_graph()
    self.assertEqual(2, len(graph.nodes))
    self.assertEqual(1, len(graph.edges))
    for node in graph.nodes.values():
        # Each node carries the dataset metadata it was built with.
        if isinstance(node, ReadDatasetNode):
            self.assertEqual('source', node.node_config.get('dataset').name)
        elif isinstance(node, WriteDatasetNode):
            self.assertEqual('sink', node.node_config.get('dataset').name)
        self.assertEqual('mock', node.config.job_type)
def test_train(self):
    """train sits between a read and a write, recording the model metadata."""
    with job_config('task_1'):
        dataset = ops.read_dataset(read_dataset_processor=None,
                                   dataset_info=DatasetMeta(name='dataset'))
        model_channel = ops.train(input=dataset,
                                  training_processor=None,
                                  output_num=1,
                                  model_info=ModelMeta(name='model'),
                                  name='a')
        ops.write_dataset(input=model_channel, dataset_info=DatasetMeta(name='dataset'))
    self.assertEqual(3, len(current_graph().nodes))
    self.assertEqual(2, len(current_graph().edges))
    train_node = self.get_node_by_name('a')
    self.assertEqual('model', train_node.node_config.get('model_info').name)
def test_action_on_event(self):
    """action_on_event adds a control edge whose condition carries the sender event."""
    with job_config('task_1'):
        ops.user_define_operation(processor=None, a='a', name='1')
    with job_config('task_2'):
        ops.user_define_operation(processor=None, b='b', name='2')
    ops.action_on_event(job_name='task_1', sender='task_2',
                        event_key='a', event_value='a')
    graph = current_graph()
    self.assertEqual(1, len(graph.edges))
    edge: ControlEdge = graph.edges.get('task_1')[0]
    self.assertEqual('task_1', edge.destination)
    event = next(iter(edge.scheduling_rule.event_condition.events))
    self.assertEqual('task_2', event.sender)
    self.assertEqual('a', event.event_key)
    self.assertEqual('a', event.event_value)
def test_push_model(self):
    """push_model creates a single node that stores the pushed model's metadata."""
    with job_config('task_1'):
        ops.push_model(pushing_model_processor=None,
                       model_info=ModelMeta(name='model'),
                       name='a')
    self.assertEqual(1, len(current_graph().nodes))
    push_node = self.get_node_by_name('a')
    self.assertEqual('model', push_node.node_config.get('model_info').name)
def action_on_events(job_name: Text, event_condition: EventCondition, action: JobAction):
    """
    Defines a rule which is the combination of a
    :class:`~ai_flow.workflow.control_edge.EventCondition` and
    :class:`~ai_flow.workflow.control_edge.JobAction`, on the job with the given name. When the
    :class:`~ai_flow.workflow.control_edge.EventCondition` of the rule is met, the corresponding
    :class:`~ai_flow.workflow.control_edge.JobAction` will be triggered on the job. User could
    call the method multiple times on the same job to add multiple rules; the rules will be
    checked in the order of the method calls.

    :param job_name: The name of the job to add the rule.
    :param event_condition: The :class:`~ai_flow.workflow.control_edge.EventCondition` of the rule.
    :param action: The :class:`~ai_flow.workflow.control_edge.JobAction` to take when the
                   :class:`~ai_flow.workflow.control_edge.EventCondition` is met.
    """
    # Bundle condition + action into a scheduling rule and register it as a
    # control edge targeting the named job.
    scheduling_rule = SchedulingRule(event_condition, action)
    edge = ControlEdge(destination=job_name, scheduling_rule=scheduling_rule)
    current_graph().add_edge(job_name, edge)
def test_action_on_status(self):
    """action_on_job_status wires a status-change event rule between two jobs."""
    with job_config('task_1'):
        ops.user_define_operation(processor=None, a='a', name='1')
    with job_config('task_2'):
        ops.user_define_operation(processor=None, b='b', name='2')
    ops.action_on_job_status(job_name='task_1',
                             upstream_job_name='task_2',
                             upstream_job_status=Status.FINISHED,
                             action=JobAction.START)
    graph = current_graph()
    self.assertEqual(1, len(graph.edges))
    edge: ControlEdge = graph.edges.get('task_1')[0]
    self.assertEqual('task_1', edge.destination)
    event = next(iter(edge.scheduling_rule.event_condition.events))
    self.assertEqual('task_2', event.sender)
    self.assertEqual(AIFlowInternalEventType.JOB_STATUS_CHANGED, event.event_type)
    self.assertEqual(JobAction.START, edge.scheduling_rule.action)
def get_node_by_name(self, name) -> AINode:
    """Return the first node in the current graph with the given name, or None."""
    matches = (node for node in current_graph().nodes.values() if node.name == name)
    return next(matches, None)
def tearDown(self):
    """Reset the shared graph so state cannot leak between test cases."""
    graph = current_graph()
    graph.clear_graph()
def build_ai_graph(self):
    """Populate the current graph with one AINode per configured job."""
    graph = current_graph()
    for config in current_workflow_config().job_configs.values():
        node = AINode(name=config.job_name)
        node.config = config
        graph.add_node(node)