Example No. 1
 def add_node(self, node: AINode):
     """
     Add an ai node(ai_flow.ai_graph.ai_node.AINode) to AIGraph.
     """
     if current_workflow_config() is not None \
             and current_job_name() is not None \
             and current_job_name() in current_workflow_config().job_configs:
         node.config = current_workflow_config().job_configs.get(
             current_job_name())
     self.nodes[node.node_id] = node
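A hedged usage sketch (the job name, node name and graph accessor below are assumptions, not taken from this snippet): when add_node runs inside a job_config block (see Example No. 7), current_job_name() is set, so the node inherits that job's config.

from ai_flow.ai_graph.ai_node import AINode

# Minimal sketch, assuming 'job_1' exists in the current workflow config and that
# job_config and current_graph are imported from ai_flow as in the other examples.
with job_config('job_1') as jc:
    node = AINode(name='read_dataset')  # hypothetical node name
    current_graph().add_node(node)      # add_node sets node.config to jc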
 def test_init_job_runtime_context(self):
     working_dir = os.path.dirname(__file__)
     job_runtime_env = JobRuntimeEnv(
         working_dir=working_dir,
         job_execution_info=JobExecutionInfo(
             job_name='task_1',
             workflow_execution=WorkflowExecutionInfo(
                 workflow_execution_id='1',
                 workflow_info=WorkflowInfo(workflow_name='workflow_1'))))
     init_job_runtime_context(job_runtime_env)
     self.assertEqual('workflow_1', current_workflow_config().workflow_name)
     self.assertEqual(
         'task_1',
         current_workflow_config().job_configs[current_job_name()].job_name)
     self.assertEqual('test_project',
                      current_project_config().get_project_name())
 def test_workflow_context(self):
     workflow_file = os.path.join(os.path.dirname(__file__),
                                  'workflow_1.yaml')
     init_workflow_config(workflow_file)
     workflow_config = current_workflow_config()
     self.assertEqual('workflow_1', workflow_config.workflow_name)
     self.assertEqual(3, len(workflow_config.job_configs))
Example No. 4
 def test_init_ai_flow_context(self):
     init_ai_flow_context()
     project_config = current_project_config()
     self.assertEqual('test_project', project_config.get_project_name())
     self.assertEqual('a', project_config.get('a'))
     project_context = current_project_context()
     self.assertEqual('test_project', project_context.project_name)
     workflow_config_ = current_workflow_config()
     self.assertEqual('test_ai_flow_context', workflow_config_.workflow_name)
     self.assertEqual(5, len(workflow_config_.job_configs))
Example No. 5
def _apply_full_info_to_workflow(workflow: Workflow, entry_module_path: Text):
    """
    Applies the full information to the specified :class:`~ai_flow.workflow.workflow.Workflow` with the given entry
    module path. Applying the full information sets the entry module path, uploads the project package of the
    workflow and sets the registered job plugins.

    :param workflow: The generated :class:`~ai_flow.workflow.workflow.Workflow`.
    :param entry_module_path: The entry module path of the workflow.
    """
    workflow.workflow_config = current_workflow_config()
    _set_entry_module_path(workflow, entry_module_path)
    _upload_project_package(workflow)
    _set_job_plugins(workflow)
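A minimal call-site sketch, purely illustrative; the entry module path below is a hypothetical stand-in, and in practice the workflow would come from a builder such as build_workflow in the next example.

workflow = Workflow()  # normally produced by a workflow builder, not created bare
_apply_full_info_to_workflow(workflow, entry_module_path='my_workflow_entry')
# After this call the workflow config and entry module path are set, the project
# package has been uploaded and the registered job plugins are applied.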
Example No. 6
    def build_workflow(self, split_graph: SplitGraph,
                       project_context: ProjectContext) -> Workflow:
        workflow = Workflow()
        workflow.workflow_config = current_workflow_config()
        workflow.workflow_snapshot_id = '{}.{}.{}'.format(
            project_context.project_name, workflow.workflow_name,
            round(time.time() * 1000))
        # add ai_nodes to workflow
        for sub in split_graph.nodes.values():
            if sub.config.job_type not in self.job_generator_registry.object_dict:
                raise Exception("job generator not support job_type {}".format(
                    sub.config.job_type))
            generator: JobGenerator = self.job_generator_registry \
                .get_object(sub.config.job_type)

            # set job resource dir
            job_resource_dir = os.path.join(
                project_context.get_generated_path(),
                workflow.workflow_snapshot_id, sub.config.job_name)
            if not os.path.exists(job_resource_dir):
                os.makedirs(job_resource_dir)

            job: Job = generator.generate(sub_graph=sub,
                                          resource_dir=job_resource_dir)
            job.resource_dir = job_resource_dir

            # set input output dataset
            for node in sub.nodes.values():
                if isinstance(node, ReadDatasetNode):
                    job.input_dataset_list.append(node.dataset())
                elif isinstance(node, WriteDatasetNode):
                    job.output_dataset_list.append(node.dataset())

            workflow.add_job(job)

        def validate_edge(head, tail):
            if head not in workflow.jobs:
                raise Exception(
                    'job: {} is not defined in workflow!'.format(head))
            if tail is not None and tail != '' and tail != '*' and tail not in workflow.jobs:
                raise Exception(
                    'job: {} is not defined in workflow!'.format(tail))

        # add edges to workflow
        for edges in split_graph.edges.values():
            for e in edges:
                control_edge = copy.deepcopy(e)
                validate_edge(control_edge.destination, control_edge.source)
                workflow.add_edge(control_edge.destination, control_edge)
        return workflow
Example No. 7
@contextmanager  # from contextlib; required so job_config can be used in a `with` block
def job_config(job_name: Text):
    """
    Set the current job config by job_name and yield the corresponding job config.
    :param job_name: The name of the job.
    """
    __current_job_context__.current_job_name = job_name
    __current_job_context__.job_depth += 1
    if __current_job_context__.job_depth > 1:
        raise Exception("job_config can not be nested")
    try:
        yield current_workflow_config().job_configs.get(
            __current_job_context__.current_job_name)
    finally:
        __current_job_context__.current_job_name = None
        __current_job_context__.job_depth -= 1
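A hedged sketch of the depth check above (job names are hypothetical): nesting two job_config blocks is rejected.

with job_config('job_1'):
    with job_config('job_2'):  # job_depth becomes 2 here, so this raises
        pass                   # Exception("job_config can not be nested")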
Example No. 8
def action_on_job_status(job_name: Text,
                         upstream_job_name: Text,
                         upstream_job_status: Status = Status.FINISHED,
                         action: JobAction = JobAction.START):
    """
    Triggers the job with the given name when the status of the upstream job changes.

    :param job_name: The name of the job triggered.
    :param upstream_job_name: The name of the upstream job.
    :param upstream_job_status: The status of the upstream job.
    :param action: The :class:`~ai_flow.workflow.control_edge.JobAction` acts on the source channel whose value includes
                             START, RESTART, STOP and NONE.
    """
    event_key = '.'.join(
        [current_workflow_config().workflow_name, upstream_job_name])
    action_on_event(job_name=job_name,
                    event_key=event_key,
                    event_type=AIFlowInternalEventType.JOB_STATUS_CHANGED,
                    sender=upstream_job_name,
                    event_value=upstream_job_status,
                    action=action,
                    namespace=current_project_config().get_project_name())
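A minimal usage sketch; the job names are hypothetical and only illustrate the parameters documented above.

# Start the 'train' job once the 'preprocess' job reports FINISHED.
action_on_job_status(job_name='train',
                     upstream_job_name='preprocess',
                     upstream_job_status=Status.FINISHED,
                     action=JobAction.START)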
Example No. 9
 def test_job_context(self):
     workflow_config = current_workflow_config()
     workflow_config.job_configs['job_1'] = JobConfig(job_name='job_1')
     with job_config('job_1') as jc:
         self.assertEqual(jc.job_name, current_job_name())
 def test_list_workflow(self):
     w = workflow_operation.submit_workflow(
         workflow_name=current_workflow_config().workflow_name)
     w_list = workflow_operation.list_workflows(page_size=5, offset=0)
     self.assertEqual(1, len(w_list))
     self.assertEqual('test_workflow_operation', w_list[0].workflow_name)
 def test_get_workflow(self):
     w = workflow_operation.submit_workflow(
         workflow_name=current_workflow_config().workflow_name)
     w = workflow_operation.get_workflow(
         workflow_name=current_workflow_config().workflow_name)
     self.assertEqual('test_workflow_operation', w.workflow_name)
 def build_ai_graph(self):
     g = current_graph()
     for jc in current_workflow_config().job_configs.values():
         n = AINode(name=jc.job_name)
         n.config = jc
         g.add_node(n)