def create_bash_workflow() -> Workflow:
    workflow = Workflow()
    workflow.project_desc = ProjectDesc()
    workflow.project_desc.project_name = "workflow_1"
    for i in range(3):
        job = TestDAGGenerator.create_bash_job(i)
        workflow.add_job(job)
    # 1_job and 2_job each wait on an externally sent event rather than on
    # another job, so target_node_id is left empty.
    deps_1 = [
        JobControlEdge(target_node_id='',
                       source_node_id='1_job',
                       met_config=MetConfig(event_key='key_1',
                                            event_value='value_1',
                                            event_type=UNDEFINED_EVENT_TYPE))
    ]
    deps_2 = [
        JobControlEdge(target_node_id='',
                       source_node_id='2_job',
                       met_config=MetConfig(event_key='key_2',
                                            event_value='value_2',
                                            event_type=UNDEFINED_EVENT_TYPE))
    ]
    workflow.add_edges("1_job", deps_1)
    workflow.add_edges("2_job", deps_2)
    workflow.workflow_id = 1
    return workflow
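# A minimal sanity check for the workflow above (a sketch, not from the
# original source). It assumes Workflow keeps jobs in a `jobs` dict and
# control edges in an `edges` dict keyed by source job id, matching how
# build_workflow below iterates workflow.jobs.values() and how
# add_edges("1_job", deps_1) is keyed here.
def check_bash_workflow():
    workflow = create_bash_workflow()
    assert len(workflow.jobs) == 3
    # both externally triggered jobs registered exactly one control edge
    assert len(workflow.edges['1_job']) == 1
    assert len(workflow.edges['2_job']) == 1
    # the gating event for 1_job is the user-defined (key_1, value_1) pair
    met = workflow.edges['1_job'][0].met_config
    assert (met.event_key, met.event_value) == ('key_1', 'value_1')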
def create_workflow() -> Workflow:
    workflow = Workflow()
    workflow.project_desc = ProjectDesc()
    workflow.project_desc.project_name = "workflow_1"
    for i in range(6):
        job = TestDAGGenerator.create_dummy_job(i)
        if i == 2:
            job.job_config = SendEventJobConfig('localhost:50051', 'key_1',
                                                'value_1', UNDEFINED_EVENT_TYPE)
        elif i == 3:
            job.job_config = SendEventJobConfig('localhost:50051', 'key_2',
                                                'value_2', UNDEFINED_EVENT_TYPE)
        elif i == 5:
            job.job_config = SendEventJobConfig('localhost:50051', 'key_2',
                                                'value_2', "STOP_SCHEDULER_CMD")
        workflow.add_job(job)
    # 2_job starts only after both 0_job and 1_job report FINISHED.
    dependencies = [
        JobControlEdge(target_node_id='0_job',
                       source_node_id='2_job',
                       met_config=MetConfig(
                           event_key=generate_job_status_key('0_job'),
                           event_value=State.FINISHED.value)),
        JobControlEdge(target_node_id='1_job',
                       source_node_id='2_job',
                       met_config=MetConfig(
                           event_key=generate_job_status_key('1_job'),
                           event_value=State.FINISHED.value))
    ]
    workflow.add_edges("2_job", dependencies)
    # 4_job waits for the custom events sent by 2_job and 3_job.
    dependencies = [
        JobControlEdge(target_node_id='2_job',
                       source_node_id='4_job',
                       met_config=MetConfig(event_key='key_1',
                                            event_value='value_1',
                                            event_type=UNDEFINED_EVENT_TYPE)),
        JobControlEdge(target_node_id='3_job',
                       source_node_id='4_job',
                       met_config=MetConfig(event_key='key_2',
                                            event_value='value_2',
                                            event_type=UNDEFINED_EVENT_TYPE))
    ]
    workflow.add_edges("4_job", dependencies)
    # 5_job waits for its upstream 4_job to reach FINISHED.
    dependencies = [
        JobControlEdge(target_node_id='4_job',
                       source_node_id='5_job',
                       met_config=MetConfig(
                           event_key=generate_job_status_key('4_job'),
                           event_value=State.FINISHED.value))
    ]
    workflow.add_edges("5_job", dependencies)
    workflow.workflow_id = 1
    return workflow
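# Illustrative only: walks the DAG built above and prints what each job
# waits for. It assumes the same `edges` dict layout as the sketch after
# create_bash_workflow; the attribute names on JobControlEdge and MetConfig
# are taken from the constructor calls used in create_workflow.
def describe_dependencies(workflow: Workflow):
    for source_id, control_edges in workflow.edges.items():
        for edge in control_edges:
            met = edge.met_config
            upstream = edge.target_node_id or '<external event>'
            print("{} waits on {}: event ({}, {})".format(
                source_id, upstream, met.event_key, met.event_value))

# Expected shape of the output for create_workflow():
#   2_job waits on 0_job: event (<status key of 0_job>, FINISHED)
#   2_job waits on 1_job: event (<status key of 1_job>, FINISHED)
#   4_job waits on 2_job: event (key_1, value_1)
#   4_job waits on 3_job: event (key_2, value_2)
#   5_job waits on 4_job: event (<status key of 4_job>, FINISHED)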
def build_workflow(self, split_graph: SplitGraph,
                   project_desc: ProjectDesc) -> Workflow:
    sub_id_to_job_id = {}
    workflow = Workflow()
    workflow.project_desc = project_desc
    # add ai_nodes to workflow
    for sub in split_graph.nodes.values():
        if (sub.config.platform, sub.config.engine) \
                not in self.job_generator_registry.object_dict:
            raise Exception(
                "job generator does not support platform {} engine {}".format(
                    sub.config.platform, sub.config.engine))
        generator: BaseJobGenerator = self.job_generator_registry \
            .get_object((sub.config.platform, sub.config.engine))
        job: BaseJob = generator.generate(sub_graph=sub,
                                          project_desc=project_desc)
        job.job_config.project_desc = project_desc
        if job.job_name is None:
            job.job_name = job.job_config.job_name
        workflow.add_job(job)
        sub_id_to_job_id[sub.instance_id] = job.instance_id
    # add edges to workflow, remapping sub-graph ids to the generated job ids
    for edges in split_graph.edges.values():
        for e in edges:
            control_edge = copy.deepcopy(e)
            control_edge.source_node_id = sub_id_to_job_id[e.source_node_id]
            control_edge.target_node_id = sub_id_to_job_id[e.target_node_id]
            job_edge: JobControlEdge = control_edge_to_job_edge(
                control_edge=control_edge)
            workflow.add_edge(sub_id_to_job_id[e.source_node_id], job_edge)
    self.register_workflow_execution(workflow)
    # generate per-job resources once every job and edge is in place
    for job in workflow.jobs.values():
        job.job_config.project_path = project_desc.project_path
        if job.job_config.exec_mode is None:
            job.job_config.exec_mode = ExecutionMode.BATCH
        generator: BaseJobGenerator = self.job_generator_registry \
            .get_object((job.platform, job.exec_engine))
        generator.generate_job_resource(job)
    return workflow
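# How a generator plugs into build_workflow (a sketch under assumptions):
# BaseJobGenerator subclasses are looked up by their (platform, engine)
# pair, and build_workflow calls generate() once per sub-graph and
# generate_job_resource() once per job. The class below and the
# registration call are hypothetical stand-ins; BaseJob's real constructor
# signature is not shown in this file, so its use here is an assumption.
class ExampleBashJobGenerator(BaseJobGenerator):
    """Hypothetical generator: translates one sub-graph into one job."""

    def generate(self, sub_graph, project_desc: ProjectDesc) -> BaseJob:
        job = BaseJob()  # construction details depend on BaseJob's real API
        # a real generator would translate the sub-graph's ai_nodes into
        # the job's payload; here we only carry the config through
        job.job_config = sub_graph.config
        return job

    def generate_job_resource(self, job: BaseJob) -> None:
        # write the artifacts (scripts, configs) the job needs under
        # job.job_config.project_path; nothing to do for this stub
        pass

# Hypothetical registration keyed the way build_workflow looks it up:
# self.job_generator_registry.register_object(('bash', 'bash'),
#                                             ExampleBashJobGenerator())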