def submitWorkflow(self, request, context):
    """Handle a workflow-submission request and hand it to the scheduler.

    NOTE(review): the (request, context) signature and proto-style responses
    suggest a gRPC servicer method — confirm against the service definition.

    :param request: ScheduleWorkflowRequest carrying the workflow JSON payload.
    :param context: RPC call context (not used in this method body).
    :return: WorkflowInfoResponse — OK with the workflow proto on success,
        otherwise an ERROR result; errors are returned, never raised.
    """
    try:
        rq: ScheduleWorkflowRequest = request
        # Reject an empty/missing payload before attempting to deserialize.
        if rq.workflow_json is None or '' == rq.workflow_json:
            return WorkflowInfoResponse(result=ResultProto(
                status=StatusProto.ERROR,
                error_message='workflow json is empty!'))
        workflow: Workflow = json_utils.loads(rq.workflow_json)
        # Blob-manager settings are taken from the workflow's 'blob' properties.
        config = {}
        config.update(workflow.properties['blob'])
        blob_manager = BlobManagerFactory.get_blob_manager(config)
        # Download the project snapshot into the scheduler service's local
        # repository directory before submitting.
        project_path: Text = blob_manager \
            .download_project(workflow_snapshot_id=workflow.workflow_snapshot_id,
                              remote_path=workflow.project_uri,
                              local_path=self._scheduler_service_config.repository())
        project_context: ProjectContext = build_project_context(
            project_path)
        project_name = project_context.project_name
        workflow_info = self._scheduler.submit_workflow(
            workflow, project_context)
        # A None result from the scheduler is reported as "does not exist".
        if workflow_info is None:
            return WorkflowInfoResponse(result=ResultProto(
                status=StatusProto.ERROR,
                error_message='{}, {} do not exist!'.format(
                    project_name, workflow.workflow_name)))
        return WorkflowInfoResponse(
            result=ResultProto(status=StatusProto.OK),
            workflow=workflow_to_proto(workflow_info))
    except Exception as err:
        # Surface the full traceback to the caller in the error message
        # rather than letting the exception cross the RPC boundary.
        return WorkflowInfoResponse(
            result=ResultProto(status=StatusProto.ERROR,
                               error_message=traceback.format_exc()))
def test_event_condition_serde(self):
    """Round-trip an EventCondition through JSON and verify equality.

    Also checks that re-adding events already present in the loaded copy
    keeps it equal to the original.
    """
    events = (('k1', 'v1'), ('k2', 'v2'))
    original = EventCondition([], ConditionType.MEET_ANY)
    for key, value in events:
        original.add_event(event_key=key, event_value=value)
    restored = json_utils.loads(json_utils.dumps(original))
    self.assertEqual(original, restored)
    # Duplicate adds must not break equality with the original.
    for key, value in events:
        restored.add_event(event_key=key, event_value=value)
    self.assertEqual(original, restored)
def test_graph_serde(self):
    """Serialize a three-node graph with two edges into node 0, then load it back."""
    graph = Graph()
    nodes = []
    for index in range(3):
        current = Node(name=str(index))
        graph.add_node(current)
        nodes.append(current)
    sink = nodes[0].node_id
    for origin in (nodes[1], nodes[2]):
        graph.add_edge(sink, Edge(destination=sink, source=origin.node_id))
    restored: Graph = json_utils.loads(json_utils.dumps(graph))
    self.assertEqual(3, len(restored.nodes))
    self.assertEqual(2, len(graph.edges.get(sink)))
def load_workflow_config(config_path: Text) -> WorkflowConfig:
    """
    Load the workflow configuration file.

    :param config_path: Workflow configuration file path (.json or .yaml).
    :return: WorkflowConfig, or None when the path has neither extension.
    """
    if config_path.endswith('.json'):
        with open(config_path, 'r') as f:
            return loads(f.read())
    if not config_path.endswith('.yaml'):
        return None
    # The file name minus the 5-character '.yaml' suffix is the workflow name.
    workflow_name = os.path.basename(config_path)[:-5]
    workflow_data = yaml_utils.load_yaml_file(config_path)
    workflow_config = WorkflowConfig(workflow_name=workflow_name)
    # Workflow-level sections.
    if PERIODIC_CONFIG in workflow_data:
        workflow_config.periodic_config = PeriodicConfig.from_dict(
            workflow_data.get(PERIODIC_CONFIG))
    if WORKFLOW_PROPERTIES in workflow_data:
        workflow_config.properties = workflow_data[WORKFLOW_PROPERTIES]
    if WORKFLOW_DEPENDENCIES in workflow_data:
        workflow_config.dependencies = workflow_data[WORKFLOW_DEPENDENCIES]
    # Every remaining top-level key is a job definition.
    reserved = (WORKFLOW_DEPENDENCIES, WORKFLOW_PROPERTIES, PERIODIC_CONFIG)
    for job_name, job_data in workflow_data.items():
        if job_name in reserved:
            continue
        workflow_config.add_job_config(job_name, JobConfig.from_dict({job_name: job_data}))
        # A job may carry its own periodic schedule.
        if PERIODIC_CONFIG in job_data:
            workflow_config.job_periodic_config_dict[job_name] = \
                PeriodicConfig.from_dict(job_data.get(PERIODIC_CONFIG))
    return workflow_config
def test_workflow_serde(self):
    """Round-trip a Workflow built from workflow_1.yaml through JSON."""
    config_path = os.path.join(os.path.dirname(__file__), 'workflow_1.yaml')
    workflow = Workflow()
    workflow.workflow_config = load_workflow_config(config_path)
    jobs = []
    for cfg in workflow.workflow_config.job_configs.values():
        current = Job(job_config=cfg)
        workflow.add_job(current)
        jobs.append(current)
    target = jobs[0].job_name
    # Two control edges into the first job, each fired by a different event.
    for key, value in (('a', 'a'), ('b', 'b')):
        rule = SchedulingRule(MeetAnyEventCondition().add_event(key, value),
                              JobAction.START)
        workflow.add_edge(target, ControlEdge(destination=target,
                                              scheduling_rule=rule))
    restored: Workflow = json_utils.loads(json_utils.dumps(workflow))
    self.assertEqual(3, len(restored.jobs))
    self.assertEqual(2, len(restored.edges.get(target)))
def _parse_configs(scheduling_rules_json_str: str) -> List[SchedulingRule]:
    """Deserialize a JSON string into the list of SchedulingRule it encodes."""
    return json_utils.loads(scheduling_rules_json_str)
def load_graph(json_text: str) -> Graph:
    """Reconstruct a Graph from its JSON representation (json_text)."""
    return loads(json_text)
def test_scheduling_rule_serde(self):
    """A SchedulingRule survives a dumps/loads round trip unchanged."""
    rule = SchedulingRule(MeetAllEventCondition(), JobAction.START)
    self.assertEqual(rule, json_utils.loads(json_utils.dumps(rule)))
def test_edge_serde(self):
    """An Edge's source attribute survives JSON serialization."""
    original = Edge("a", 'b')
    restored: Edge = json_utils.loads(json_utils.dumps(original))
    self.assertEqual(original.source, restored.source)
def test_node_serde(self):
    """A Node's name attribute survives JSON serialization."""
    original = Node(name="a")
    restored: Node = json_utils.loads(json_utils.dumps(original))
    self.assertEqual(original.name, restored.name)