Example #1
def startScheduleWorkflow(self, request, context):
    try:
        rq: WorkflowRequest = request
        workflow_json = rq.workflow_json
        # Deserialize the workflow definition and hand it to the scheduler manager.
        workflow: Workflow = loads(workflow_json)
        workflow_id = self.scheduler_manager.schedule_workflow(workflow=workflow)
        return ScheduleResponse(return_code=0, return_msg='', data=str(workflow_id))
    except Exception as err:
        logging.error(err.args)
        return ScheduleResponse(return_code=1, return_msg=str(err), data='0')
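The ScheduleResponse above carries the outcome in return_code (0 for success, 1 for failure), the error text in return_msg, and the scheduled workflow id in data. A minimal, hypothetical sketch of how a caller could read that contract (the stub variable and error handling are illustrative and not part of the original example):

# Hypothetical caller: 'stub' is assumed to be a gRPC stub exposing startScheduleWorkflow.
response = stub.startScheduleWorkflow(request)
if response.return_code == 0:
    workflow_id = int(response.data)  # id assigned by the scheduler manager
else:
    raise RuntimeError('scheduling failed: ' + response.return_msg)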
Example #2
def load_workflow_config(config_path: Text) -> WorkFlowConfig:
    if config_path.endswith('.json'):
        # JSON config: the whole file is a serialized WorkFlowConfig.
        with open(config_path, 'r') as f:
            workflow_config_json = f.read()
        workflow_config: WorkFlowConfig = loads(workflow_config_json)
        return workflow_config
    elif config_path.endswith('.yaml'):
        # YAML config: build the WorkFlowConfig job by job.
        workflow_data = yaml_utils.load_yaml_file(config_path)
        workflow_config: WorkFlowConfig = WorkFlowConfig()
        for k, v in workflow_data.items():
            if k == GLOBAL_CONFIG_KEY:
                continue
            job_config_class = get_job_config_class(v['platform'], v['engine'])
            job_config = job_config_class()
            job_config_class.from_dict(v, job_config)
            workflow_config.add_job_config(k, job_config)
        return workflow_config
    else:
        return None
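For reference, the YAML branch above expects yaml_utils.load_yaml_file to return a mapping from job name to that job's settings, where each entry carries at least the 'platform' and 'engine' keys used to pick a job config class, and entries stored under GLOBAL_CONFIG_KEY are skipped. A hypothetical sketch of that shape (the job names and values are illustrative only, not taken from the project):

# Illustrative structure of the parsed YAML data consumed by load_workflow_config.
workflow_data = {
    'train_job':   {'platform': 'local',      'engine': 'python'},
    'predict_job': {'platform': 'kubernetes', 'engine': 'flink'},
}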
Example #3
def submitWorkflow(self, request, context):
    try:
        rq: ScheduleWorkflowRequest = request
        workflow: Workflow = json_utils.loads(rq.workflow_json)
        workflow.workflow_name = rq.workflow_name
        config = {}
        config.update(workflow.project_desc.project_config['blob'])
        # config['local_repository'] = self._scheduler_config.repository()
        # Download the uploaded project package into the local repository.
        blob_manager = BlobManagerFactory.get_blob_manager(config)
        project_path: Text = blob_manager.download_blob(
            workflow_id=workflow.workflow_id,
            remote_path=workflow.project_desc.project_config.get('uploaded_project_path'),
            local_path=self._scheduler_config.repository())

        project_desc: ProjectDesc = get_project_description_from(
            project_path)
        project_name = project_desc.project_name
        # update workflow
        workflow.project_desc = project_desc
        for n, j in workflow.jobs.items():
            j.job_config.project_desc = project_desc
            j.job_config.project_path = project_path

        workflow_info = self._scheduler.submit_workflow(
            workflow, project_desc)
        if workflow_info is None:
            return WorkflowInfoResponse(result=ResultProto(
                status=StatusProto.ERROR,
                error_message='{}, {} do not exist!'.format(
                    project_name, workflow.workflow_name)))
        return WorkflowInfoResponse(
            result=ResultProto(status=StatusProto.OK),
            workflow=workflow_to_proto(workflow_info))
    except Exception:
        return WorkflowInfoResponse(
            result=ResultProto(status=StatusProto.ERROR,
                               error_message=traceback.format_exc()))
Example #4
def test_json(self):
    # Round-trip a DataEdge through JSON and check that its fields survive.
    channel = DataEdge("a", 0)
    json_text = json_utils.dumps(channel)
    c2: DataEdge = json_utils.loads(json_text)
    self.assertEqual(channel.target_node_id, c2.target_node_id)
    self.assertEqual(channel.port, c2.port)
Example #5
def __init__(self, job_file, *args, **kwargs):
    super(KubernetesFlinkOperator, self).__init__(*args, **kwargs)
    # Read the serialized job definition from the given file and deserialize it.
    with open(file=job_file, mode='rt') as f:
        job = f.read()
        self.job = json_utils.loads(job)
    self.plugin = KubernetesFlinkJobPlugin()
Example #6
def test_json(self):
    node = BaseNode(name="a")
    json_text = json_utils.dumps(node)
    print(json_text)
    n2: BaseNode = json_utils.loads(json_text)
    self.assertEqual(node.name, n2.name)
Example #7
def __init__(self, job, *args, **kwargs):
    super(KubernetesCMDOperator, self).__init__(*args, **kwargs)
    self.job = json_utils.loads(job)
    self.plugin = KubernetesCMDJobPlugin()
Example #8
def load_graph(json_text: str) -> Graph:
    graph: Graph = loads(json_text)
    return graph
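As in the test_json examples above, load_graph is intended to reverse the matching dumps call. A minimal round-trip sketch, assuming an already-built Graph instance named graph (that variable is illustrative, not part of the original example):

# Hypothetical round trip: serialize an existing Graph, then restore it with load_graph.
json_text = json_utils.dumps(graph)
restored: Graph = load_graph(json_text)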