def is_peer_job_inheritance_matched(workflow):
    # TODO: Move it to workflow service
    """Check that every federated job's create-flag agrees with the peer's.

    A workflow that is not a fork, or whose project is missing, is
    trivially considered matched.

    Args:
        workflow: the local (forked) workflow to compare.

    Returns:
        True when every federated job definition shared with the peer's
        parent workflow carries the same create-job flag; False otherwise.

    Raises:
        InternalException: when the peer's parent workflow cannot be
            fetched over RPC.
    """
    if workflow.forked_from is None:
        return True
    flags = workflow.get_create_job_flags()
    peer_flags = workflow.get_peer_create_job_flags()
    definitions = workflow.get_config().job_definitions
    project = workflow.project
    if project is None:
        return True
    project_config = project.get_config()
    # TODO: Fix for multi-peer
    client = RpcClient(project_config, project_config.participants[0])
    parent = db.session.query(Workflow).get(workflow.forked_from)
    resp = client.get_workflow(parent.name)
    if resp.status.code != common_pb2.STATUS_SUCCESS:
        emit_counter('get_workflow_failed', 1)
        raise InternalException(resp.status.msg)
    peer_definitions = resp.config.job_definitions
    for i, definition in enumerate(definitions):
        if not definition.is_federated:
            continue
        for j, peer_definition in enumerate(peer_definitions):
            if (definition.name == peer_definition.name
                    and flags[i] != peer_flags[j]):
                return False
    return True
def get(self, workflow_id):
    """Collect this workflow's state from every project participant.

    Args:
        workflow_id: primary key of the local workflow.

    Returns:
        A tuple of ({'data': {participant_name: serialized GetWorkflow
        response}}, HTTPStatus.OK).

    Raises:
        InternalException: when any participant reports a non-success
            RPC status.
    """
    workflow = _get_workflow(workflow_id)
    project_config = workflow.project.get_config()
    peer_workflows = {}
    for party in project_config.participants:
        client = RpcClient(project_config, party)
        resp = client.get_workflow(workflow.name)
        if resp.status.code != common_pb2.STATUS_SUCCESS:
            # Propagate the peer's error message instead of raising an
            # empty exception — consistent with the other peer-workflow
            # handlers in this file.
            raise InternalException(resp.status.msg)
        peer_workflows[party.name] = MessageToDict(
            resp,
            preserving_proto_field_name=True,
            including_default_value_fields=True)
    return {'data': peer_workflows}, HTTPStatus.OK
def get(self, workflow_id):
    """Collect this workflow's state (with decoded pod info) from peers.

    Args:
        workflow_id: primary key of the local workflow.

    Returns:
        A tuple of ({'data': {participant_name: serialized GetWorkflow
        response}}, HTTPStatus.OK), where each job's 'pods' field has
        been decoded from its JSON string form.

    Raises:
        InternalException: when any participant reports a non-success
            RPC status.
    """
    workflow = _get_workflow(workflow_id)
    config = workflow.project.get_config()
    results = {}
    for participant in config.participants:
        rpc = RpcClient(config, participant)
        # TODO(xiangyxuan): use uuid to identify the workflow
        response = rpc.get_workflow(workflow.name)
        if response.status.code != common_pb2.STATUS_SUCCESS:
            raise InternalException(response.status.msg)
        serialized = MessageToDict(
            response,
            preserving_proto_field_name=True,
            including_default_value_fields=True)
        # 'pods' arrives as a JSON string inside the proto; decode it so
        # callers receive structured data.
        for job in serialized['jobs']:
            if 'pods' in job:
                job['pods'] = json.loads(job['pods'])
        results[participant.name] = serialized
    return {'data': results}, HTTPStatus.OK
def _get_peer_workflow(self):
    """Fetch this workflow's counterpart from the first participant.

    Returns:
        The GetWorkflow RPC response from the peer.
    """
    # TODO: find coordinator for multiparty
    config = self.project.get_config()
    peer = config.participants[0]
    return RpcClient(config, peer).get_workflow(self.name)