def GetExecutePipelineResults(self, request, context):
    """Mock GetExecutePipelineResults: stream one result per requested pipeline.

    Yields a single FAILED_PRECONDITION result if the session is unknown;
    otherwise yields one OK PipelineExecuteResult per pipeline id with a
    randomly chosen progress state and a test result file URI.
    """
    sessioncontext = request.context
    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        yield core_pb2.PipelineExecuteResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(
                    code=core_pb2.FAILED_PRECONDITION,
                    details="Unknown session id: %s" % sessioncontext.session_id)))
        return

    pipeline_ids = request.pipeline_ids
    logger.info("Got GetExecutePipelineResults request, session=%s",
                sessioncontext.session_id)

    # One shared predict-file info dict for all pipelines in this request.
    file_uri_dict = get_predict_file_info_dict()

    for pipeline_id in pipeline_ids:
        time.sleep(1)  # simulate work between stream messages
        progress_info = random.choice(
            [core_pb2.UPDATED, core_pb2.RUNNING, core_pb2.COMPLETED])
        yield core_pb2.PipelineExecuteResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(code=core_pb2.OK),
            ),
            progress_info=progress_info,
            pipeline_id=pipeline_id,
            result_uri=file_uri_dict.get(TEST_KEY_FILE_URI, 'no file uri'))
def ExecutePipeline(self, request, context):
    """Mock ExecutePipeline: stream a RUNNING result, then a COMPLETED one.

    Yields a single FAILED_PRECONDITION result and stops if the session id
    is unknown.
    """
    sessioncontext = request.context
    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        yield core_pb2.PipelineExecuteResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(
                    code=core_pb2.FAILED_PRECONDITION,
                    details="Unknown session id: %s" % sessioncontext.session_id)))
        return

    pipeline_id = request.pipeline_id
    logger.info("Got ExecutePipeline request, session=%s",
                sessioncontext.session_id)

    time.sleep(1)  # simulate startup latency
    yield core_pb2.PipelineExecuteResult(
        response_info=core_pb2.Response(
            status=core_pb2.Status(code=core_pb2.OK),
        ),
        progress_info=core_pb2.RUNNING,
        pipeline_id=pipeline_id,
    )

    time.sleep(1)  # simulate execution time
    # Try to create a legit file uri for the final result.
    file_uri_dict = get_predict_file_info_dict('CLASSIFICATION')
    # The REGRESSION result was built but never used; the call is kept in
    # case the helper creates the file as a side effect -- TODO confirm.
    get_predict_file_info_dict('REGRESSION')
    yield core_pb2.PipelineExecuteResult(
        response_info=core_pb2.Response(
            status=core_pb2.Status(code=core_pb2.OK),
        ),
        progress_info=core_pb2.COMPLETED,
        pipeline_id=pipeline_id,
        result_uri=file_uri_dict.get(TEST_KEY_FILE_URI, 'no file uri'))
def GetCreatePipelineResults(self, request, context):
    """Mock GetCreatePipelineResults: stream a COMPLETED result per pipeline.

    Yields a single FAILED_PRECONDITION result and stops if the session id
    is unknown.
    """
    sessioncontext = request.context
    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        yield core_pb2.PipelineCreateResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(
                    code=core_pb2.FAILED_PRECONDITION,
                    details="Unknown session id: %s" % sessioncontext.session_id)))
        return

    # (progress, pipeline id, attach pipeline_info?) tuples to stream back.
    results = [
        (core_pb2.COMPLETED, 'pipeline_1', True),
        (core_pb2.COMPLETED, 'pipeline_2', True),
    ]
    for progress, pipeline_id, send_pipeline in results:
        if not context.is_active():
            # NOTE(review): the client has disconnected but the loop keeps
            # yielding; consider returning here. Preserved as log-only to
            # keep behavior unchanged.
            logger.info("Client closed GetCreatePipelineResults stream")
        msg = core_pb2.PipelineCreateResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(code=core_pb2.OK),
            ),
            progress_info=progress,
            pipeline_id=pipeline_id,
        )
        if send_pipeline:
            # Try to attach a legit file uri for the finished pipeline.
            file_uri_dict = get_predict_file_info_dict('CLASSIFICATION')
            msg.pipeline_info.CopyFrom(
                core_pb2.Pipeline(
                    predict_result_uri=file_uri_dict.get(
                        TEST_KEY_FILE_URI, 'no file uri'),
                    output=core_pb2.OUTPUT_TYPE_UNDEFINED,
                    scores=[
                        core_pb2.Score(
                            metric=core_pb2.ACCURACY,
                            value=0.8,
                        ),
                        core_pb2.Score(
                            metric=core_pb2.ROC_AUC,
                            value=0.5,
                        ),
                    ],
                )
            )
        yield msg
def EndSession(self, request, context):
    """Remove the requested session; FAILED_PRECONDITION if it is unknown."""
    # Idiom fix: `x not in y` instead of `not x in y`.
    if request.session_id not in self.sessions:
        return core_pb2.Response(
            status=core_pb2.Status(
                code=core_pb2.FAILED_PRECONDITION,
                details="Unknown session id: %s" % request.session_id))
    logger.info("Session terminated: %s", request.session_id)
    self.sessions.remove(request.session_id)
    return core_pb2.Response(status=core_pb2.Status(code=core_pb2.OK))
def EndSession(self, request, context):
    """Terminate the session named in the request, if it exists."""
    logging.info("Message received: EndSession")
    session_id = request.session_id
    if session_id not in self._sessions:
        logging.warning(
            "Client tried to end session %s which does not exist",
            session_id)
        return core_pb2.Response(
            status=core_pb2.Status(code=core_pb2.SESSION_UNKNOWN),
        )
    # Drop the session object; nothing else references it afterwards.
    self._sessions.pop(session_id)
    logging.info("Session terminated: %s", session_id)
    return core_pb2.Response(
        status=core_pb2.Status(code=core_pb2.OK),
    )
def pipeline_export_parse():
    """Round-trip a PipelineExportRequest and a Response through JSON,
    printing each stage for manual inspection."""

    def _dump_and_parse(msg, msg_cls):
        # Serialize to JSON, print it (twice, as the original did), then
        # parse it back into the given message class.
        content = MessageToJson(msg, including_default_value_fields=True)
        print('JSON:\n')
        print(content)
        print('-' * 40)
        print(content)
        print('-' * 40)
        print('gRPC:\n')
        json_parse(content, msg_cls)
        print('-' * 40)

    req = core_pb2.PipelineExportRequest()
    req.context.session_id = 'session_01'
    req.pipeline_id = 'pipeline_1'
    req.pipeline_exec_uri = 'file:///ravens_volume/pipeline_1'
    _dump_and_parse(req, core_pb2.PipelineExportRequest)

    resp = core_pb2.Response()
    resp.status.code = core_pb2.OK
    resp.status.details = 'looking good'
    _dump_and_parse(resp, core_pb2.Response)
def SetProblemDoc(self, request, context):
    """Mock response for SetProblemDoc: echo back the received updates."""
    logger.info("SetProblemDoc 1")
    sessioncontext = request.context
    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        # BUG FIX: the error detail read request.session_id, but this
        # request carries its id on request.context; accessing a field the
        # message does not declare raises AttributeError at runtime.
        return core_pb2.Response(
            status=core_pb2.Status(
                code=core_pb2.FAILED_PRECONDITION,
                details="Unknown session id: %s" % sessioncontext.session_id))

    test_msg = 'updates received: %s' % \
        ([str(x).strip() for x in request.updates])
    return core_pb2.Response(
        status=core_pb2.Status(
            code=core_pb2.OK,
            details='Problem Doc Updated. %s' % test_msg),)
def ExecutePipeline(self, request, context):
    "Placeholder."
    logging.info("Message received: ExecutePipelines")
    ok_response = core_pb2.Response(
        status=core_pb2.Status(code=core_pb2.OK),
    )
    # Single-message stream: immediately report the pipeline as COMPLETED.
    yield core_pb2.PipelineExecuteResult(
        response_info=ok_response,
        progress_info=core_pb2.COMPLETED,
        pipeline_id=request.pipeline_id,
        result_uri="file://TODO, return valid pipeline URI")
def CancelPipelines(self, request, context):
    """CancelPipelines response: validate the request, then report which
    pipeline ids were (mock-)deleted."""
    sessioncontext = request.context

    def _failed(details):
        # Build a FAILED_PRECONDITION PipelineListResult with the details.
        return core_pb2.PipelineListResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(
                    code=core_pb2.FAILED_PRECONDITION,
                    details=details)))

    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        return _failed("Unknown session id: %s" % sessioncontext.session_id)
    # Idiom fix: `not hasattr(...)` instead of `hasattr(...) is False`.
    # NOTE(review): hasattr on a declared proto field is always True, so
    # this guard likely never fires; kept for behavioral parity.
    if not hasattr(request, 'cancel_pipeline_ids'):
        return _failed("'cancel_pipeline_ids' not found")
    if not request.cancel_pipeline_ids:
        return _failed("No pipeline ids specified in 'cancel_pipeline_ids'")

    logger.info("Rcvd CancelPipelines request, session=%s",
                sessioncontext.session_id)
    res = core_pb2.PipelineListResult()
    res.response_info.status.code = core_pb2.OK
    if len(request.cancel_pipeline_ids) > 1:
        # Pretend all but the last pipeline were deleted.
        res.pipeline_ids.append(request.cancel_pipeline_ids[-1])
        res.response_info.status.details = \
            ("Test. All except last pipeline deleted--"
             "perhaps it's pending delete")
    else:
        res.response_info.status.details = \
            ("Test. Single pipeline has been deleted")
    return res
def _response_session_invalid(self, session_id):
    "Returns a message that the given session does not exist"
    # A placeholder pipeline payload for the error message.
    placeholder = core_pb2.Pipeline(
        predict_result_uri="invalid",
        output=core_pb2.OUTPUT_TYPE_UNDEFINED,
        scores=[])
    unknown_status = core_pb2.Status(code=core_pb2.SESSION_UNKNOWN)
    return core_pb2.PipelineCreateResult(
        response_info=core_pb2.Response(status=unknown_status),
        progress_info=core_pb2.ERRORED,
        pipeline_id="invalid",
        pipeline_info=placeholder)
def get_failed_precondition_sess_response(err_msg):
    """Return a SessionResponse in JSON form with status FAILED_PRECONDITION,
    including default-valued fields in the output."""
    status = core_pb2.Status(
        code=core_pb2.FAILED_PRECONDITION,
        details=err_msg)
    resp = core_pb2.SessionResponse(
        response_info=core_pb2.Response(status=status))
    return MessageToJson(resp, including_default_value_fields=True)
def get_failed_precondition_sess_response(err_msg):
    """Return a SessionResponse in JSON form with status FAILED_PRECONDITION.

    Only explicitly-set fields appear in the JSON output.
    """
    status = core_pb2.Status(
        code=core_pb2.FAILED_PRECONDITION,
        details=err_msg)
    grpc_resp = core_pb2.SessionResponse(
        response_info=core_pb2.Response(status=status))
    return MessageToJson(grpc_resp)
def get_failed_precondition_response(err_msg='failed precondition?'):
    """Return a Response message in JSON form with status FAILED_PRECONDITION,
    appending the configured TA2 test server URL to the error details."""
    details = '%s (ta2 server: %s)' % (err_msg, settings.TA2_TEST_SERVER_URL)
    grpc_resp = core_pb2.Response(
        status=core_pb2.Status(
            code=core_pb2.FAILED_PRECONDITION,
            details=details))
    return MessageToJson(grpc_resp)
def get_reply_exception_response(err_msg='error in response'):
    """Return a Response message in JSON form with status UNKNOWN, appending
    the configured TA2 test server URL to the error details."""
    details = '%s (ta2 server: %s)' % (err_msg, settings.TA2_TEST_SERVER_URL)
    unknown_status = core_pb2.Status(
        code=core_pb2.UNKNOWN,
        details=details)
    resp = core_pb2.Response(status=unknown_status)
    return MessageToJson(resp, including_default_value_fields=True)
def StartSession(self, request, context):
    """Start a mock session and return a SessionResponse carrying its id."""
    version = core_pb2.DESCRIPTOR.GetOptions().Extensions[
        core_pb2.protocol_version]
    print('version: %s' % version)
    print('request.version: %s' % request.version)

    # Session ids are sequential: session_0, session_1, ...
    session_id = 'session_%d' % len(self.sessions)
    session_start_time[session_id] = time.time()  # module-level bookkeeping
    self.sessions.add(session_id)
    logger.info("Session started: %s (protocol version %s)",
                session_id, version)
    return core_pb2.SessionResponse(
        response_info=core_pb2.Response(
            status=core_pb2.Status(code=core_pb2.OK)),
        user_agent=request.user_agent,
        version=version,
        context=core_pb2.SessionContext(session_id=session_id),
    )
def StartSession(self, request, context):
    """Register a new Session and reply with its id and protocol version."""
    logging.info("Message received: StartSession %s", request)
    version = core_pb2.DESCRIPTOR.GetOptions().Extensions[
        core_pb2.protocol_version]

    # TODO: check for duplicate session ids.
    new_id = self._new_session_id()
    self._sessions[new_id] = Session(new_id)
    logging.info("Session started: %s (protocol version %s)",
                 new_id, version)
    return core_pb2.SessionResponse(
        response_info=core_pb2.Response(status=core_pb2.Status(
            code=core_pb2.OK)),
        user_agent="cmu_ta2 " + util.__version__,
        version=version,
        context=core_pb2.SessionContext(session_id=new_id),
    )
def ListPipelines(self, request, context):
    """List the known (mock) pipeline ids for a valid session."""
    sessioncontext = request.context
    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        return core_pb2.PipelineListResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(
                    code=core_pb2.FAILED_PRECONDITION,
                    details="Unknown session id: %s" % sessioncontext.session_id)))
    logger.info("Got ListPipelines request, session=%s",
                sessioncontext.session_id)

    res = core_pb2.PipelineListResult()
    res.response_info.status.code = core_pb2.OK
    res.response_info.status.details = "listing the pipelines!"
    res.pipeline_ids.append('pipeline_1')
    res.pipeline_ids.append('pipeline_2')
    return res
def CreatePipelines(self, request, context):
    """Create and train pipelines for the problem described in the request.

    Streams PipelineCreateResult messages for each candidate pipeline:
    SUBMITTED, then RUNNING after training starts, then COMPLETED once
    evaluation has written the prediction file.
    """
    logging.info("Message received: CreatePipelines: %s", request)
    session_id = request.context.session_id
    if session_id not in self._sessions:
        logging.warning(
            "Asked to create pipeline for session %s which does not exist",
            session_id)
        # BUG FIX: this method is a generator, so `return value` never
        # delivers the error message to the client (the value is swallowed
        # by StopIteration) -- yield it, then stop the stream.
        yield self._response_session_invalid(session_id)
        return
    session = self._sessions[session_id]

    # Set up the pipeline specification from the request.
    dataset_uri = request.dataset_uri
    task_type = request.task
    # TODO: task_subtype is currently ignored.
    # TODO: task_description is currently ignored.
    metrics = request.metrics
    target_features = request.target_features
    predict_features = request.predict_features

    # We need to tell the TA1 where it can find output, which will be in a
    # created subdirectory of the dataset URI. This assumes the URI is
    # always file:// but changing that will be Hard, so it's an ok
    # assumption for now.
    dataset_directory = dataset_uri_path(request.dataset_uri)
    output_directory = os.path.join(dataset_directory, "output")

    # A ProblemDescription holds all the parameters of the problem the
    # client wants solved; it does whatever metalearning it wants and then
    # produces candidate pipelines, each a particular attempt at solving
    # that problem.
    spec = problem.ProblemDescription(session_id, dataset_uri,
                                      output_directory, task_type,
                                      metrics, target_features,
                                      predict_features)
    logging.info("Starting new problem for session %s", session_id)
    session.new_problem(spec)  # registers the problem with the session

    pipelines = spec.find_solutions(dataset_uri)
    for pipeline in pipelines:
        pipeline_id = self._new_pipeline_id()
        output_file = pipeline_id + ".csv"
        output_uri = "file://" + output_directory + "/" + output_file
        pb2_pipeline = core_pb2.Pipeline(
            predict_result_uri=output_uri,
            output=core_pb2.OUTPUT_TYPE_UNDEFINED,
            scores=[])
        msg = core_pb2.PipelineCreateResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(code=core_pb2.OK),
            ),
            progress_info=core_pb2.SUBMITTED,
            pipeline_id=pipeline_id,
            pipeline_info=pb2_pipeline)
        yield msg

        # Reuse the same message object, updating progress as work happens.
        msg.progress_info = core_pb2.RUNNING
        pipeline.train(dataset_uri)
        yield msg
        pipeline.evaluate(dataset_uri,
                          os.path.join(output_directory, output_file),
                          target_features)
        msg.progress_info = core_pb2.COMPLETED
        yield msg
def DeletePipelines(self, request, context):
    "Placeholder."
    logging.info("Message received: DeletePipelines")
    ok_status = core_pb2.Status(code=core_pb2.OK)
    # Always report success with an empty id list.
    return core_pb2.PipelineListResult(
        response_info=core_pb2.Response(status=ok_status),
        pipeline_ids=[])
def GetExecutePipelineResults(self, request, context):
    """Placeholder.

    BUG FIX: Status was passed positionally, but protobuf message
    constructors accept keyword arguments only, so the original raised
    TypeError at runtime; pass it as status=.
    """
    logging.info("Message received: GetExecutePipelineResults")
    return core_pb2.Response(status=core_pb2.Status(code=core_pb2.OK))
def ExportPipeline(self, request, context):
    "Placeholder."
    logging.info("Message received: ExportPipeline")
    ok_status = core_pb2.Status(code=core_pb2.OK)
    return core_pb2.Response(status=ok_status)
def UpdateProblemSchema(self, request, context):
    """Placeholder.

    BUG FIX: Status was passed positionally, but protobuf message
    constructors accept keyword arguments only, so the original raised
    TypeError at runtime; pass it as status=.
    """
    logging.info("Message received: UpdateProblemSchema")
    return core_pb2.Response(status=core_pb2.Status(code=core_pb2.OK))
def CreatePipelines(self, request, context):
    """Mock CreatePipelines response: stream canned progress updates for two
    pipelines, attaching a pipeline payload on the COMPLETED messages.

    Yields a single FAILED_PRECONDITION result and stops if the session id
    is unknown.
    """
    sessioncontext = request.context
    # Idiom fix: `x not in y` instead of `not x in y`.
    if sessioncontext.session_id not in self.sessions:
        yield core_pb2.PipelineCreateResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(
                    code=core_pb2.FAILED_PRECONDITION,
                    details="Unknown session id: %s" % sessioncontext.session_id)))
        return

    # Pull the request fields; most are unused by this mock, `output` is
    # echoed back on the generated pipelines.
    dataset_uri = request.dataset_uri
    task = request.task
    task_subtype = request.task_subtype
    task_description = request.task_description
    output = request.output
    metrics = request.metrics
    target_features = request.target_features
    predict_features = request.predict_features
    max_pipelines = request.max_pipelines
    logger.info("Got CreatePipelines request, session=%s",
                sessioncontext.session_id)

    # (progress, pipeline id, attach pipeline_info?) tuples to stream back.
    results = [
        (core_pb2.SUBMITTED, 'pipeline_1', False),
        (core_pb2.SUBMITTED, 'pipeline_2', False),
        (core_pb2.RUNNING, 'pipeline_2', False),
        (core_pb2.RUNNING, 'pipeline_1', False),
        (core_pb2.COMPLETED, 'pipeline_1', True),
        (core_pb2.COMPLETED, 'pipeline_2', True),
    ]
    for progress, pipeline_id, send_pipeline in results:
        print('sleep 1 second...')
        time.sleep(1)  # simulate work between stream messages
        if not context.is_active():
            # NOTE(review): the client has disconnected but the loop keeps
            # yielding; consider returning here. Preserved as log-only to
            # keep behavior unchanged.
            logger.info("Client closed CreatePipelines stream")
        msg = core_pb2.PipelineCreateResult(
            response_info=core_pb2.Response(
                status=core_pb2.Status(code=core_pb2.OK),
            ),
            progress_info=progress,
            pipeline_id=pipeline_id,
        )
        if send_pipeline:
            # Try to attach a legit file uri for the finished pipeline.
            file_uri_dict = get_predict_file_info_dict('CLASSIFICATION')
            msg.pipeline_info.CopyFrom(
                core_pb2.Pipeline(
                    predict_result_uri=file_uri_dict.get(
                        TEST_KEY_FILE_URI, 'no file uri'),
                    output=output,
                    scores=[
                        core_pb2.Score(
                            metric=core_pb2.ACCURACY,
                            value=0.8,
                        ),
                        core_pb2.Score(
                            metric=core_pb2.ROC_AUC,
                            value=0.5,
                        ),
                    ],
                )
            )
        yield msg
def _create_response(self, message, code="OK"):
    """Wrap *message* in a core.Response whose status carries the named
    status code (looked up via core.StatusCode.Value)."""
    return core.Response(
        status=core.Status(
            code=core.StatusCode.Value(code),
            details=message))