def createPipeline(port=None, session=None, data=None, predictor=None, response=None, task_type=None, task_subtype=None, output_type=None, metric=None):
    """Ask the TA2 listening on *port* to create pipelines for a problem.

    *predictor* and *response* arrive as JSON-encoded lists of feature
    ids; every returned response message is decoded to a plain dict.
    """
    stub = get_stub(int(port))
    data_uri = 'file://%s' % (data)
    predictor_ids = json.loads(predictor)
    response_ids = json.loads(response)
    request = cpb.PipelineCreateRequest(
        context=Parse(session, cpb.SessionContext()),
        train_features=[cpb.Feature(feature_id=fid, data_uri=data_uri)
                        for fid in predictor_ids],
        target_features=[cpb.Feature(feature_id=fid, data_uri=data_uri)
                         for fid in response_ids],
        task=cpb.TaskType.Value(task_type.upper()),
        task_subtype=cpb.TaskSubtype.Value(toConstCase(task_subtype)),
        output=cpb.OutputType.Value(toConstCase(output_type)),
        metrics=[cpb.Metric.Value(toConstCase(metric))],
        task_description='TA2 pipeline creation',
        max_pipelines=5)
    resp = stub.CreatePipelines(request)
    return map(lambda msg: json.loads(MessageToJson(msg)), resp)
def StartSession(self, request, context):
    """Create a fresh session and return its context to the caller."""
    session = Session.new()
    return core.SessionResponse(
        response_info=self._create_response("Session started"),
        user_agent=request.user_agent,
        version=request.version,
        context=core.SessionContext(session_id=session.id))
def exportPipeline(port=None, session=None, pipeline=None):
    """Ask the TA2 on *port* to export *pipeline* as an executable.

    The executable name embeds the session, pipeline id and a timestamp
    so repeated exports never collide.
    """
    stub = get_stub(int(port))
    exec_name = '%s-%s-%f.exe' % (session, pipeline, time.time())
    request = cpb.PipelineExportRequest(
        context=Parse(session, cpb.SessionContext()),
        pipeline_id=pipeline,
        pipeline_exec_uri='file://%s' % (exec_name))
    resp = stub.ExportPipeline(request)
    return map(lambda msg: json.loads(MessageToJson(msg)), resp)
def pipelineCreateResults(context=None, pipeline=None, data_uri=None):
    """Poll the TA2 for the results of an earlier CreatePipelines call.

    :param context: session id string used to rebuild the SessionContext.
    :param pipeline: id of the pipeline whose results are requested.
    :param data_uri: accepted for interface compatibility but unused —
        GetCreatePipelineResults identifies work by session + pipeline
        only. (The previous 'file://' normalization of this value was
        dead code and raised TypeError under the default data_uri=None.)
    :return: list of response messages decoded to plain dicts.
    """
    stub = get_stub()
    context_in = cpb.SessionContext(session_id=context)
    request_in = cpb.PipelineCreateResultsRequest(context=context_in,
                                                  pipeline_id=pipeline)
    resp = stub.GetCreatePipelineResults(request_in)
    return map(lambda x: json.loads(MessageToJson(x)), resp)
def createPipeline(context=None, data_uri=None, task_type=None, task_subtype=None, target_features=None, predict_features=None, metrics=None, max_pipelines=10):
    """Build and send a PipelineCreateRequest for the current problem.

    :param context: session id for the SessionContext.
    :param data_uri: URI of the training dataset.
    :param task_type: problem task type, mapped via taskTypeLookup.
    :param task_subtype: problem subtype, mapped via subTaskLookup.
    :param target_features: unused; targets are read from the problem
        schema under PROBLEM_ROOT instead.
    :param predict_features: optional list of predict features. Defaults
        to an empty list; previously this argument was silently ignored
        and a mutable [] default was shared across calls.
    :param metrics: unused; a fixed standardized metric set is sent
        because the problem files carry imprecise metric names.
    :param max_pipelines: cap on the number of pipelines requested
        (previously hard-coded to 10 regardless of this argument).
    :return: list of response messages decoded to plain dicts.
    """
    # Avoid the shared-mutable-default pitfall: fresh list per call.
    if predict_features is None:
        predict_features = []
    stub = get_stub()
    problem_schema_path = os.environ.get('PROBLEM_ROOT')
    problem_supply = d3mds.D3MProblem(problem_schema_path)
    # Get the target features into the record format expected by the API.
    features = [core_pb2.Feature(resource_id=entry['resID'],
                                 feature_name=entry['colName'])
                for entry in problem_supply.get_targets()]
    # We are having trouble parsing the problem specs into valid API specs,
    # so just hardcode to certain problem types for now. A more general
    # lookup table returning valid API codes would fix this.
    task = taskTypeLookup(task_type)
    tasksubtype = subTaskLookup(task_subtype)
    # The metrics in the files are imprecise text versions of the
    # enumerations, so just standardize. A lookup table would help here too.
    metrics = [
        core_pb2.F1_MICRO, core_pb2.ROC_AUC, core_pb2.ROOT_MEAN_SQUARED_ERROR,
        core_pb2.F1, core_pb2.R_SQUARED
    ]
    request_in = cpb.PipelineCreateRequest(
        context=cpb.SessionContext(session_id=context),
        dataset_uri=data_uri,
        task=task,
        task_subtype=tasksubtype,
        metrics=metrics,
        task_description='Modsquad pipeline create request',
        target_features=features,
        predict_features=predict_features,
        max_pipelines=max_pipelines)
    resp = stub.CreatePipelines(request_in)
    return map(lambda x: json.loads(MessageToJson(x)), resp)
def test_session(self):
    """Start a session, end it, then verify ending an unknown session fails."""
    stub = core_pb2_grpc.CoreStub(grpc.insecure_channel('localhost:45042'))
    start_reply = stub.StartSession(
        core_pb2.SessionRequest(user_agent="unittest", version="Foo"))
    self.assertTrue(start_reply.response_info.status.code == core_pb2.OK)
    end_reply = stub.EndSession(start_reply.context)
    self.assertTrue(end_reply.status.code == core_pb2.OK)
    # Ending a session that never existed must report SESSION_UNKNOWN.
    end_reply = stub.EndSession(
        core_pb2.SessionContext(session_id="fake context"))
    self.assertTrue(end_reply.status.code == core_pb2.SESSION_UNKNOWN)
def exportPipeline(context=None, pipeline=None):
    """Request export of *pipeline* as an executable under EXECUTABLES_ROOT."""
    stub = get_stub()
    # Build a URI the TA2 will be able to write to during execution;
    # embed session, pipeline and a timestamp so names never collide.
    exec_name = '%s/modsquad-%s-%s-%f.executable' % (
        os.environ.get('EXECUTABLES_ROOT'), context, pipeline, time.time())
    request = cpb.PipelineExportRequest(
        context=cpb.SessionContext(session_id=context),
        pipeline_id=pipeline,
        pipeline_exec_uri='file://%s' % (exec_name))
    return json.loads(MessageToJson(stub.ExportPipeline(request)))
def StartSession(self, request, context):
    """Register a new session and reply with its context and protocol version."""
    version = core_pb2.DESCRIPTOR.GetOptions().Extensions[
        core_pb2.protocol_version]
    print('version: %s' % version)
    print('request.version: %s' % request.version)
    session = 'session_%d' % len(self.sessions)
    # NOTE(review): session_start_time appears to be module-level state —
    # confirm it is defined in the enclosing module.
    session_start_time[session] = time.time()
    self.sessions.add(session)
    logger.info("Session started: %s (protocol version %s)",
                session, version)
    return core_pb2.SessionResponse(
        response_info=core_pb2.Response(
            status=core_pb2.Status(code=core_pb2.OK)),
        user_agent=request.user_agent,
        version=version,
        context=core_pb2.SessionContext(session_id=session),
    )
def executePipeline(context=None, pipeline=None, data_uri=None):
    """Execute *pipeline* on *data_uri* and copy its outputs to the web root.

    :param context: session id string.
    :param pipeline: id of the pipeline to execute.
    :param data_uri: dataset location; a 'file://' scheme is prepended
        when missing, since some systems are inconsistent about it but
        file:// is the standard.
    :return: list of execution-response messages decoded to plain dicts.
    """
    stub = get_stub()
    if data_uri[0:4] != 'file':
        data_uri = 'file://%s' % (data_uri)
    request_in = cpb.PipelineExecuteRequest(
        context=cpb.SessionContext(session_id=context),
        pipeline_id=pipeline,
        dataset_uri=data_uri)
    resp = stub.ExecutePipeline(request_in)
    # Materialize explicitly: the original relied on map() for both the
    # value and the side-effect loop, which is lazy (a no-op) on Python 3,
    # and used a Python-2-only print statement.
    executedPipes = [json.loads(MessageToJson(x)) for x in resp]
    print(executedPipes)
    # Copy each returned pipeline's data where the web server can reach it.
    for pipe in executedPipes:
        copyToWebRoot(pipe)
    return executedPipes
def StartSession(self, request, context):
    """Create a server-side Session object and return its id to the client."""
    logging.info("Message received: StartSession %s", request)
    version = core_pb2.DESCRIPTOR.GetOptions().Extensions[
        core_pb2.protocol_version]
    session_id = self._new_session_id()
    self._sessions[session_id] = Session(session_id)
    # TODO: Check duplicates
    logging.info("Session started: %s (protocol version %s)",
                 session_id, version)
    status_ok = core_pb2.Status(code=core_pb2.OK)
    return core_pb2.SessionResponse(
        response_info=core_pb2.Response(status=status_ok),
        user_agent="cmu_ta2 " + util.__version__,
        version=version,
        context=core_pb2.SessionContext(session_id=session_id),
    )
def executePipeline(port=None, session=None, pipeline=None, data=None, predictor=None):
    """Execute *pipeline* on the TA2 at *port* using features from *data*.

    *predictor* is a JSON-encoded list of feature ids.
    """
    stub = get_stub(int(port))
    data_uri = 'file://%s' % (data)
    feature_ids = json.loads(predictor)
    request = cpb.PipelineExecuteRequest(
        context=Parse(session, cpb.SessionContext()),
        pipeline_id=pipeline,
        predict_features=[cpb.Feature(feature_id=fid, data_uri=data_uri)
                          for fid in feature_ids])
    resp = stub.ExecutePipeline(request)
    return map(lambda msg: json.loads(MessageToJson(msg)), resp)
def pipeline_create_parse():
    """Build a sample PipelineCreateRequest and round-trip it through JSON."""
    # Built but unused; kept to mirror the original behavior exactly.
    session_context = core_pb2.SessionContext()
    session_context.session_id = 'abc123'
    uri = 'data/d3m/o_196seed/data/trainDatamerged.tsv'
    req = core_pb2.PipelineCreateRequest()
    req.context.session_id = 'session_0'
    req.train_features.add(feature_id='cylinders', data_uri=uri)
    req.train_features.add(feature_id='cylinders', data_uri=uri)
    req.task = core_pb2.REGRESSION
    req.task_subtype = core_pb2.UNIVARIATE
    req.output = core_pb2.REAL
    req.metrics.append(core_pb2.ROOT_MEAN_SQUARED_ERROR)
    req.target_features.add(feature_id='class', data_uri=uri)
    req.max_pipelines = 10
    divider = '-' * 40
    msg_and_back(req, core_pb2.PipelineCreateRequest)
    print(divider)
    content = MessageToJson(req, including_default_value_fields=True)
    print(content)
    print(divider)
    json_parse(content, core_pb2.PipelineCreateRequest)
    print(divider)
def __init__(self, session):
    """Bind this object to *session* and pre-build its gRPC SessionContext."""
    self.session = session
    self.session_context = core.SessionContext(session_id=session.id)
def end_session(raven_json_str):
    """end session command

    This command needs a session id from the start_session cmd,
    e.g. string: '{"session_id" : "123556"}'

    :param raven_json_str: JSON string from the UI containing the session id.
    :return: JSON string — either the TA2's EndSession reply or a
        failed-precondition response describing what went wrong.
    """
    # The UI has sent JSON in string format that contains the session_id
    try:
        raven_dict = json.loads(raven_json_str)
    except json.decoder.JSONDecodeError as err_obj:
        err_msg = 'Failed to convert UI Str to JSON for end_session: %s' % (
            err_obj)
        return get_failed_precondition_response(err_msg)

    # The protocol version always comes from the latest version we have
    # in the repo (just copied in for now)
    if not KEY_SESSION_ID_FROM_UI in raven_dict:
        return get_failed_precondition_response(ERR_NO_SESSION_ID)

    # --------------------------------
    # Convert back to string for TA2 call, then parse the JSON string
    # into a gRPC SessionContext request
    # --------------------------------
    content = json.dumps(raven_dict)
    try:
        req = Parse(content, core_pb2.SessionContext())
    except ParseError as err_obj:
        err_msg = 'Failed to convert JSON to gRPC: %s' % (err_obj)
        return get_failed_precondition_response(err_msg)

    # In test mode, check if the incoming JSON is legit (in line above)
    # -- then return canned response below
    if settings.TA2_STATIC_TEST_MODE:
        rnd_session_id = random_info.get_alphanumeric_string(7)
        tinfo = dict(session_id=rnd_session_id)
        #if random.randint(1, 3) == 3:
        #    return get_grpc_test_json('test_responses/endsession_badassertion.json')
        return get_grpc_test_json('test_responses/endsession_ok.json', tinfo)

    # NOTE(review): a second TA2_STATIC_TEST_MODE branch used to live here;
    # it was unreachable (the branch above always returns) and referenced
    # an undefined name `request`, so it has been removed.

    # --------------------------------
    # Get the connection, return an error if there are channel issues
    # --------------------------------
    core_stub, err_msg = TA2Connection.get_grpc_stub()
    if err_msg:
        return get_failed_precondition_response(err_msg)

    # --------------------------------
    # Send the gRPC request
    # --------------------------------
    try:
        reply = core_stub.EndSession(req)
    except Exception as ex:
        return get_failed_precondition_response(str(ex))

    # --------------------------------
    # Convert the reply to JSON and send it back
    # --------------------------------
    return MessageToJson(reply)