def GetProduceSolutionResults(self, request, context):
    """TA2-3 API call: stream produce (prediction) results for a fitted solution.

    Looks up the pending produce request by ``request.request_id``, runs the
    fitted solution's produce step on the request inputs, writes predictions
    under ``$D3MOUTPUTDIR/<search_id>/predictions`` and yields a single
    COMPLETED response whose exposed output carries the predictions CSV URI
    (or an error Value when produce failed).

    Yields:
        core_pb2.GetProduceSolutionResultsResponse
    """
    logging.critical("Message received: GetProduceSolutionResults")
    request_id = request.request_id
    request_params = self._solution_score_map[request_id]
    start = solutiondescription.compute_timestamp()

    solution_id = request_params.fitted_solution_id
    solution = self._solutions[solution_id]

    inputs = self._get_inputs(solution.problem, request_params.inputs)
    try:
        output = solution.produce(inputs=inputs, solution_dict=self._solutions)[0]
        logging.critical("Produce predictions with rows = %s", len(output))
    except Exception:
        # Best-effort: a failed produce is reported as an error Value below
        # rather than propagated to the gRPC layer.  (Was a bare ``except:``,
        # which also swallowed SystemExit/KeyboardInterrupt.)
        logging.critical("Exception in produce: %s", solution.primitives)
        logging.critical("Exception in produce: %s", sys.exc_info()[0])
        output = None

    search_id_str = self._solution_to_search[solution_id]
    outputDir = os.environ['D3MOUTPUTDIR'] + "/" + search_id_str

    if output is not None:
        uri = util.write_predictions(output, outputDir + "/predictions", request_id)
        uri = 'file://{uri}'.format(uri=os.path.abspath(uri))
        result = value_pb2.Value(csv_uri=uri)
    else:
        result = value_pb2.Value(error=value_pb2.ValueError(message="Output is NULL"))

    # Clean up the pending-request entry.
    self._solution_score_map.pop(request_id, None)

    msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())

    # One StepProgress per pipeline step, all marked COMPLETED.
    steps = [core_pb2.StepProgress(progress=msg) for _ in range(solution.num_steps())]

    # Expose only the last requested output (or the pipeline's final output).
    exposed_outputs = {}
    if request_params.expose_outputs is not None and len(request_params.expose_outputs) > 0:
        last_step_output = request_params.expose_outputs[len(request_params.expose_outputs) - 1]
    else:
        last_step_output = solution.outputs[0][2]
    exposed_outputs[last_step_output] = result

    yield core_pb2.GetProduceSolutionResultsResponse(progress=msg,
                                                     steps=steps,
                                                     exposed_outputs=exposed_outputs)
def GetScoreSolutionResults(self, request, context):
    """TA2-3 API call: stream scoring results for a solution.

    Scores the solution identified by the pending request's ``solution_id``
    against the first requested performance metric, persists the scored
    pipeline JSON, and yields RUNNING/COMPLETED progress responses (ERRORED
    when the solution is unknown).

    NOTE(review): another GetScoreSolutionResults definition appears in this
    file; in Python the later binding wins — confirm which version is intended.

    Yields:
        core_pb2.GetScoreSolutionResultsResponse
    """
    logging.info("Message received: GetScoreSolutionResults")
    request_id = request.request_id
    request_params = self._solution_score_map[request_id]
    start = solutiondescription.compute_timestamp()
    solution_id = request_params.solution_id
    msg = core_pb2.Progress(state=core_pb2.RUNNING,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())
    send_scores = []

    if solution_id not in self._solutions:
        logging.info("GetScoreSolutionResults: Solution %s not found!", solution_id)
        msg = core_pb2.Progress(state=core_pb2.ERRORED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        # Clean up
        self._solution_score_map.pop(request_id, None)
        yield core_pb2.GetScoreSolutionResultsResponse(progress=msg, scores=[])
    else:
        inputs = self._get_inputs(self._solutions[solution_id].problem, request_params.inputs)
        try:
            (score, optimal_params) = self._solutions[solution_id].score_solution(
                inputs=inputs,
                metric=request_params.performance_metrics[0].metric,
                primitive_dict=self._primitives,
                solution_dict=self._solutions)
            if optimal_params is not None and len(optimal_params) > 0:
                self._solutions[solution_id].set_hyperparams(optimal_params)
        except Exception:
            # Scoring failure degrades to a 0.0 score instead of aborting the
            # stream.  (Was a bare ``except:``.)
            score = 0.0
            logging.info(self._solutions[solution_id].primitives)
            logging.info(sys.exc_info()[0])

        outputDir = os.environ['D3MOUTPUTDIR']
        util.write_pipeline_json(self._solutions[solution_id], self._primitives,
                                 outputDir + "/pipelines_scored")
        logging.info("Score = %f", score)
        send_scores.append(core_pb2.Score(
            metric=request_params.performance_metrics[0],
            fold=request_params.configuration.folds,
            targets=[],
            value=value_pb2.Value(raw=value_pb2.ValueRaw(double=score))))

        # Intermediate RUNNING response before the final scored one.
        yield core_pb2.GetScoreSolutionResultsResponse(progress=msg, scores=[])

        # Clean up
        self._solution_score_map.pop(request_id, None)

        msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        yield core_pb2.GetScoreSolutionResultsResponse(progress=msg, scores=send_scores)
def GetScoreSolutionResults(self, request, context):
    """TA2-3 API call: stream scoring results for a solution.

    Scores the solution against the first requested performance metric
    (including its ``pos_label``), optionally updates the solution's
    hyperparameters with the optimal ones found, persists the scored pipeline
    JSON (plus subpipelines) under the search's output directory, and yields
    RUNNING/COMPLETED progress responses (ERRORED when the solution is
    unknown).

    NOTE(review): another GetScoreSolutionResults definition appears in this
    file; in Python the later binding wins — confirm which version is intended.

    Yields:
        core_pb2.GetScoreSolutionResultsResponse
    """
    logging.critical("Message received: GetScoreSolutionResults")
    request_id = request.request_id
    request_params = self._solution_score_map[request_id]
    start = solutiondescription.compute_timestamp()
    solution_id = request_params.solution_id
    msg = core_pb2.Progress(state=core_pb2.RUNNING,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())
    send_scores = []

    from timeit import default_timer as timer

    if solution_id not in self._solutions:
        logging.critical("GetScoreSolutionResults: Solution %s not found!", solution_id)
        msg = core_pb2.Progress(state=core_pb2.ERRORED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        # Clean up
        self._solution_score_map.pop(request_id, None)
        yield core_pb2.GetScoreSolutionResultsResponse(progress=msg, scores=[])
    else:
        inputs = self._get_inputs(self._solutions[solution_id].problem, request_params.inputs)
        score = 0.0
        try:
            s = timer()
            (score, optimal_params) = self._solutions[solution_id].score_solution(
                inputs=inputs,
                metric=request_params.performance_metrics[0].metric,
                posLabel=request_params.performance_metrics[0].pos_label,
                primitive_dict=self._primitives,
                solution_dict=self._solutions)
            if optimal_params is not None and len(optimal_params) > 0:
                self._solutions[solution_id].set_hyperparams(self._solutions, optimal_params)
            e = timer()
            logging.critical("Time taken = %s sec", e - s)
        except Exception:
            # Scoring failure degrades to a 0.0 score instead of aborting the
            # stream.  (Was a bare ``except:``.)
            score = 0.0
            logging.critical("Exception in score: %s", self._solutions[solution_id].primitives)
            logging.critical("Exception in score: %s", sys.exc_info()[0])

        search_id_str = self._solution_to_search[solution_id]
        outputDir = os.environ['D3MOUTPUTDIR'] + "/" + search_id_str
        try:
            util.write_pipeline_json(self._solutions[solution_id],
                                     self._primitives,
                                     self._solutions,
                                     outputDir + "/pipelines_scored",
                                     outputDir + "/subpipelines")
        except Exception:
            # Persisting the pipeline JSON is best-effort; the score is still
            # returned to the TA3 client.  (Was a bare ``except:``.)
            logging.critical(sys.exc_info()[0])
            logging.critical(self._solutions[solution_id].primitives)

        logging.critical("Score = %f", score)
        send_scores.append(core_pb2.Score(
            metric=request_params.performance_metrics[0],
            fold=0,
            value=value_pb2.Value(raw=value_pb2.ValueRaw(double=score)),
            random_seed=0))

        # Intermediate RUNNING response before the final scored one.
        yield core_pb2.GetScoreSolutionResultsResponse(progress=msg, scores=[])

        # Clean up
        self._solution_score_map.pop(request_id, None)

        msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        yield core_pb2.GetScoreSolutionResultsResponse(progress=msg, scores=send_scores)
def GetSearchSolutionsResults(self, request, context):
    """TA2-3 API call: stream search results for a solution search.

    Either returns the single fully-specified pipeline from the request
    template, or evaluates candidate solutions asynchronously (scoring each
    within a shrinking time budget), ranks the valid ones, and streams one
    response per evaluated/ranked solution followed by a final COMPLETED
    response.

    NOTE(review): another GetSearchSolutionsResults definition appears in this
    file; in Python the later binding wins — confirm which version is intended.

    Yields:
        core_pb2.GetSearchSolutionsResultsResponse
    """
    logging.critical("Message received: GetSearchSolutionsRequest")
    search_id_str = request.search_id
    start = solutiondescription.compute_timestamp()
    msg = core_pb2.Progress(state=core_pb2.PENDING,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())
    yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                     done_ticks=0,
                                                     all_ticks=0,
                                                     solution_id="",
                                                     internal_score=0.0,
                                                     scores=[])

    request_params = self._solution_score_map[search_id_str]
    count = 0
    inputs = self._get_inputs(request_params.problem, request_params.inputs)
    (solutions, time_used) = self.search_solutions(request_params, inputs[0])
    self._search_solutions[search_id_str] = []

    if request_params.template is not None \
       and isinstance(request_params.template, pipeline_pb2.PipelineDescription) \
       and len(request_params.template.steps) > 0 and len(solutions) == 1:
        # Fully specified solution: no search needed, return it directly.
        msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        count = count + 1
        id = solutions[0].id
        self._solutions[id] = solutions[0]
        self._search_solutions[search_id_str].append(id)
        self._solution_to_search[id] = search_id_str
        yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                         done_ticks=1,
                                                         all_ticks=1,
                                                         solution_id=id,
                                                         internal_score=0.0,
                                                         scores=[])
    else:
        # Evaluate potential solutions
        index = 0
        msg = core_pb2.Progress(state=core_pb2.RUNNING,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        metric = request_params.problem.problem.performance_metrics[0].metric
        posLabel = request_params.problem.problem.performance_metrics[0].pos_label
        # Snapshot the solutions map so worker processes see a stable view.
        solutions_dict = copy.deepcopy(self._solutions)
        results = [self.async_message_thread.apply_async(
                       search.evaluate_solution_score,
                       (inputs, sol, self._primitives, metric, posLabel, solutions_dict,))
                   for sol in solutions]

        logging.critical("Search timeout = %d", request_params.time_bound_search)
        timeout = request_params.time_bound_search * 60
        if timeout <= 0:
            # No/invalid time bound: wait indefinitely on each result.
            timeout = None
        else:
            if timeout > 60:
                timeout = timeout - 120  # reserve time for ranking and writing results
            # FIX: original subtracted time_used unconditionally, raising
            # TypeError when timeout had been set to None above.
            timeout = timeout - time_used
            if timeout <= 0:
                timeout = 1
            logging.critical("Timeout = %d sec", timeout)

        outputDir = os.environ['D3MOUTPUTDIR'] + "/" + search_id_str
        valid_solution_scores = {}

        # Evaluate potential solutions asynchronously and get end-result
        for r in results:
            try:
                start_solution = timer()
                (score, optimal_params) = r.get(timeout=timeout)
                count = count + 1
                id = solutions[index].id
                self._solutions[id] = solutions[index]
                self._search_solutions[search_id_str].append(id)
                self._solution_to_search[id] = search_id_str
                valid_solution_scores[index] = score
                if optimal_params is not None and len(optimal_params) > 0:
                    solutions[index].set_hyperparams(self._solutions, optimal_params)
                util.write_pipeline_json(solutions[index],
                                         self._primitives,
                                         self._solutions,
                                         outputDir + "/pipelines_searched",
                                         outputDir + "/subpipelines")
                end_solution = timer()
                time_used = end_solution - start_solution
                # FIX: only shrink the budget when one exists (None = no bound).
                if timeout is not None:
                    timeout = timeout - time_used
                    if timeout <= 0:
                        timeout = 3
                yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                                 done_ticks=count,
                                                                 all_ticks=len(solutions),
                                                                 solution_id=id,
                                                                 internal_score=0.0,
                                                                 scores=[])
            except TimeoutError:
                # NOTE(review): r.get raises multiprocessing.TimeoutError, which
                # is NOT the builtin TimeoutError — confirm the module imports
                # the multiprocessing one, otherwise this lands in the generic
                # handler below (which does not reset the budget to 3s).
                logging.critical(solutions[index].primitives)
                logging.critical(sys.exc_info()[0])
                logging.critical("Solution terminated: %s", solutions[index].id)
                timeout = 3
            except Exception:
                # Evaluation failure: skip this candidate and keep streaming.
                logging.critical(solutions[index].primitives)
                logging.critical(sys.exc_info()[0])
                logging.critical("Solution terminated: %s", solutions[index].id)
            index = index + 1

        # Sort solutions by their scores and rank them
        sorted_x = search.rank_solutions(valid_solution_scores, metric)
        sol_rank = 1
        for (index, score) in sorted_x:
            id = solutions[index].id
            self._solutions[id].rank = sol_rank
            logging.critical("Rank %d", sol_rank)
            print("Score ", score)
            rank = core_pb2.Score(
                metric=problem_pb2.ProblemPerformanceMetric(metric="RANK"),
                value=value_pb2.Value(raw=value_pb2.ValueRaw(double=sol_rank)))
            search_rank = core_pb2.SolutionSearchScore(
                scoring_configuration=core_pb2.ScoringConfiguration(),
                scores=[rank])
            sscore = core_pb2.Score(
                metric=problem_pb2.ProblemPerformanceMetric(metric=metric),
                value=value_pb2.Value(raw=value_pb2.ValueRaw(double=score)))
            search_score = core_pb2.SolutionSearchScore(
                scoring_configuration=core_pb2.ScoringConfiguration(),
                scores=[sscore])
            yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                             done_ticks=count,
                                                             all_ticks=len(solutions),
                                                             solution_id=id,
                                                             internal_score=0.0,
                                                             scores=[search_rank, search_score])
            sol_rank = sol_rank + 1

    # Final COMPLETED response and cleanup of the pending-search entry.
    msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())
    yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                     done_ticks=count,
                                                     all_ticks=count,
                                                     solution_id="",
                                                     internal_score=0.0,
                                                     scores=[])
    self._solution_score_map.pop(search_id_str, None)
    logging.critical("No. of sol = %d", count)
def GetFitSolutionResults(self, request, context):
    """TA2-3 API call: stream fit results for a solution.

    Deep-copies the requested solution into a new fitted solution (fresh
    UUID), fits it on the request inputs, writes the fit-time predictions to
    ``$D3MOUTPUTDIR/predictions``, and yields a RUNNING response followed by a
    COMPLETED response exposing the predictions (or an error Value when fit
    failed).  Yields a single ERRORED response when the solution is unknown.

    Yields:
        core_pb2.GetFitSolutionResultsResponse
    """
    logging.info("Message received: GetFitSolutionResults")
    request_id = request.request_id
    request_params = self._solution_score_map[request_id]
    start = solutiondescription.compute_timestamp()
    solution_id = request_params.solution_id

    if solution_id not in self._solutions:
        logging.info("GetFitSolutionResults: Solution %s not found!", solution_id)
        msg = core_pb2.Progress(state=core_pb2.ERRORED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        # Clean up
        self._solution_score_map.pop(request_id, None)
        yield core_pb2.GetFitSolutionResultsResponse(progress=msg,
                                                     steps=[],
                                                     exposed_outputs=[],
                                                     fitted_solution_id=None)
    else:
        solution = self._solutions[solution_id]
        msg = core_pb2.Progress(state=core_pb2.RUNNING,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())

        # A fitted solution gets its own identity so the original stays reusable.
        fitted_solution = copy.deepcopy(solution)
        fitted_solution.id = str(uuid.uuid4())
        fitted_solution.create_pipeline_json(self._primitives)
        self._solutions[fitted_solution.id] = fitted_solution

        inputs = self._get_inputs(solution.problem, request_params.inputs)
        try:
            output = fitted_solution.fit(inputs=inputs, solution_dict=self._solutions)
        except Exception:
            # Best-effort: a failed fit is reported as an error Value below.
            # (Was a bare ``except:``.)
            logging.info(fitted_solution.primitives)
            logging.info(sys.exc_info()[0])
            output = None

        outputDir = os.environ['D3MOUTPUTDIR']

        if isinstance(output, np.ndarray):
            output = pd.DataFrame(data=output)

        if output is not None:
            uri = util.write_predictions(output, outputDir + "/predictions", fitted_solution)
            uri = 'file://{uri}'.format(uri=os.path.abspath(uri))
            result = value_pb2.Value(csv_uri=uri)
        else:
            result = value_pb2.Value(error=value_pb2.ValueError(message="Output is NULL"))

        yield core_pb2.GetFitSolutionResultsResponse(progress=msg,
                                                     steps=[],
                                                     exposed_outputs=[],
                                                     fitted_solution_id=fitted_solution.id)

        msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())

        # One StepProgress per pipeline step, all marked COMPLETED.
        steps = [core_pb2.StepProgress(progress=msg)
                 for _ in range(fitted_solution.num_steps())]

        # Expose only the last requested output (or the pipeline's final output).
        exposed_outputs = {}
        if request_params.expose_outputs is not None and len(request_params.expose_outputs) > 0:
            last_step_output = request_params.expose_outputs[len(request_params.expose_outputs) - 1]
        else:
            last_step_output = fitted_solution.outputs[0][2]
        exposed_outputs[last_step_output] = result

        # Clean up
        self._solution_score_map.pop(request_id, None)

        yield core_pb2.GetFitSolutionResultsResponse(progress=msg,
                                                     steps=steps,
                                                     exposed_outputs=exposed_outputs,
                                                     fitted_solution_id=fitted_solution.id)
def GetSearchSolutionsResults(self, request, context):
    """TA2-3 API call: stream search results for a solution search.

    Either returns the single fully-specified pipeline from the request
    template, or evaluates candidate solutions asynchronously within the
    request's time bound, streaming one response per successfully evaluated
    solution followed by a final COMPLETED response.

    NOTE(review): another GetSearchSolutionsResults definition appears in this
    file; in Python the later binding wins — confirm which version is intended.

    Yields:
        core_pb2.GetSearchSolutionsResultsResponse
    """
    logging.info("Message received: GetSearchSolutionsRequest")
    search_id_str = request.search_id
    start = solutiondescription.compute_timestamp()
    msg = core_pb2.Progress(state=core_pb2.PENDING,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())
    yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                     done_ticks=0,
                                                     all_ticks=0,
                                                     solution_id="",
                                                     internal_score=0.0,
                                                     scores=[])

    request_params = self._solution_score_map[search_id_str]
    count = 0
    inputs = self._get_inputs(request_params.problem, request_params.inputs)
    solutions = self.search_solutions(request_params, inputs[0])
    self._search_solutions[search_id_str] = []

    if request_params.template is not None \
       and isinstance(request_params.template, pipeline_pb2.PipelineDescription) \
       and len(request_params.template.steps) > 0:
        # Fully specified solution: no search needed, return it directly.
        msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        count = count + 1
        id = solutions[0].id
        self._solutions[id] = solutions[0]
        self._search_solutions[search_id_str].append(id)
        yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                         done_ticks=1,
                                                         all_ticks=1,
                                                         solution_id=id,
                                                         internal_score=0.0,
                                                         scores=[])
    else:
        # Evaluate potential solutions
        index = 0
        msg = core_pb2.Progress(state=core_pb2.RUNNING,
                                status="",
                                start=start,
                                end=solutiondescription.compute_timestamp())
        results = [self.async_message_thread.apply_async(evaluate_solution, (inputs, sol, None,))
                   for sol in solutions]

        timeout = request_params.time_bound * 60
        if timeout <= 0:
            # No/invalid time bound: wait indefinitely on each result.
            timeout = None
        elif timeout > 60:
            timeout = timeout - 60  # reserve time for writing results

        outputDir = os.environ['D3MOUTPUTDIR']

        # Evaluate potential solutions asynchronously and get end-result
        for r in results:
            try:
                val = r.get(timeout=timeout)
                if val == 0:
                    count = count + 1
                    id = solutions[index].id
                    self._solutions[id] = solutions[index]
                    self._search_solutions[search_id_str].append(id)
                    util.write_pipeline_json(solutions[index], self._primitives,
                                             outputDir + "/pipelines_searched")
                    yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                                     done_ticks=count,
                                                                     all_ticks=len(solutions),
                                                                     solution_id=id,
                                                                     internal_score=0.0,
                                                                     scores=[])
            except Exception:
                # Evaluation failure or timeout: skip this candidate and keep
                # streaming.  (Was a bare ``except:``.)
                logging.info(solutions[index].primitives)
                logging.info(sys.exc_info()[0])
                logging.info("Solution terminated: %s", solutions[index].id)
            index = index + 1

    # Final COMPLETED response and cleanup of the pending-search entry.
    self._solution_score_map.pop(search_id_str, None)
    logging.info("No. of sol = %d", count)
    msg = core_pb2.Progress(state=core_pb2.COMPLETED,
                            status="",
                            start=start,
                            end=solutiondescription.compute_timestamp())
    yield core_pb2.GetSearchSolutionsResultsResponse(progress=msg,
                                                     done_ticks=count,
                                                     all_ticks=count,
                                                     solution_id="",
                                                     internal_score=0.0,
                                                     scores=[])