def predict_and_quality(model, data_free, data_pred, start_fct, problem_size, g, d, nb_fct):
    """Predict a Tchebycheff score with the surrogate and log its quality.

    ``data_free`` packs the direction weights (first ``nb_fct`` entries)
    followed by the decision variables; ``data_pred`` is the same point in
    the format expected by ``model.predict``.  The exact (free) Tchebycheff
    value is computed for comparison and both values are handed to the
    quality tracker, keyed by generation ``g`` and direction ``d``.

    Returns the surrogate's predicted Tchebycheff value.
    """
    weights = data_free[0:nb_fct]
    decision_vars = data_free[nb_fct:]

    # Exact evaluation of the same point, used as the reference value.
    exact_scores = eval_to.free_eval(start_fct, decision_vars, problem_size)
    tcheby_exact = eval_to.g_tcheby(weights, exact_scores, train_to.getTrainingZstar())

    # Surrogate prediction; predict() returns an array, keep the scalar.
    tcheby_model = model.predict(data_pred)[0]

    # Record predicted-vs-exact pair for later quality analysis.
    qual_to.add(g, d, tcheby_model, tcheby_exact)
    return tcheby_model
def applyFreeevals(offspring_list, objective_functions, problem_size):
    """Exactly (freely) evaluate every offspring.

    Parameters
    ----------
    offspring_list : iterable of decision vectors.
    objective_functions : objective set forwarded to ``eval_to.free_eval``.
    problem_size : number of decision variables per individual.

    Returns
    -------
    list
        One ``eval_to.free_eval`` result per offspring, in input order.
    """
    # Idiomatic comprehension instead of the original append loop.
    return [
        eval_to.free_eval(objective_functions, offspring, problem_size)
        for offspring in offspring_list
    ]
def runTcheby():
    """Run the surrogate-assisted Tchebycheff decomposition algorithm.

    Reads its configuration from the module-global ``param`` tuple, keeps
    the current population in ``approx_pareto_front`` (module global) and
    optionally maintains an archive / writes objective values to files.

    NOTE(review): a second, plain ``runTcheby`` is defined later in this
    file and will shadow this one at import time — confirm which of the two
    definitions is actually meant to be callable.
    """
    global param, approx_pareto_front, archiveOK, NO_FILE_TO_WRITE

    ############################################################################
    # PARAMETER

    # clf = SVR(C=1.0, epsilon=0.1, kernel="rbf")
    # Surrogate model approximating the Tchebycheff aggregation.
    clf = NuSVR()
    clf2 = -1                  # placeholder: second model unused in this variant
    two_models_bool = False

    isReals = True
    # Unpack the flat ``param`` tuple pairwise; order must match the caller.
    start_fct, nb_functions = param[0:2]
    nb_iterations, neighboring_size = param[2:4]
    init_decisions, problem_size = param[4:6]
    max_decisions_maj, delta_neighbourhood = param[6:8]
    CR, search_space = param[8:10]
    F, distrib_index_n = param[10:12]
    pm, operator_fct = param[12:14]
    nb_samples, training_neighborhood_size = param[14:16]
    strategy, file_to_write = param[16:18]
    filter_strat, free_eval = param[18:20]
    param_print_every, file_to_writeR2 = param[20:22]
    filenameDIR, filenameSCORE = param[22:24]

    nb_objectives = len(start_fct)

    # get separatly offspring operator fct
    crossover_fct, mutation_fct, repair_fct = operator_fct

    # Deep copy so the caller's initial decisions are never mutated in place.
    best_decisions = copy.deepcopy(init_decisions)

    # Packed sampling configuration handed to the sampling tool.
    sampling_param = [
        crossover_fct,
        mutation_fct,
        repair_fct,
        best_decisions,
        F,
        problem_size,
        CR,
        search_space,
        distrib_index_n,
        pm,
    ]

    ############################################################################
    # INITIALISATION

    qual_tools.resetGlobalVariables(filenameDIR, filenameSCORE, nb_iterations, nb_functions)

    eval_to.resetEval()

    # get the directions weight for both starting functions
    directions = dec.getDirections(nb_functions, nb_objectives)

    # init the neighboring constant
    nt.initNeighboringTab(nb_functions, neighboring_size, directions, nb_objectives)

    # giving global visibility to the best_decisions to get the result at the end
    approx_pareto_front = best_decisions

    # initial best decisions scores (one exact evaluation per sub-problem)
    best_decisions_scores = [eval_to.free_eval(start_fct, best_decisions[i], problem_size) for i in range(nb_functions)]

    pop_size = nb_functions

    # current optimal scores for both axes (ideal point z*)
    z_opt_scores = gt.getMinTabOf(best_decisions_scores)

    eval_to.initZstar(z_opt_scores)

    # get the first training part of the item we will learn on
    model_directions = train_to.getDirectionsTrainingMatrix(directions)

    # if the data shall be write in a file
    writeOK = False
    if file_to_write != NO_FILE_TO_WRITE:
        writeOK = True

    writeR2OK = False
    if file_to_writeR2 != NO_FILE_TO_WRITE:
        writeR2OK = True

    ############################################################################
    # MAIN ALGORITHM

    if writeOK:
        iot.printObjectives(file_to_write, eval_to.getNbEvals(), 0, best_decisions_scores, problem_size, nb_objectives)

    # IDs tab to allow a random course through the directions in the main loop
    id_directions = [i for i in range(nb_functions)]

    # iterations loop
    for itera in range(nb_iterations):

        if not free_eval:
            # Update model: rebuild the training set from the current
            # population and refit the surrogate.
            # NOTE(review): training_inputs/training_scores/training_set_size
            # are reused further down inside the retraining branch — they are
            # only defined when free_eval is False; confirm free_eval cannot
            # flip mid-run.
            training_inputs, training_outputs, training_set_size, training_scores = train_to.getTrainingSet(
                model_directions,
                best_decisions,
                best_decisions_scores,
                eval_to.getZstar_with_decal(),
                strategy,
                nb_functions,
                training_neighborhood_size,
            )
            clf.fit(training_inputs, training_outputs)

        # Commented-out k-fold cross-validation of the surrogate (kept for
        # reference).  NOTE(review): it uses ``dtype=numpy.int``, which was
        # removed in NumPy 1.24 — it would need ``int``/``numpy.int_`` if
        # ever re-enabled.
        """
        if(writeR2OK and not free_eval):
            training_inputs_tcheby = eval_to.getManyTcheby(training_inputs, training_scores, eval_to.getZstar_with_decal(), training_set_size)

            random_index = numpy.arange(0,training_set_size)
            numpy.random.shuffle(random_index)
            n_folds = 10
            folds_sizes = (training_set_size // n_folds) * numpy.ones(n_folds, dtype=numpy.int)
            folds_sizes[:training_set_size % n_folds] += 1

            training_inputs_array = numpy.array(training_inputs)
            training_tcheby_array = numpy.array(training_inputs_tcheby)

            R2_cv = []
            MSE_cv = []
            MAE_cv = []
            MDAE_cv = []

            clfCV = NuSVR()

            current = 0
            for fold_size in folds_sizes:
                start, stop = current, current + fold_size
                mask = numpy.ones(training_set_size, dtype=bool)
                mask[start:stop] = 0
                current = stop

                clfCV.fit(training_inputs_array[random_index[mask]], training_tcheby_array[random_index[mask]])

                test_fold_tcheby = training_tcheby_array[random_index[start:stop]]
                test_fold_predict = clfCV.predict(training_inputs_array[random_index[start:stop]])

                R2_cv  .append(r2_score             (test_fold_tcheby, test_fold_predict))
                MSE_cv .append(mean_squared_error   (test_fold_tcheby, test_fold_predict))
                MAE_cv .append(mean_absolute_error  (test_fold_tcheby, test_fold_predict))
                MDAE_cv.append(median_absolute_error(test_fold_tcheby, test_fold_predict))

            R2 = clf.score(training_inputs, training_outputs)
            MSE_cv_mean = numpy.mean(MSE_cv)
            RMSE_cv_mean = math.sqrt(MSE_cv_mean)
            MAE_cv_mean = numpy.mean(MAE_cv)
            MDAE_cv_mean = numpy.mean(MDAE_cv)
            R2_cv_mean = numpy.mean(R2_cv)

            iot.printR2(file_to_writeR2, eval_to.getNbEvals(), itera, R2, R2_cv_mean, MSE_cv_mean, MAE_cv_mean, MDAE_cv_mean, RMSE_cv_mean, problem_size, print_every=1)
        """

        # random course through the directions
        random.shuffle(id_directions)

        # functions loop
        for f in id_directions:

            # get all the indice of neighbors of a function in a certain distance of f and include f in
            f_neighbors, current_neighbourhing_size = nt.getNeighborsOf(f, delta_neighbourhood)

            # get a list of offspring from the neighbors
            list_offspring = samp_to.extended_sampling(f, f_neighbors, sampling_param, nb_samples)

            # apply a filter on the offspring list and select the best one
            filter_param = [
                itera,
                f,
                clf,
                clf2,
                two_models_bool,
                f_neighbors,
                list_offspring,
                model_directions,
                start_fct,
                problem_size,
                eval_to.getZstar_with_decal(),
                best_decisions_scores,
                best_decisions,
                nb_objectives,
            ]
            best_candidate = filt_to.model_based_filtring(filter_strat, free_eval, filter_param)

            # evaluation of the newly made solution
            mix_scores = eval_to.eval(start_fct, best_candidate, problem_size)

            # MAJ of the z_star point
            has_changed = eval_to.min_update_Z_star(mix_scores, nb_objectives)

            # retraining of the model with the new z_star
            if has_changed and not free_eval:
                train_to.updateTrainingZstar(eval_to.getZstar_with_decal())
                training_outputs = train_to.retrainSet(
                    training_inputs, training_scores, eval_to.getZstar_with_decal(), training_set_size, nb_objectives
                )
                clf.fit(training_inputs, training_outputs)

            # boolean that is True if the offspring has been add to the archive
            added_to_S = False

            # count how many best decisions has been changed by the newly offspring
            cmpt_best_maj = 0

            # random course through the neighbors list
            random.shuffle(f_neighbors)

            # course through the neighbors list
            for j in f_neighbors:

                # stop if already max number of remplacement reach
                if cmpt_best_maj >= max_decisions_maj:
                    break

                # compute g_tcheby
                # wj = (directions[0][j],directions[1][j])
                wj = [directions[obj][j] for obj in range(0, nb_objectives)]
                g_mix = eval_to.g_tcheby(wj, mix_scores, eval_to.getZstar_with_decal())
                g_best = eval_to.g_tcheby(wj, best_decisions_scores[j], eval_to.getZstar_with_decal())

                # if the g_tcheby of the new solution is less distant from the z_optimal solution than the current best solution of the function j
                if g_mix < g_best:
                    cmpt_best_maj += 1
                    best_decisions[j] = best_candidate
                    best_decisions_scores[j] = mix_scores

                    # if we manage the archive and the solution have not been add already
                    if archiveOK and not (added_to_S):
                        arch_to.archivePut(best_candidate, mix_scores)
                        added_to_S = True

        # print("Update", itera, "done.")

        # if manage archive
        if archiveOK:
            arch_to.maintain_archive()

        # if write the result in a file
        if writeOK:
            iot.printObjectives(
                file_to_write,
                eval_to.getNbEvals(),
                itera + 1,
                best_decisions_scores,
                problem_size,
                nb_objectives,
                print_every=param_print_every,
            )
            continue

        # graphic update
        # yield arch_to.getArchiveScore(), best_decisions_scores, itera+1, eval_to.getNbEvals(), eval_to.getZstar_with_decal(), pop_size, isReals

    # Final surrogate-quality reporting (only meaningful when the model ran).
    if not free_eval and writeR2OK:
        qual_tools.computeQualityEvaluation()
        qual_tools.generateDiffPredFreeFile()
    return
def runTcheby():
    """Run the plain (no surrogate) Tchebycheff decomposition algorithm.

    Configuration comes from the module-global ``param`` tuple; the current
    population is exposed through the module-global ``approx_pareto_front``.

    NOTE(review): this redefines ``runTcheby`` — it shadows the
    surrogate-assisted version defined earlier in this file; confirm the
    duplication is intentional.
    """
    global param, approx_pareto_front, archiveOK, NO_FILE_TO_WRITE

    ############################################################################
    # PARAMETER

    isReals = True
    # Pairwise unpacking of the flat ``param`` tuple; order must match the caller.
    start_fct, nb_functions = param[0:2]
    nb_iterations, neighboring_size = param[2:4]
    init_decisions, problem_size = param[4:6]
    max_decisions_maj, delta_neighbourhood = param[6:8]
    CR, search_space = param[8:10]
    F, distrib_index_n = param[10:12]
    pm, operator_fct = param[12:14]
    file_to_write, param_print_every = param[14:16]

    nb_objectives = len(start_fct)

    #get separatly offspring operator fct
    crossover_fct, mutation_fct, repair_fct = operator_fct

    # Deep copy so the caller's initial decisions are never mutated in place.
    best_decisions = copy.deepcopy(init_decisions)

    sampling_param = [crossover_fct, mutation_fct, repair_fct, best_decisions, F, problem_size, CR, search_space, distrib_index_n, pm]

    ############################################################################
    # INITIALISATION

    eval_to.resetEval()

    #get the directions weight for both starting functions
    directions = dec.getDirections(nb_functions, nb_objectives)

    #init the neighboring constant
    nt.initNeighboringTab(nb_functions, neighboring_size, directions, nb_objectives)

    #giving global visibility to the best_decisions to get the result at the end
    approx_pareto_front = best_decisions

    #initial best decisions scores (one exact evaluation per sub-problem)
    best_decisions_scores = [eval_to.free_eval(start_fct, best_decisions[i], problem_size) for i in range(nb_functions)]

    pop_size = nb_functions

    #current optimal scores for both axes (ideal point z*)
    z_opt_scores = gt.getMinTabOf(best_decisions_scores)

    eval_to.initZstar(z_opt_scores)

    #if the data shall be write in a file
    writeOK = False
    if(file_to_write != NO_FILE_TO_WRITE):
        writeOK = True

    ############################################################################
    # MAIN ALGORITHM

    if(writeOK):
        iot.printObjectives(file_to_write, eval_to.getNbEvals(), 0, best_decisions_scores, problem_size, nb_objectives)

    #IDs tab to allow a random course through the directions in the main loop
    id_directions = [i for i in range(nb_functions)]

    #iterations loop
    for itera in range(nb_iterations):

        #random course through the directions
        random.shuffle(id_directions)

        #functions loop
        for f in id_directions:

            #get all the indice of neighbors of a function in a certain distance of f and include f in
            f_neighbors, current_neighbourhing_size = nt.getNeighborsOf(f, delta_neighbourhood)

            #generate a new valide offspring
            mix_ter = samp_to.sampling(f, f_neighbors, sampling_param)

            #evaluation of the newly made solution
            mix_scores = eval_to.eval(start_fct, mix_ter, problem_size)

            #MAJ of the z_star point
            has_changed = eval_to.min_update_Z_star(mix_scores, nb_objectives)

            #boolean that is True if the offspring has been add to the archive
            added_to_S = False

            #count how many best decisions has been changed by the newly offspring
            cmpt_best_maj = 0

            #random course through the neighbors list
            random.shuffle(f_neighbors)

            #course through the neighbors list
            for j in f_neighbors:

                #stop if already max number of remplacement reach
                if(cmpt_best_maj >= max_decisions_maj):
                    break

                #compute g_tcheby
                #wj = (directions[0][j],directions[1][j])
                wj = [directions[obj][j] for obj in range(0,nb_objectives)]
                g_mix = eval_to.g_tcheby(wj, mix_scores, eval_to.getZstar_with_decal())
                g_best = eval_to.g_tcheby(wj, best_decisions_scores[j], eval_to.getZstar_with_decal())

                #if the g_tcheby of the new solution is less distant from the z_optimal solution than the current best solution of the function j
                if( g_mix < g_best):
                    cmpt_best_maj += 1
                    best_decisions[j] = mix_ter
                    best_decisions_scores[j] = mix_scores

                    #if we manage the archive and the solution have not been add already
                    if(archiveOK and not(added_to_S)):
                        arch_to.archivePut(mix_ter, mix_scores)
                        added_to_S = True

        #print("Update", itera, "done.")

        #if manage archive
        if(archiveOK):
            arch_to.maintain_archive()

        #if write the result in a file
        if(writeOK):
            iot.printObjectives(file_to_write, eval_to.getNbEvals(), itera+1, best_decisions_scores, problem_size, nb_objectives, print_every=param_print_every)
            continue

        #graphic update
        #yield arch_to.getArchiveScore(), best_decisions_scores, itera, eval_to.getNbEvals(), eval_to.getZstar_with_decal(), pop_size, isReals
    return
def computeTchebyFreeEval(data, start_fct, problem_size, z_star, nb_fct):
    """Exactly evaluate one packed point and return its Tchebycheff value.

    ``data`` holds the direction weights in its first ``nb_fct`` entries and
    the decision variables in the remainder; the decision part is evaluated
    with ``eval_to.free_eval`` and aggregated against the ideal point
    ``z_star``.
    """
    weights, decision_vars = data[:nb_fct], data[nb_fct:]
    objective_scores = eval_to.free_eval(start_fct, decision_vars, problem_size)
    return eval_to.g_tcheby(weights, objective_scores, z_star)
def by_direction_score(free_eval, param):
    """Select the best offspring for one direction, by score.

    When ``free_eval`` is True the offspring are ranked by their exact
    Tchebycheff value; otherwise they are ranked by the surrogate's
    prediction (the exact value is still computed alongside, for quality
    logging).  Ties are collected and broken by a uniform random choice.
    The chosen offspring (decision vector) is returned.
    """
    global MAX_INTEGER
    # Packed parameter list produced by the filtering caller; order matters.
    current_g, current_f, model, model2, two_models_bool, f_neighbors, list_offspring, model_directions, start_fct, problem_size, z_star, population_scores, population_indiv, nb_fct = param

    id_offspring = -1
    # Best scores seen so far under each ranking; MAX_INTEGER acts as +inf.
    score_best_pred = MAX_INTEGER
    score_best_free = MAX_INTEGER
    # Companion value of the winner under the *other* ranking (for logging).
    save_best_pred_free_score = MAX_INTEGER
    save_best_free_pred_score = MAX_INTEGER
    # Indices tied for best; [-1] is the "nothing seen yet" sentinel.
    index_best_free_list = [-1]
    index_best_pred_list = [-1]

    # Direction weights of the current sub-problem, as a flat list.
    current_f_w = model_directions[current_f].tolist()[0]

    for offspring in list_offspring:
        id_offspring += 1
        tmp_free = -1
        tmp_pred = -1
        # Model input = direction weights followed by the decision variables.
        f_input_data = []
        f_input_data.extend(current_f_w)
        f_input_data.extend(offspring)

        if(not free_eval):
            # Surrogate ranking (exact value computed too, for quality logs).
            f_input_data_pred = np.matrix(f_input_data)
            tmp_pred = predict_and_quality(model, f_input_data, f_input_data_pred, start_fct, problem_size, current_g, current_f, nb_fct)
            score_eval = eval_to.free_eval(start_fct, offspring, problem_size)
            tmp_free = eval_to.g_tcheby(current_f_w, score_eval, z_star)

            # First candidate / strict improvement / tie bookkeeping.
            if(index_best_pred_list[0] == -1):
                score_best_pred = tmp_pred
                save_best_pred_free_score = tmp_free
                index_best_pred_list = [id_offspring]
            elif(tmp_pred < score_best_pred):
                score_best_pred = tmp_pred
                save_best_pred_free_score = tmp_free
                index_best_pred_list = [id_offspring]
            elif(tmp_pred == score_best_pred):
                index_best_pred_list.append(id_offspring)
            else:
                pass

        if(free_eval):
            # Exact ranking only (no surrogate involved).
            score_eval = eval_to.free_eval(start_fct, offspring, problem_size)
            tmp_free = eval_to.g_tcheby(current_f_w, score_eval, z_star)

            if(index_best_free_list[0] == -1):
                score_best_free = tmp_free
                save_best_free_pred_score = tmp_pred
                index_best_free_list = [id_offspring]
            elif(tmp_free < score_best_free):
                score_best_free = tmp_free
                save_best_free_pred_score = tmp_pred
                index_best_free_list = [id_offspring]
            elif(tmp_free == score_best_free):
                index_best_free_list.append(id_offspring)
            else :
                pass

    # Random tie-break among equally-scored candidates.
    # NOTE(review): the list for the inactive ranking stays [-1]; its random
    # choice then yields -1, which is only used for logging below.
    index_best_pred = random.choice(index_best_pred_list)
    index_best_free = random.choice(index_best_free_list)

    index_best = -1
    if(free_eval):
        index_best = index_best_free
    else :
        index_best = index_best_pred

    # Log the predicted-vs-exact discrepancy for this selection.
    diffFreePredict(current_g, current_f, score_best_pred, save_best_pred_free_score, index_best_pred, score_best_free, save_best_free_pred_score, index_best_free)

    return list_offspring[index_best]