Example #1
    def _fit(self):
        """
		Run the hyper-parameter optimization process

		Returns
		-------
		tested_parameters_ : ndarray, the parameters tested during the process

		cv_scores_ : if score_format == 'cv', list of all the CV results of the 
			parameters tested; if score_format == 'avg', array of the 
			mean CV results of the parameters tested
		"""

        n_tested_parameters = 0
        tested_parameters = np.zeros((self.n_iter, self.n_parameters))
        cv_scores = []
        if (self.detailed_res == 2):
            search_path = np.zeros((self.n_iter, self.n_parameters))

        ###    Initialize with random candidates    ###
        init_candidates = utils.sample_candidates(self.n_init,
                                                  self.param_bounds,
                                                  self.param_isInt)
        self.n_init = init_candidates.shape[0]

        if (self.verbose):
            print('Start random init')

        for i in range(self.n_init):
            dict_candidate = self.vector_to_dict(init_candidates[i, :])
            cv_score = self.score(dict_candidate)

            if (self.verbose):
                print('Step ' + str(i) + ' - Hyperparameter ' +
                      str(dict_candidate) + ' ' + str(np.mean(cv_score)))

            is_in, idx = utils.is_in_ndarray(
                init_candidates[i, :],
                tested_parameters[:n_tested_parameters, :])
            if not is_in:
                tested_parameters[n_tested_parameters, :] = init_candidates[
                    i, :]
                cv_scores.append(cv_score)
                n_tested_parameters += 1
            else:
                if (self.verbose):
                    print('Hyperparameter already tested')
                cv_scores[idx] += cv_score

            if (self.detailed_res == 2):
                search_path[i, :] = init_candidates[i, :]

        ###               Smart Search               ###
        if (self.verbose):
            print('Start smart search')

        i_mod_10 = 0
        for i in range(self.n_iter - self.n_init - self.n_final_iter):

            if (i == 20 and self.cluster_evol == 'step'):
                self.GCP_args[1] = n_clusters

            if (i / 10 > (i_mod_10 + 2) and self.cluster_evol == 'variable'):
                self.GCP_args[0] = self.GCP_args[0]
                self.GCP_args[1] = min(self.GCP_args[1] + 1,
                                       self.n_clusters_max)
                i_mod_10 += 3

            # Sample candidates and predict their corresponding acquisition values
            candidates = utils.sample_candidates(self.n_candidates,
                                                 self.param_bounds,
                                                 self.param_isInt)

            # Model and retrieve the candidate that maximizes the acquisition function
            best_candidate = utils.find_best_candidate(
                self.model, tested_parameters[:n_tested_parameters, :],
                cv_scores, self.GCP_args, candidates, self.verbose,
                self.acquisition_function)

            dict_candidate = self.vector_to_dict(best_candidate)
            cv_score = self.score(dict_candidate)

            if (self.verbose):
                print('Step ' + str(i + self.n_init) + ' - Hyperparameter ' +
                      str(dict_candidate) + ' ' + str(np.mean(cv_score)))

            is_in, idx = utils.is_in_ndarray(
                best_candidate, tested_parameters[:n_tested_parameters, :])
            if not is_in:
                tested_parameters[n_tested_parameters, :] = best_candidate
                cv_scores.append(cv_score)
                n_tested_parameters += 1
            else:
                if (self.verbose):
                    print('Hyperparameter already tested')
                cv_scores[idx] += cv_score

            if (self.detailed_res == 2):
                search_path[i + self.n_init, :] = best_candidate

        ###               Final steps               ###
        self.acquisition_function = 'Simple'

        for i in range(self.n_final_iter):

            # Sample candidates and predict their corresponding acquisition values
            candidates = utils.sample_candidates(self.n_candidates,
                                                 self.param_bounds,
                                                 self.param_isInt)

            # Model and retrieve the candidate that maximizes the acquisition function
            best_candidate = utils.find_best_candidate(
                self.model, tested_parameters[:n_tested_parameters, :],
                cv_scores, self.GCP_args, candidates, self.verbose,
                self.acquisition_function)

            dict_candidate = self.vector_to_dict(best_candidate)
            cv_score = self.score(dict_candidate)

            if (self.verbose):
                print('Step ' + str(i + self.n_iter - self.n_final_iter) +
                      ' - Hyperparameter ' + str(dict_candidate) + ' ' +
                      str(np.mean(cv_score)))

            is_in, idx = utils.is_in_ndarray(
                best_candidate, tested_parameters[:n_tested_parameters, :])
            if not is_in:
                tested_parameters[n_tested_parameters, :] = best_candidate
                cv_scores.append(cv_score)
                n_tested_parameters += 1
            else:
                if (self.verbose):
                    print('Hyperparameter already tested')
                cv_scores[idx] += cv_score

            if (self.detailed_res == 2):
                search_path[i + self.n_iter -
                            self.n_final_iter, :] = best_candidate

        # compute the averages of CV results
        mean_scores = []
        for o in cv_scores:
            mean_scores.append(np.mean(o))

        # find the max
        best_idx = np.argmax(mean_scores)
        vector_best_param = tested_parameters[best_idx]
        best_parameter = self.vector_to_dict(vector_best_param)

        # store
        self.best_parameter_ = best_parameter
        self.tested_parameters_ = tested_parameters[:n_tested_parameters, :]

        if (self.verbose):
            print('\nTested ' + str(n_tested_parameters) + ' parameters')
            print('Max cv score ' + str(mean_scores[best_idx]))
            print('Best parameter ' + str(tested_parameters[best_idx]))
            print(best_parameter)

        if (self.detailed_res == 1):
            self.cv_scores_ = list(cv_scores)
            return tested_parameters[:n_tested_parameters, :], cv_scores
        elif (self.detailed_res == 2):
            self.cv_scores_ = list(cv_scores)
            return (tested_parameters[:n_tested_parameters, :], search_path,
                    cv_scores, mean_scores)
        else:
            self.cv_scores_ = mean_scores
            return tested_parameters[:n_tested_parameters, :], mean_scores
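
Both loops above rely on `utils.is_in_ndarray` to decide whether a candidate has already been evaluated; its source is not shown in these examples. As a rough sketch of what such a helper presumably does (an exact row-wise membership test that returns the index of the match; the real implementation may instead use a tolerance), one could write:

import numpy as np

def is_in_ndarray(row, array_2d):
    # Hypothetical re-implementation for illustration only; the actual
    # utils.is_in_ndarray may differ (e.g. tolerance-based comparison).
    if array_2d.shape[0] == 0:
        return False, 0
    matches = np.where((array_2d == np.asarray(row)).all(axis=1))[0]
    if matches.size > 0:
        return True, int(matches[0])
    return False, 0

When a candidate is already present, the method accumulates the new CV results at the matching index (cv_scores[idx] += cv_score) rather than appending a duplicate row.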
Example #2
	def _fit(self):
		"""
		Run the hyper-parameter optimization process

		Returns
		-------
		`tested_parameters_` : ndarray, the parameters tested during the process

		`cv_scores_` : if score_format == 'cv', list of all the CV results of the 
			parameters tested; if score_format == 'avg', array of the 
			mean CV results of the parameters tested
		"""

		n_tested_parameters = 0
		tested_parameters = np.zeros((self.n_iter,self.n_parameters))
		cv_scores = []
		if(self.detailed_res ==2):
			search_path = np.zeros((self.n_iter,self.n_parameters))

		###    Initialize with random candidates    ### 
		init_candidates = utils.sample_candidates(self.n_init,self.param_bounds,self.param_isInt)
		self.n_init = init_candidates.shape[0]

		if(self.verbose):
			print('Start random init')

		for i in range(self.n_init):
			dict_candidate = self.vector_to_dict(init_candidates[i,:])
			cv_score = self.score(dict_candidate)

			if(self.verbose):
				print ('Step ' + str(i) + ' - Hyperparameter ' + str(dict_candidate) + ' ' + str(np.mean(cv_score)))

			is_in,idx = utils.is_in_ndarray(init_candidates[i,:],tested_parameters[:n_tested_parameters,:])
			if not is_in:
				tested_parameters[n_tested_parameters,:] = init_candidates[i,:]
				cv_scores.append(cv_score)
				n_tested_parameters += 1
			else:
				if(self.verbose):
					print('Hyperparameter already tested')
				cv_scores[idx] +=  cv_score

			if(self.detailed_res ==2):
				search_path[i,:] = init_candidates[i,:]


		###               Smart Search               ### 
		if(self.verbose):
			print('Start smart search')

		i_mod_10 = 0  
		for i in range(self.n_iter - self.n_init - self.n_final_iter):

			if(i==20 and self.cluster_evol=='step'):
				self.GCP_args[1] = n_clusters

			if(i/10 > (i_mod_10+2) and self.cluster_evol=='variable'):
				self.GCP_args[0] = self.GCP_args[0]
				self.GCP_args[1] = min(self.GCP_args[1]+1,self.n_clusters_max)
				i_mod_10 += 3
			
			# Sample candidates and predict their corresponding acquisition values
			candidates = utils.sample_candidates(self.n_candidates,self.param_bounds,self.param_isInt)

			# Model and retrieve the candidate that maximizes the acquisition function
			best_candidate = utils.find_best_candidate(self.model,
														tested_parameters[:n_tested_parameters,:],
														cv_scores,
														self.GCP_args,
												 		candidates,
												 		self.verbose,
												 		self.acquisition_function)

			dict_candidate = self.vector_to_dict(best_candidate)
			cv_score = self.score(dict_candidate)

			if(self.verbose):
				print ('Step ' + str(i+self.n_init) + ' - Hyperparameter ' + str(dict_candidate) + ' ' + str(np.mean(cv_score)))

			is_in,idx = utils.is_in_ndarray(best_candidate,tested_parameters[:n_tested_parameters,:])
			if not is_in:
				tested_parameters[n_tested_parameters,:] = best_candidate
				cv_scores.append(cv_score)
				n_tested_parameters += 1
			else:
				if(self.verbose):
					print('Hyperparameter already tested')
				cv_scores[idx] += cv_score

			if(self.detailed_res ==2):
				search_path[i + self.n_init,:] = best_candidate


		###               Final steps               ###      
		self.acquisition_function = 'Simple'

		for i in range(self.n_final_iter):

			# Sample candidates and predict their corresponding acquisition values
			candidates = utils.sample_candidates(self.n_candidates,self.param_bounds,self.param_isInt)

			# Model and retrieve the candidate that maximizes the acquisition function
			best_candidate = utils.find_best_candidate(self.model,
														tested_parameters[:n_tested_parameters,:],
														cv_scores,
														self.GCP_args,
												 		candidates,
												 		self.verbose,
												 		self.acquisition_function)

			dict_candidate = self.vector_to_dict(best_candidate)
			cv_score = self.score(dict_candidate)

			if(self.verbose):
				print ('Step ' + str(i+self.n_iter - self.n_final_iter) + ' - Hyperparameter ' + str(dict_candidate) + ' ' + str(np.mean(cv_score)))

			is_in,idx = utils.is_in_ndarray(best_candidate,tested_parameters[:n_tested_parameters,:])
			if not is_in:
				tested_parameters[n_tested_parameters,:] = best_candidate
				cv_scores.append(cv_score)
				n_tested_parameters += 1
			else:
				if(self.verbose):
					print('Hyperparameter already tested')
				cv_scores[idx] += cv_score

			if(self.detailed_res ==2):
				search_path[i + self.n_iter - self.n_final_iter,:] = best_candidate


		# compute the averages of CV results
		mean_scores = []
		for o in cv_scores:
			mean_scores.append(np.mean(o))

		# find the max
		best_idx = np.argmax(mean_scores)
		vector_best_param = tested_parameters[best_idx]
		best_parameter = self.vector_to_dict(vector_best_param)

		# store
		self.best_parameter_ = best_parameter
		self.tested_parameters_ = tested_parameters[:n_tested_parameters,:]

		if(self.verbose):
			print ('\nTested ' + str(n_tested_parameters) + ' parameters')
			print ('Max cv score ' + str(mean_scores[best_idx]))
			print ('Best parameter ' + str(tested_parameters[best_idx]))
			print(best_parameter)

		if(self.detailed_res == 1):
			self.cv_scores_ = list(cv_scores)
			return tested_parameters[:n_tested_parameters,:], cv_scores
		elif(self.detailed_res == 2):
			self.cv_scores_ = list(cv_scores)
			return tested_parameters[:n_tested_parameters,:], search_path, cv_scores, mean_scores		
		else:
			self.cv_scores_ = mean_scores
			return tested_parameters[:n_tested_parameters,:], mean_scores
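
As in the first example, `_fit` is an internal method, and the best combination found is stored on the instance once it returns. Purely as a hypothetical usage sketch (the wrapper class name `SmartSearch` and the constructor arguments below are assumptions, not taken from these examples), the surrounding flow might look like:

# Hypothetical usage sketch; the real class and its constructor may differ.
search = SmartSearch(param_bounds, param_isInt,
                     score_function=my_cv_score,  # callable returning CV fold scores
                     n_iter=100, n_init=20, n_final_iter=5,
                     verbose=True)

tested_parameters, mean_scores = search._fit()
print(search.best_parameter_)      # best hyper-parameter combination found
print(search.tested_parameters_)   # all distinct parameter vectors evaluated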
Example #3
fig = plt.figure()

abs = range(0, 400)
f_plot = [scoring_function(i) for i in abs]
n_rows = nb_GCP_steps // 3
if not (nb_GCP_steps % 3 == 0):
    n_rows += 1

if (save_plots):
    save_data = np.asarray([np.asarray(abs), np.asarray(f_plot)[:, 0]]).T
    np.savetxt('data_plots/data_plot.csv', save_data, delimiter=',')

#-------------------- Random initialization --------------------#

# sample n_random_init random parameters to initialize the process
init_rand_candidates = utils.sample_candidates(n_random_init, parameter_bounds,
                                               isInt)
for i in range(init_rand_candidates.shape[0]):
    print(i)
    rand_candidate = init_rand_candidates[i]
    new_output = scoring_function(rand_candidate)

    if (verbose):
        print('Random try ' + str(rand_candidate) + ', score : ' +
              str(np.mean(new_output)))

    if (parameters is None):
        parameters = np.asarray([rand_candidate])
        raw_outputs = [new_output]
        mean_outputs = [np.mean(new_output)]
        std_outputs = [np.std(new_output)]
    else:
Example #4
fig = plt.figure()

abs = range(0,400)
f_plot = [scoring_function(i) for i in abs]
n_rows = nb_GCP_steps // 3
if not(nb_GCP_steps% 3 == 0):
	n_rows += 1

if(save_plots):
	save_data = np.asarray([np.asarray(abs),np.asarray(f_plot)[:,0]]).T
	np.savetxt('data_plots/data_plot.csv',save_data,delimiter=',')

#-------------------- Random initialization --------------------#

# sample n_random_init random parameters to initialize the process
init_rand_candidates = utils.sample_candidates(n_random_init,parameter_bounds,isInt)
for i in range(init_rand_candidates.shape[0]):
	print(i)
	rand_candidate = init_rand_candidates[i]
	new_output = scoring_function(rand_candidate)
	
	if(verbose):
		print('Random try '+str(rand_candidate)+', score : '+str(np.mean(new_output)))
		
	if(parameters is None):
		parameters = np.asarray([rand_candidate])
		raw_outputs = [new_output]
		mean_outputs = [np.mean(new_output)]
		std_outputs = [np.std(new_output)]
	else:
		parameters,raw_outputs,mean_outputs,std_outputs = \