-
Notifications
You must be signed in to change notification settings - Fork 0
/
hyperfine.py
222 lines (164 loc) · 7.98 KB
/
hyperfine.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
# -*- coding: utf-8 -*-
"""
This module is a wrapper for searching for optimal hyperparameters using Tree of Parzen Estimators (TPE)
or random search, and then fine-tuning the parameters using grid search. TPE and random search are
implemented via the hyperopt library (Bergstra, James S., et al., https://github.com/hyperopt/hyperopt),
while the grid search is implemented directly in this module.
"""
import numpy as np
from hyperopt import fmin, tpe, rand,hp, Trials
class search_routine:
    """Hyperparameter optimizer: TPE or random search (via hyperopt),
    optionally followed by a grid-search fine-tuning pass around the
    best point found.

    Parameters
    ----------
    obj_func : callable
        Objective taking a parameter dict and returning a dict that
        contains at least a 'loss' key (the hyperopt fmin convention).
    space : dict
        Maps parameter name -> (sampling_type, bounds_or_choices,
        fine_tune_spec). sampling_type is one of 'choice', 'uniform',
        'randint', 'quniform', 'loguniform', 'qloguniform'.
        fine_tune_spec is False (freeze the parameter during grid
        tuning) or [radius_percent, n_grid_points].
    algo : str
        'tpe' or 'random', optionally suffixed with '+grid' to enable
        the grid fine-tuning stage (e.g. 'tpe+grid').
    max_evals : int
        Number of evaluations for the hyperopt search stage.
    verbose : int
        0 or 1; 1 prints per-evaluation fine-tuning progress.
    """

    def __init__(self, obj_func, space, algo, max_evals, verbose):
        self.obj_func = obj_func
        self.algo = algo
        self.space = space
        self.max_evals = max_evals
        self.verbose = verbose
        # Filled in by search(); kept on the instance for later inspection.
        self.hyperopt_space = None

    def __prepare_grid(self, current_opt_params):
        """Build the fine-tuning grid around the current optimum.

        Returns
        -------
        flattened_grid : np.ndarray, shape (n_points, n_params)
            Column i holds candidate values for param_names[i].
        param_names : list of str
            Parameter names in the same order as the grid columns.
        """
        grid_points_list = []
        param_names = []
        for param in self.space:
            param_details = self.space[param]
            sampling = param_details[0]
            fine_tune_spec = param_details[2]  # False or [radius_pct, n_points]
            opt_param = current_opt_params[param]
            if fine_tune_spec == False:
                # Parameter is frozen at its current best value.
                oneD_points = np.array([opt_param])
            elif sampling in ('quniform', 'qloguniform', 'randint'):
                # Integer-valued samplers: span +/- radius% around the
                # optimum, clipped below at 1.
                radius = fine_tune_spec[0] / 100
                n_points = fine_tune_spec[1]
                lb = max(1, np.ceil(opt_param - radius * opt_param))
                ub = np.floor(opt_param + radius * opt_param)
                if lb == ub:
                    ub = ub + 1e-4  # avoid a degenerate zero-width range
                # BUGFIX: the original called .astype(int) and discarded the
                # result, so integer samplers produced fractional grid points.
                oneD_points = np.linspace(lb, ub, n_points).astype(int)
            elif sampling == 'choice':
                # Enumerate every choice by index; translated back to the
                # option value in __construct_grid_param_dict.
                oneD_points = np.arange(0, len(param_details[1]))
            else:
                # Continuous samplers: span +/- radius% around the optimum.
                radius = fine_tune_spec[0] / 100
                n_points = fine_tune_spec[1]
                lb = opt_param - radius * opt_param
                ub = opt_param + radius * opt_param
                oneD_points = np.linspace(lb, ub, n_points)
            grid_points_list.append(oneD_points)
            param_names.append(param)
        grid = np.meshgrid(*grid_points_list)
        # Every meshgrid output has the same size, so grid[0].size is the
        # total point count (the original tracked an index for this
        # redundantly via grid_size_flag/grid_size_ind).
        flattened_grid = np.zeros((grid[0].size, len(self.space)))
        for i_col, uni_grid in enumerate(grid):
            flattened_grid[:, i_col] = uni_grid.flatten()
        return flattened_grid, param_names

    def __construct_grid_param_dict(self, param_vals, param_names):
        """Turn one grid row into a parameter dict for obj_func,
        translating 'choice' indices back into their option values."""
        param_dict = {}
        for i_param, param in enumerate(param_names):
            param_details = self.space[param]
            if param_details[0] == 'choice':
                # The grid stores the choice index; obj_func expects the option.
                param_dict[param] = param_details[1][int(param_vals[i_param])]
            else:
                param_dict[param] = param_vals[i_param]
        return param_dict

    def __grid_fine_tune(self, final_dict):
        """Exhaustively evaluate the fine-tuning grid, keeping the best
        result found (including the incoming one)."""
        print("Initiate grid fine tuning...")
        min_cost = final_dict['results']['loss']
        cur_opt_params = final_dict['best_params']
        flat_grid, param_names = self.__prepare_grid(cur_opt_params)
        n_evaluations = flat_grid.shape[0]
        for i_eval in range(n_evaluations):
            param_vals = list(flat_grid[i_eval, :])
            param_dict = self.__construct_grid_param_dict(param_vals, param_names)
            returned_dict = self.obj_func(param_dict)
            cost = returned_dict['loss']
            if cost < min_cost:
                min_cost = cost
                final_dict = {'results': returned_dict, 'best_params': param_dict}
            if self.verbose == 1:
                print('Fine tune evaluation no ' + str(i_eval + 1) + '/' + str(n_evaluations) + ': Current cost = ', cost, ', Minimum cost = ', min_cost)
        return final_dict

    def __construct_hyperopt_space(self):
        """Translate self.space into hyperopt hp.* expressions for fmin."""
        hyperopt_space = {}
        for param in self.space:
            param_details = self.space[param]
            sampling_type = param_details[0]
            bounds = param_details[1]
            # The parameter name doubles as the hyperopt label.
            if sampling_type == 'choice':
                hyperopt_space[param] = hp.choice(param, bounds)
            elif sampling_type == 'uniform':
                hyperopt_space[param] = hp.uniform(param, bounds[0], bounds[1])
            elif sampling_type == 'randint':
                hyperopt_space[param] = hp.randint(param, bounds[0])
            elif sampling_type == 'quniform':
                hyperopt_space[param] = hp.quniform(param, bounds[0], bounds[1], bounds[2])
            elif sampling_type == 'loguniform':
                hyperopt_space[param] = hp.loguniform(param, bounds[0], bounds[1])
            elif sampling_type == 'qloguniform':
                hyperopt_space[param] = hp.qloguniform(param, bounds[0], bounds[1], bounds[2])
        return hyperopt_space

    def __convert_choices2str(self, final_dict):
        """Replace integer 'choice' indices in best_params with the actual
        option values (hyperopt's fmin returns indices for hp.choice).

        NOTE(review): the non-str check assumes choice options are strings;
        non-string options that survive grid tuning would be re-indexed —
        preserved original behavior, confirm with callers.
        """
        best_params = final_dict['best_params']
        for param in best_params:
            param_value = best_params[param]
            param_space = self.space[param]
            if param_space[0] == 'choice' and not isinstance(param_value, str):
                best_params[param] = param_space[1][int(param_value)]
        return final_dict

    def search(self):
        """Main callable entry point: run the hyperopt stage, then the
        optional grid fine-tuning stage.

        Returns
        -------
        dict with 'best_params' and 'results' keys on success, or False
        when the requested main algorithm is unknown.
        """
        algo_parts = self.algo.split('+')
        main_algo = algo_parts[0]
        fine_algo = algo_parts[1] if len(algo_parts) > 1 else None
        algo_dict = {'tpe': tpe.suggest, 'random': rand.suggest}
        if main_algo not in algo_dict:
            print('Error: Please enter either tpe or random as the main algorithm')
            return False
        print("\nHyperparameter search initiated...")
        self.hyperopt_space = self.__construct_hyperopt_space()
        trials = Trials()
        best_params = fmin(self.obj_func, self.hyperopt_space, algo=algo_dict[main_algo], max_evals=self.max_evals, trials=trials, verbose=self.verbose)
        final_dict = {'best_params': best_params, 'results': trials.best_trial['result']}
        if fine_algo == "grid":
            final_dict = self.__grid_fine_tune(final_dict)
        print("\nHyperparameter search over...")
        # fmin reports 'choice' params as indices; map them back to options.
        return self.__convert_choices2str(final_dict)