def __init__(self, model_in, model_out, model_architecture, sample_fraction):
    """Build a model wrapper identified by its inputs/outputs/architecture.

    Parameters
    ----------
    model_in, model_out : str
        Names of the model's input and output data sets.
    model_architecture : str
        Regression type used to build the model (ridge, forest, svm, nn, ...).
    sample_fraction : float
        Fraction of the data used, expected in [0.0, 1.0].
    """
    self.sample_fraction = sample_fraction
    self.model_architecture = model_architecture
    # Unique identifier combining every constructor input.
    self.model_name = (model_in + '_' + model_out + '_'
                       + model_architecture + '_' + str(sample_fraction))
    # Pickle/figure/model paths derived from model_name.
    self.trials_file = './trials/' + self.model_name + '.pkl'
    self.stats_file = './model_stats/' + self.model_name + '.pkl'
    self.plotpairs_file = './plotpairs/' + self.model_name + '.pkl'
    self.figure_file = './figures/' + self.model_name + '.png'
    self.model_loc = './models/' + self.model_name
    self._model = get_model(model_architecture)  # ridge, forest, svm, nn(emb,)
    # Load previous results if hyperparameter trials exist, otherwise start fresh.
    if self.load_hyp():
        # NOTE(review): pickle.load is only safe on trusted files.
        # `with` blocks fix the original's leaked file handles.
        with open(self.stats_file, 'rb') as f:
            self.model_stats = pickle.load(f)
        with open(self.plotpairs_file, 'rb') as f:
            [self.plotpairs_cv, self.plotpairs_test] = pickle.load(f)
    else:
        print('No previous trial data, model data or plot data available')
        self.model_stats = {
            'cv_avg_loss': np.inf, 'cv_std_loss': [],
            'test_avg_loss': np.inf, 'test_std_loss': []
        }
        # BUG FIX: the original assigned self.model_plotpairs_cv/_test here,
        # leaving self.plotpairs_cv/_test undefined on the fresh-model path.
        [self.plotpairs_cv, self.plotpairs_test] = [[[], [], []], [[], [], []]]
    # Set on both paths; original collapsed formatting made the intended
    # indentation ambiguous — TODO confirm it was not else-branch only.
    self.plot_type = None
# Training-script fragment (Python 2 style: note the bare `print` statement
# below). Free variables (X_train, X_test, Train, dataset, K, get_model, ...)
# are defined elsewhere in the original file.
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
if Train:
    # Ensure the output directory tree exists before saving models.
    if not(os.path.exists('models')):
        os.mkdir('models')
    if not(os.path.exists('models/'+dataset)):
        os.mkdir('models/'+dataset)
    # Train at a fixed residual-level count (2); range(1,4) was the wider sweep.
    for resid_levels in range(2,3): #range(1,4):
        print 'training with', resid_levels,'levels'
        sess=K.get_session()
        # Build the binarized network for this dataset / residual-level config.
        model=get_model(dataset,resid_levels,LUT,BINARY,trainable_means)
        #model.summary()
        #gather all binary dense and binary convolution layers:
        binary_layers=[]
        for l in model.layers:
            if isinstance(l,binary_dense) or isinstance(l,binary_conv):
                binary_layers.append(l)
        #gather all residual binary activation layers:
        resid_bin_layers=[]
        for l in model.layers:
            if isinstance(l,Residual_sign):
                resid_bin_layers.append(l)
        # Optimizer hyperparameters; the loop body presumably continues
        # past this chunk (lr/decay unused so far) — incomplete view.
        lr=0.01
        decay=1e-6
def build_architecture(self, model_architecture):
    """Instantiate the architecture class (which sets the hyperparameter
    space) and store it on the protected ``_model`` attribute."""
    architecture = get_model(model_architecture)
    self._model = architecture
def build_architecture(self, model_architecture):
    # Setter-style helper: builds a model-architecture object from the given
    # architecture name and stores it as the protected attribute self._model.
    'load architecture class which sets hyp space'
    self._model = get_model(model_architecture)
def step_decay(epoch):
    """Step learning-rate schedule: start at 0.025 and halve every 50 epochs."""
    initial_lrate = 0.025
    drop = 0.5
    epochs_drop = 50.0
    # floor((1+epoch)/epochs_drop) counts how many 50-epoch steps have elapsed.
    lrate = initial_lrate * math.pow(drop, math.floor((1+epoch)/epochs_drop))
    return lrate

# Training-script fragment (Python 2 style: note the bare `print` statement).
# Free variables (Train, dataset, K, get_model, binary_dense, ...) are defined
# elsewhere in the original file.
if Train:
    # Ensure the output directory tree exists before saving models.
    if not(os.path.exists('models')):
        os.mkdir('models')
    if not(os.path.exists('models/'+dataset)):
        os.mkdir('models/'+dataset)
    for resid_levels in range(1):
        print 'training with', resid_levels,'levels'
        sess=K.get_session()
        model=get_model(dataset,resid_levels)
        #model.summary()
        #gather all binary dense and binary convolution layers:
        binary_layers=[]
        for l in model.layers:
            if isinstance(l,binary_dense) or isinstance(l,binary_conv):
                binary_layers.append(l)
        #gather all residual binary activation layers:
        resid_bin_layers=[]
        for l in model.layers:
            if isinstance(l,Residual_sign):
                resid_bin_layers.append(l)
        # Learning rate; the loop body presumably continues past this chunk
        # (optimizer construction is commented out below) — incomplete view.
        lr=0.001
        #opt = keras.optimizers.Adam(lr=lr,decay=1e-6)#SGD(lr=lr,momentum=0.9,decay=1e-5)