def DBL_model_test1(basepath, cutoff=[-1, -1], pklname='', newdata=None): # data ishape = Conv2DSpace(shape=[48, 48], num_channels=1) preproc = [0, 0] nclass = 7 DBL = DBL_model(basepath, nclass, np.append(ishape.shape, 1), preproc, cutoff) # create layers nk = [30] #nk = [40,30,20] ks = [[8, 8], [5, 5], [3, 3]] ir = [0.05, 0.05, 0.05] ps = [[4, 4], [4, 4], [2, 2]] pd = [[2, 2], [2, 2], [2, 2]] kn = [0.9, 0.9, 0.9] layers = DBL_ConvLayers(nk, ks, ir, ps, pd, kn) layer_soft = Softmax( layer_name='y', #max_col_norm = 1.9365, n_classes=nclass, init_bias_target_marginals=DBL.ds_train, #istdev = .05 irange=.0) layers.append(layer_soft) # create DBL_model model = MLP(layers, input_space=ishape) if pklname != '' and os.path.isfile(pklname): # load and rebuild model layer_params = cPickle.load(open(pklname + '.cpu')) layer_id = 0 for layer in model.layers: if layer_id < len(layers) - 1: layer.set_weights(layer_params[layer_id][0]) layer.set_biases(layer_params[layer_id][1]) else: layer.set_weights(layer_params[layer_id][1]) layer.set_biases(layer_params[layer_id][0]) layer_id = layer_id + 1 DBL.model = model DBL.test_raw(newdata) else: algo_term = EpochCounter(500) # number of epoch iteration algo = SGD(learning_rate=0.001, batch_size=500, init_momentum=.5, monitoring_dataset=DBL.ds_valid, termination_criterion=algo_term) DBL.run_model(model, algo) # save the model if pklname != '': layer_params = [] for layer in layers: param = layer.get_params() print param print param[0].get_value() layer_params.append( [param[0].get_value(), param[1].get_value()]) #cPickle.dump(DBL,open(pklname, 'wb')) #cPickle.dump(layer_params, open(pklname + '.cpu', 'wb')) cPickle.dump(layer_params, open(pklname + '.cpu', 'wb')) print DBL.result_valid[1], DBL.result_test[1] return DBL.result_valid[1], DBL.result_test[1]
class CNN_NET(): def __init__(self,ishape): self.ishape = ishape def loaddata(self,basepath,id_data,nclass,ind_train,ind_valid,id_pre=0): """ from pylearn2.datasets.preprocessing import GlobalContrastNormalization pre = GlobalContrastNormalization(sqrt_bias = 10,use_std = 1) """ print "load" pre = None if id_pre == 1: from pylearn2.datasets.preprocessing import GlobalContrastNormalization pre = GlobalContrastNormalization(sqrt_bias = 10,use_std = 1) if id_data==0: self.data_train = Occ('train',nclass,basepath,ind_train,pre,self.ishape) self.data_valid = Occ('train',nclass,basepath,ind_valid,pre,self.ishape) elif id_data==1: self.data_train = ICML_emotion('train',nclass,basepath,ind_train,pre,self.ishape) self.data_valid = ICML_emotion('train',nclass,basepath,ind_valid,pre,self.ishape) elif id_data==2: print self.ishape self.data_train = Denoise('train',nclass,basepath,ind_train,pre,self.ishape) self.data_valid = Denoise('train',nclass,basepath,ind_valid,pre,self.ishape) def setup(self, p_layers, p_algo): # create conv layers self.DBL = DBL_model(self.ishape, p_layers, p_algo,{'valid': self.data_valid, 'train': self.data_train}) def train(self,pklname='tmp.pkl'): # load data #print data_valid.X.shape,data_valid.y.shape #print data_train.X.shape,data_train.y.shape self.DBL.train() # save the model if pklname!='': layer_params = [] for layer in self.DBL.layers: param = layer.get_params() print param print param[0].get_value() layer_params.append([param[0].get_value(), param[1].get_value()]) #cPickle.dump(DBL,open(pklname, 'wb')) #cPickle.dump(layer_params, open(pklname + '.cpu', 'wb')) cPickle.dump(layer_params, open(pklname + '.cpu', 'wb')) def test(pklname): # create DBL_model # load and rebuild model layer_params = cPickle.load(open(pklname + '.cpu')) layer_id = 0 for layer in model.layers: if layer_id < len(layers) - 1: layer.set_weights(layer_params[layer_id][0]) layer.set_biases(layer_params[layer_id][1]) else: layer.set_weights(layer_params[layer_id][1]) 
layer.set_biases(layer_params[layer_id][0]) layer_id = layer_id + 1
# NOTE(review): stray module-level duplicate of CNN_NET.setup -- it takes
# ``self`` but is defined outside any class, so it is almost certainly a
# copy/paste remnant that should be removed once confirmed unused.
def setup(self, p_layers, p_algo):
    # create conv layers
    self.DBL = DBL_model(self.ishape, p_layers, p_algo,
                         {'valid': self.data_valid, 'train': self.data_train})
def DBL_model_test1(basepath,cutoff=[-1,-1],pklname='',newdata=None): # data ishape = Conv2DSpace( shape = [48, 48], num_channels = 1 ) preproc=[0,0] nclass = 7 DBL = DBL_model(basepath,nclass,np.append(ishape.shape,1),preproc,cutoff) # create layers nk = [30] #nk = [40,30,20] ks = [[8,8],[5,5],[3,3]] ir = [0.05,0.05,0.05] ps = [[4,4],[4,4],[2,2]] pd = [[2,2],[2,2],[2,2]] kn = [0.9,0.9,0.9] layers = DBL_ConvLayers(nk,ks,ir,ps,pd,kn) layer_soft = Softmax( layer_name='y', #max_col_norm = 1.9365, n_classes = nclass, init_bias_target_marginals=DBL.ds_train, #istdev = .05 irange = .0 ) layers.append(layer_soft) # create DBL_model model = MLP(layers, input_space=ishape) if pklname!='' and os.path.isfile(pklname): # load and rebuild model layer_params = cPickle.load(open(pklname + '.cpu')) layer_id = 0 for layer in model.layers: if layer_id < len(layers) - 1: layer.set_weights(layer_params[layer_id][0]) layer.set_biases(layer_params[layer_id][1]) else: layer.set_weights(layer_params[layer_id][1]) layer.set_biases(layer_params[layer_id][0]) layer_id = layer_id + 1 DBL.model = model DBL.test_raw(newdata) else: algo_term = EpochCounter(500) # number of epoch iteration algo = SGD(learning_rate = 0.001, batch_size = 500, init_momentum = .5, monitoring_dataset = DBL.ds_valid, termination_criterion=algo_term ) DBL.run_model(model,algo) # save the model if pklname!='': layer_params = [] for layer in layers: param = layer.get_params() print param print param[0].get_value() layer_params.append([param[0].get_value(), param[1].get_value()]) #cPickle.dump(DBL,open(pklname, 'wb')) #cPickle.dump(layer_params, open(pklname + '.cpu', 'wb')) cPickle.dump(layer_params, open(pklname + '.cpu', 'wb')) print DBL.result_valid[1], DBL.result_test[1] return DBL.result_valid[1], DBL.result_test[1]