Пример #1
0
def shared_label():
 
   savepath = '/research1/YOON/ECCV2016/keras/result/42x42_dropout/'
   batch_size = 128
   model=makenetwork.sharednet_label_dropout()
   #sharednet,model1,model2,model3 = makenetwork.eccvmodel_label()
   sgd =SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) 
   model.compile(optimizer='sgd',loss={'out':'categorical_crossentropy'})


   # load trained network
   trainednet = glob.glob(savepath+'*hdf5')
   if len(trainednet):
       trainednet.sort()
       print('load pretrained net:',trainednet[-1])
       model.load_weights(trainednet[-1])

   
   h5trainpath = '/research1/YOON/ECCV2016/42x42_2/h5_train/'
   h5files = glob.glob(h5trainpath+'*.h5')
   h5files.sort()
   nbh5files = len(h5files)
   

   h5valpath = '/research1/YOON/ECCV2016/42x42_2/h5_test/'
   h5valfiles = glob.glob(h5valpath+'*.h5')
   h5valfiles.sort()
   nbh5valfiles = len(h5valfiles)
   for idx in range(nbh5valfiles):
       f =h5py.File(h5valfiles[idx],'r')
       if idx == 0:
           input1_val = f['data1'][()]
           input2_val = f['data2'][()]
           label_val = f['label'][()]        
           label_val=label_val.astype('int32')	
	   datasize = input1_val.shape[0]
           label_val = np.reshape(label_val,[datasize])
	   label_val = np_utils.to_categorical(label_val, 13)
           f.close()
       else:
	   tmpinput1 = f['data1'][()]
           tmpinput2 = f['data2'][()]
           tmplabel = f['label'][()]        
           tmplabel = tmplabel.astype('int32')	
	   datasize = tmpinput1.shape[0]
           tmplabel = np.reshape(tmplabel,[datasize])
	   tmplabel = np_utils.to_categorical(tmplabel, 13)
	   input1_val=np.concatenate((input1_val,tmpinput1),axis=0)
           input2_val=np.concatenate((input2_val,tmpinput2),axis=0)
           label_val=np.concatenate((label_val,tmplabel),axis=0)
	   f.close()
 
   for iter in range(nb_epoch):
       s =0
       total_trainacc=0.0
       traincount =0
       print('---------------epoch %d------------') % iter
       for idx in xrange(5,nbh5files,5):
           input1 =[]
           input2=[]
           label =[]
           count =0
           print (' %d/%d') % (idx,nbh5files)
           
           traincount +=1 	
           order = np.random.permutation(range(s,idx))
	   for idx2 in order:
               f =h5py.File(h5files[idx2],'r')
               if count == 0:
                   input1 = f['data1'][()]
	           input2 = f['data2'][()]
	           label = f['label'][()]        
        	   label=label.astype('int32')	
		   datasize = input1.shape[0]
	           label = np.reshape(label,[datasize])
		   label = np_utils.to_categorical(label, 13)
	           f.close()
	           count+=1
               else:
	           tmpinput1 = f['data1'][()]
        	   tmpinput2 = f['data2'][()]
         	   tmplabel = f['label'][()]        
	           tmplabel = tmplabel.astype('int32')	
    		   datasize = tmpinput1.shape[0]
	           tmplabel = np.reshape(tmplabel,[datasize])
		   tmplabel = np_utils.to_categorical(tmplabel, 13)
		   input1=np.concatenate((input1,tmpinput1),axis=0)
	           input2=np.concatenate((input2,tmpinput2),axis=0)
	           label=np.concatenate((label,tmplabel),axis=0)
		   f.close()
                   count+=1
           s= idx 	

           model.fit({'input1':input1,'input2':input2,'out':label},batch_size=batch_size,nb_epoch=1,shuffle=False,verbose=1)
           train_out= model.predict({'input1':input1,'input2':input2},batch_size=batch_size,verbose=1)      
           out = np.argmax(train_out['out'],axis=-1)
           train_acc =getAP.loss(out,label) 
           total_trainacc +=train_acc
       total_trainacc/=traincount

       print('train acc:',total_trainacc)
       val_out= model.predict({'input1':input1_val,'input2':input2_val},batch_size=batch_size,verbose=1)      
       val_out = np.argmax(val_out['out'],axis=-1)
       val_acc =getAP.loss(val_out,label_val) 
       print('val_acc:',val_acc)
       #val_out= model.evaluate({'input1':input1_val,'input2':input2_val,'out':label_val},batch_size=batch_size,verbose=1)      
       #curAP,acc = getAP(val_out,label_val)
       #val_out = val_out['out']
       #val_loss= model.evaluate({'input1':input1_val,'input2':input2_val,'out':label_val},batch_size=batch_size,verbose=1)      
       
       #print('val loss:',val_loss)
       savenum = iter + len(trainednet) 
       savename = "%05d" % savenum
       model.save_weights(savepath+'model_'+savename+'.hdf5', overwrite=True)
       gc.collect()
Пример #2
0
def loadnet(trainednet= None):
  model = makenetwork.sharednet_label_dropout()
  if trainednet:
   	model.load_weights(trainednet)
	print('load pretrained network:',trainednet) 
  return model