def conTestImageBoundaryGenerator(filepath, batch_size, depth, num_classes, modality):
    """Yield one continuous test video at a time together with its per-frame
    gesture-boundary labels (1 = frame lies on or next to a gesture transition)."""
    X_test = data.load_con_video_list(filepath)
    X_teidx = np.asarray(np.arange(0, len(X_test)), dtype=np.int32)
    while 1:
        for X_indices, _ in minibatches(X_teidx, X_teidx, batch_size, shuffle=False):
            # Read data for each batch
            video_label = []
            idx = X_indices[0]
            video_path = X_test[idx].split(' ')[0]
            segcnt = len(X_test[idx].split(' '))
            starti = endi = 0
            # Expand the "start,end:label" segments into a per-frame label list.
            for i in range(1, segcnt):
                seginfo = X_test[idx].split(' ')[i]
                starti = int(seginfo.split(',')[0])
                if starti <= endi:
                    starti = endi + 1
                endi = int(seginfo.split(',')[1].split(':')[0])
                label = int(seginfo.split(',')[1].split(':')[1]) - 1
                for j in range(starti, endi+1):
                    video_label.append(label)
            if endi != len(video_label):
                print 'invalid: endi - %d, len(video_label) - %d' % (endi, len(video_label))
            video_fcnt = len(video_label)
            if len(video_label) <= depth:
                video_olen = len(video_label)
            else:
                video_olen = depth
            is_training = False  # Testing
            if modality == 0:    # RGB
                X_data_t, y_label = data.prepare_con_rgb_data(video_path, video_fcnt, video_olen, video_label, is_training)
            if modality == 1:    # Depth
                X_data_t, y_label = data.prepare_con_depth_data(video_path, video_fcnt, video_olen, video_label, is_training)
            if modality == 2:    # Flow
                X_data_t, y_label = data.prepare_con_flow_data(video_path, video_fcnt, video_olen, video_label, is_training)
            # Mark a 4-frame window around every label transition as a boundary,
            # plus the first and last two frames of the video.
            y_bound = np.zeros((len(y_label),), dtype=np.int32)
            for idx in range(2, len(y_label)-2):
                if y_label[idx-1] == y_label[idx] and y_label[idx+1] == y_label[idx+2] and y_label[idx] != y_label[idx+1]:
                    y_bound[idx-1] = 1
                    y_bound[idx] = 1
                    y_bound[idx+1] = 1
                    y_bound[idx+2] = 1
            y_bound[0] = y_bound[1] = 1
            y_bound[len(y_label)-1] = y_bound[len(y_label)-2] = 1
            yield (np.reshape(X_data_t, (1, video_olen, 112, 112, 3)), y_bound)
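# Illustrative sketch (not used by the pipeline above): the boundary rule in
# conTestImageBoundaryGenerator flags a 4-frame window around every gesture
# transition plus the first/last two frames. The helper below isolates that rule
# so it can be checked on a toy label sequence; the function name is hypothetical.
def mark_boundaries_demo(y_label):
    y_bound = np.zeros((len(y_label),), dtype=np.int32)
    for idx in range(2, len(y_label)-2):
        if y_label[idx-1] == y_label[idx] and y_label[idx+1] == y_label[idx+2] \
                and y_label[idx] != y_label[idx+1]:
            y_bound[idx-1:idx+3] = 1      # same four assignments as above, written as a slice
    y_bound[0] = y_bound[1] = 1
    y_bound[len(y_label)-1] = y_bound[len(y_label)-2] = 1
    return y_bound

# Two 5-frame gestures (labels 3 then 7): the transition between frames 4 and 5
# and both video ends are flagged.
# mark_boundaries_demo([3]*5 + [7]*5)  ->  [1 1 0 1 1 1 1 0 1 1]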
def conTestImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Yield batches of isolated gesture clips cut from the continuous test
    videos, together with one-hot class labels."""
    X_test = data.load_con_video_list(filepath)
    X_teidx = np.asarray(np.arange(0, len(X_test)), dtype=np.int32)
    while 1:
        for X_indices, _ in minibatches(X_teidx, X_teidx, batch_size, shuffle=False):
            # Read data for each batch
            image_path = []
            image_fcnt = []
            image_olen = []
            image_start = []
            is_training = []
            y_label_t = []
            for data_a in range(batch_size):
                idx = X_indices[data_a]
                video_path = X_test[idx].split(' ')[0]
                starti = int(X_test[idx].split(' ')[1].split(',')[0])
                endi = int(X_test[idx].split(' ')[1].split(',')[1].split(':')[0])
                label = int(X_test[idx].split(' ')[1].split(',')[1].split(':')[1]) - 1
                image_path.append(video_path)
                image_fcnt.append(endi - starti + 1)
                image_olen.append(depth)
                image_start.append(starti)
                is_training.append(False)  # Testing
                y_label_t.append(label)
            image_info = zip(image_path, image_fcnt, image_olen, image_start, is_training)
            # Load the clips in parallel for the selected modality.
            if modality == 0:    # RGB
                X_data_t = threading_data([_ for _ in image_info], data.prepare_iso_rgb_data)
            elif modality == 1:  # Depth
                X_data_t = threading_data([_ for _ in image_info], data.prepare_iso_depth_data)
            elif modality == 2:  # Flow
                X_data_t = threading_data([_ for _ in image_info], data.prepare_iso_flow_data)
            y_hot_label_t = keras.utils.to_categorical(y_label_t, num_classes=num_classes)
            yield (X_data_t, y_hot_label_t)
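# Illustrative usage sketch (assumptions: `model` is a compiled Keras network
# already loaded with trained weights, and the list file uses the
# "video_path start,end:label" format parsed above). The generator loops
# forever, so the number of steps is derived from the list length.
# test_list = data.load_con_video_list('./dataset_splits/ConGD/valid_rgb_isolist.txt')
# steps = len(test_list) / 8                   # 8 = batch_size; drops a trailing partial batch
# gen = conTestImageGenerator('./dataset_splits/ConGD/valid_rgb_isolist.txt',
#                             batch_size=8, depth=32, num_classes=249, modality=0)  # 0 = RGB
# probs = model.predict_generator(gen, steps=steps)
# pred_labels = np.argmax(probs, axis=-1)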
from datetime import datetime

RGB = 0
nb_epoch = 10
init_epoch = 0
depth = 32
batch_size = 8
num_classes = 249
weight_decay = 0.00005
dataset_name = 'congr_rcm_rgb'
training_datalist = './dataset_splits/ConGD/train_rgb_isolist.txt'
testing_datalist = './dataset_splits/ConGD/valid_rgb_isolist.txt'
model_prefix = '.'
weights_file = '%s/trained_models/rcm/%s_weights.{epoch:02d}-{val_loss:.2f}.h5' % (model_prefix, dataset_name)

train_data = data.load_con_video_list(training_datalist)
train_steps = len(train_data)/batch_size
test_data = data.load_con_video_list(testing_datalist)
test_steps = len(test_data)/batch_size
print 'nb_epoch: %d - depth: %d - batch_size: %d - weight_decay: %.6f' % (nb_epoch, depth, batch_size, weight_decay)

def lr_polynomial_decay(global_step):
    """Polynomial decay of the learning rate from 1e-3 to 1e-6 over the whole run."""
    learning_rate = 0.001
    end_learning_rate = 0.000001
    decay_steps = train_steps*nb_epoch
    power = 0.9
    p = float(global_step)/float(decay_steps)
    lr = (learning_rate - end_learning_rate)*np.power(1-p, power) + end_learning_rate
    if global_step > 0:
        curtime = '%s' % datetime.now()
        info = ' - lr: %.6f @ %s %d' % (lr, curtime.split('.')[0], global_step)
        print info
    return lr
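# Illustrative sketch (not the original training loop): lr_polynomial_decay is a
# per-step schedule, so one way to apply it is a small custom callback that
# updates the optimizer's learning rate before every batch. The callback name,
# `model`, `train_gen`, and `valid_gen` below are assumptions.
import keras
import keras.backend as K

class PolynomialDecayCallback(keras.callbacks.Callback):
    def __init__(self):
        super(PolynomialDecayCallback, self).__init__()
        self.global_step = init_epoch * train_steps   # resume point when restarting training
    def on_batch_begin(self, batch, logs=None):
        # Push the decayed rate into the optimizer before the batch runs.
        K.set_value(self.model.optimizer.lr, lr_polynomial_decay(self.global_step))
        self.global_step += 1

# model.fit_generator(train_gen, steps_per_epoch=train_steps, epochs=nb_epoch,
#                     initial_epoch=init_epoch,
#                     callbacks=[PolynomialDecayCallback(),
#                                keras.callbacks.ModelCheckpoint(weights_file)],
#                     validation_data=valid_gen, validation_steps=test_steps)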