import logging
import os

# `hiarBayesGoogLeNet` is a project-local model builder; its import path is
# not shown in this fragment.


def __init__(self, model_def_file, image_height, image_width, raw_scale, gpu_mode):
    logging.info('Loading net and associated files...')
    if not gpu_mode:
        # Hide all GPUs so TensorFlow falls back to the CPU.
        os.environ['CUDA_VISIBLE_DEVICES'] = ""
    # Attribute indices grouped by semantic level. Indices 61-64 are the four
    # super-categories ('footwear', 'hair', 'lowerbody', 'upperbody') and are
    # left out of the low-level group here.
    self.low_level = [27, 32, 50, 56]  # , 61, 62, 63, 64
    self.mid_level = [0, 6, 7, 8, 9, 11, 12, 13, 17, 20, 21, 22, 23, 24, 25,
                      26, 28, 29, 30, 33, 35, 36, 37, 38, 39, 41, 42, 43, 44,
                      45, 46, 47, 48, 49, 51, 52, 53, 54, 55, 57, 58, 59, 60]
    self.high_level = [1, 2, 3, 4, 5, 10, 14, 15, 16, 18, 19, 31, 34, 40]
    self.net = hiarBayesGoogLeNet.build(
        image_height, image_width, 3,
        [len(self.low_level), len(self.mid_level), len(self.high_level)],
        weights="None")  # string "None" kept as in the original call
    self.net.load_weights(model_def_file)
    self.labels = [
        'accessoryHeadphone', 'personalLess15', 'personalLess30',
        'personalLess45', 'personalLess60', 'personalLarger60',
        'carryingBabyBuggy', 'carryingBackpack', 'hairBald', 'footwearBoots',
        'lowerBodyCapri', 'carryingOther', 'carryingShoppingTro',
        'carryingUmbrella', 'lowerBodyCasual', 'upperBodyCasual',
        'personalFemale', 'carryingFolder', 'lowerBodyFormal',
        'upperBodyFormal', 'accessoryHairBand', 'accessoryHat',
        'lowerBodyHotPants', 'upperBodyJacket', 'lowerBodyJeans',
        'accessoryKerchief', 'footwearLeatherShoes', 'upperBodyLogo',
        'hairLong', 'lowerBodyLongSkirt', 'upperBodyLongSleeve',
        'lowerBodyPlaid', 'lowerBodyThinStripes', 'carryingLuggageCase',
        'personalMale', 'carryingMessengerBag', 'accessoryMuffler',
        'accessoryNothing', 'carryingNothing', 'upperBodyNoSleeve',
        'upperBodyPlaid', 'carryingPlasticBags', 'footwearSandals',
        'footwearShoes', 'hairShort', 'lowerBodyShorts',
        'upperBodyShortSleeve', 'lowerBodyShortSkirt', 'footwearSneakers',
        'footwearStocking', 'upperBodyThinStripes', 'upperBodySuit',
        'carryingSuitcase', 'lowerBodySuits', 'accessorySunglasses',
        'upperBodySweater', 'upperBodyThickStripes', 'lowerBodyTrousers',
        'upperBodyTshirt', 'upperBodyOther', 'upperBodyVNeck',
        'footwear', 'hair', 'lowerbody', 'upperbody']
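# Usage sketch (hypothetical, not part of the original file): how the
# initializer above is typically driven. `AttributeClassifier` is an assumed
# name for the class this __init__ belongs to, and the weights path is an
# assumed example; the 160x75 input size matches the training code later in
# this repo.
#
#   clf = AttributeClassifier(
#       model_def_file='final_model.h5',   # assumed path to trained weights
#       image_height=160, image_width=75,  # size used by the training script
#       raw_scale=255,                     # pixel scale, Caffe-style convention
#       gpu_mode=False)                    # run on CPU
#   probs = clf.net.predict(batch)         # shape (n, len(clf.labels))
#   hits = [clf.labels[j] for j in np.where(probs[0] >= 0.5)[0]]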
# (The opening `if args.model == ...` branch is truncated in the source; the
# fragment resumes inside its build() call.)
        [len(low_level), len(mid_level), len(high_level)])
    loss_func = 'binary_crossentropy'  # weighted_categorical_crossentropy(alpha)
    loss_weights = None
    metrics = ['accuracy']
elif args.model == "hiarGoogLeNet_low":
    model = hiarGoogLeNet_low.build(
        image_width, image_height, 3,
        [len(low_level), len(mid_level), len(high_level)])
    loss_func = 'binary_crossentropy'  # weighted_categorical_crossentropy(alpha)
    loss_weights = None
    metrics = ['accuracy']
elif args.model == "hiarBayesGoogLeNet":
    model = hiarBayesGoogLeNet.build(
        image_height, image_width, 3,
        [len(low_level), len(mid_level), len(high_level)])
    # bayes_binary_crossentropy(alpha, y_train) / weighted_categorical_crossentropy(alpha)
    loss_func = 'binary_crossentropy'
    loss_func = weighted_binary_crossentropy(alpha)  # overrides the plain BCE above
    loss_weights = None
    metrics = ['accuracy']
elif args.model == "hiarBayesInception_v4":
    model = hiarBayesInception_v4(
        image_width, image_height, 3,
        [len(low_level), len(mid_level), len(high_level)])
    # bayes_binary_crossentropy(alpha, y_train) / weighted_categorical_crossentropy(alpha)
    loss_func = 'binary_crossentropy'
    loss_func = weighted_binary_crossentropy(alpha)  # overrides the plain BCE above
    loss_weights = None
    metrics = ['accuracy']
    metrics = [weighted_acc]  # overrides ['accuracy'] above
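# `weighted_binary_crossentropy(alpha)` and `weighted_acc` are referenced
# above but defined elsewhere in the repo. Below is a minimal sketch of what
# such helpers commonly look like for pedestrian-attribute training, assuming
# `alpha` is the vector of positive-label ratios per attribute; the repo's
# actual implementations may differ.
import numpy as np
from keras import backend as K


def weighted_binary_crossentropy(alpha):
    """Per-attribute BCE that up-weights rare positives (a sketch)."""
    a = np.asarray(alpha, dtype="float32")
    pos_w = K.constant(np.exp(1.0 - a))  # rare positives -> larger weight
    neg_w = K.constant(np.exp(a))        # frequent positives -> negatives weighted up

    def loss(y_true, y_pred):
        y_pred = K.clip(y_pred, K.epsilon(), 1.0 - K.epsilon())
        per_label = -(pos_w * y_true * K.log(y_pred)
                      + neg_w * (1.0 - y_true) * K.log(1.0 - y_pred))
        return K.mean(per_label, axis=-1)

    return loss


def weighted_acc(y_true, y_pred):
    """Balanced (label-wise) accuracy averaged over attributes (a sketch)."""
    y_hat = K.cast(K.greater_equal(y_pred, 0.5), "float32")
    pos_acc = K.sum(y_true * y_hat, axis=0) / (K.sum(y_true, axis=0) + K.epsilon())
    neg_acc = K.sum((1.0 - y_true) * (1.0 - y_hat), axis=0) / (K.sum(1.0 - y_true, axis=0) + K.epsilon())
    return K.mean(0.5 * (pos_acc + neg_acc))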
import numpy as np
import pandas as pd
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator

# `hiarBayesGoogLeNet` (model builder) and `mA` (mean-accuracy metric) are
# project-local imports; their module paths are not shown in this fragment.


def train(observation, action, save_name, batch_size_p=32, nb_epoch_p=50, ma_as_reward=True):
    # print(int(save_name[save_name.rindex('_') + 1:]))
    heavy_augmentation = True
    if heavy_augmentation:
        datagen = ImageDataGenerator(
            featurewise_center=False,
            samplewise_center=False,
            featurewise_std_normalization=False,
            samplewise_std_normalization=False,
            zca_whitening=False,
            rotation_range=45,
            width_shift_range=0.25,
            height_shift_range=0.25,
            horizontal_flip=True,
            vertical_flip=False,
            zoom_range=0.5,
            channel_shift_range=0.5,
            fill_mode='nearest')
    else:
        datagen = ImageDataGenerator(
            featurewise_center=False,
            samplewise_center=False,
            featurewise_std_normalization=False,
            samplewise_std_normalization=False,
            zca_whitening=False,
            rotation_range=0,
            width_shift_range=0.125,
            height_shift_range=0.125,
            horizontal_flip=True,
            vertical_flip=False,
            fill_mode='nearest')
    # Note: this repo treats the 160-pixel axis as "width"; the arrays below
    # are built as (width, height, channels) = (160, 75, 3).
    image_width = 160
    image_height = 75
    class_num = action.shape[0]
    filename = r"../results/PETA.csv"
    data = np.array(pd.read_csv(filename))[:, 1:]
    length = len(data)
    data_x = np.zeros((length, image_width, image_height, 3))
    data_y = np.zeros((length, class_num))
    # Assign each attribute to a semantic level from the action vector:
    # 0 -> low, 1 -> mid, 2 -> high.
    low_level = []
    mid_level = []
    high_level = []
    for i in range(action.shape[0]):
        if action[i] == 0:
            low_level.append(i)
        elif action[i] == 1:
            mid_level.append(i)
        elif action[i] == 2:
            high_level.append(i)
        else:
            print("ERROR ACTION!!!")
    print(low_level)
    print(mid_level)
    print(high_level)
    labels_list_file = r"/home/anhaoran/data/pedestrian_attributes_PETA/PETA/labels.txt"
    with open(labels_list_file) as labels_list_data:
        lines = labels_list_data.readlines()
    tmp_list = []
    for line in lines:
        dat = line.split()
        tmp_list.append(dat[1])
    # Keep only the 15 attributes used in this experiment.
    attributes_list = list(np.array(tmp_list)[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 27, 32, 40, 50, 56]])
    print("-------------------------------------------------------------------------")
    print("***Low level attributes: ")
    for i in low_level:
        print(attributes_list[i], end=", ")
    print("\n***Medium level attributes: ")
    for i in mid_level:
        print(attributes_list[i], end=", ")
    print("\n***High level attributes: ")
    for i in high_level:
        print(attributes_list[i], end=", ")
    print()
    print("-------------------------------------------------------------------------")
    # The hierarchical model needs at least one attribute per level; otherwise
    # skip training and hand back a negative reward.
    if len(low_level) == 0 or len(mid_level) == 0 or len(high_level) == 0:
        reward = -1
        return action, reward, reward >= 0.9 or int(save_name[save_name.rindex('_') + 1:]) >= 10
    for i in range(length):
        # target_size is (160, 75); the trailing channel entry in the original
        # call is not part of Keras' (height, width) tuple and has been dropped.
        img = image.load_img(data[i, 0], target_size=(image_width, image_height))
        data_x[i] = image.img_to_array(img)
        data_y[i] = np.array(data[i, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 28, 33, 41, 51, 57]], dtype="float32")
    # Reorder label columns to (low, mid, high) to match the network heads.
    data_y = data_y[:, list(np.hstack((low_level, mid_level, high_level)))]
    X_train = data_x[:9500]
    X_test = data_x[9500:11400]
    y_train = data_y[:9500]
    y_test = data_y[9500:11400]
    XX = data_x[11400:]
    yy = data_y[11400:]
    print("The shape of the X_train is: ", X_train.shape)
    print("The shape of the y_train is: ", y_train.shape)
    print("The shape of the X_test is: ", X_test.shape)
    print("The shape of the y_test is: ", y_test.shape)
    model = hiarBayesGoogLeNet.build(
        image_width, image_height, 3,
        [len(low_level), len(mid_level), len(high_level)])
    model.compile(loss='binary_crossentropy', optimizer='adam',
                  loss_weights=None, metrics=['accuracy'])
    batch_size = batch_size_p
    nb_epoch = nb_epoch_p
    train_generator = datagen.flow(X_train, y_train, batch_size=batch_size)
    # Note: the validation split goes through the same augmenting generator as
    # the training data (original behaviour, kept as-is).
    val_generator = datagen.flow(X_test, y_test, batch_size=batch_size)
    model.fit_generator(
        train_generator,
        steps_per_epoch=int(X_train.shape[0] / batch_size),
        epochs=nb_epoch,
        validation_data=val_generator,
        validation_steps=int(X_test.shape[0] / batch_size))
    # model.save_weights('../models/imagenet_models/' + save_name + '_final_model.h5')
    # Score the held-out split; mean accuracy (mA) becomes the reward.
    predictions_prob = model.predict(XX)
    predictions = np.array(predictions_prob >= 0.5, dtype="float64")
    label = yy
    reward = mA(predictions, label)
    return action, reward, True
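# Usage sketch: `action` is an RL-style vector with one entry per attribute
# (0 = low, 1 = mid, 2 = high), e.g. train(obs, np.array([0, 1, 2, ...]), 'run_3');
# the function returns (action, reward, done). `mA` above is project-local;
# for reference, here is a NumPy sketch of the standard label-based mean
# accuracy used on PETA (per-attribute average of positive and negative
# recall, averaged over attributes). The repo's own implementation may differ
# in details such as division-by-zero handling.
def mA_sketch(predictions, labels, eps=1e-12):
    predictions = np.asarray(predictions, dtype="float64")
    labels = np.asarray(labels, dtype="float64")
    tp = (predictions * labels).sum(axis=0)                  # true positives per attribute
    tn = ((1.0 - predictions) * (1.0 - labels)).sum(axis=0)  # true negatives per attribute
    pos = labels.sum(axis=0)                                 # positive examples per attribute
    neg = (1.0 - labels).sum(axis=0)                         # negative examples per attribute
    per_attribute = 0.5 * (tp / (pos + eps) + tn / (neg + eps))
    return per_attribute.mean()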