Example #1
# The block below is the tail of a Sequential-style CNN kept commented out;
# its opening triple-quote was truncated in the source and is restored here.
'''
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Flatten())  # this converts our 3D feature maps to 1D feature vectors
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))
'''
# mini_XCEPTION is the project's model-definition function; the import path
# below is an assumption (the snippet does not show it).
from models.cnn import mini_XCEPTION

input_shape = (150, 150, 3)
num_classes = 4

log_file = open('/home/ubuntu/BTP_git/logttt.txt', 'a')  # appends run logs across iterations

model = mini_XCEPTION(input_shape, num_classes)

# `ite` is an iteration counter assumed to come from an enclosing loop;
# after the first round, resume from the previous round's weights.
if ite:
    model.load_weights("/home/ubuntu/BTP_git/weights/testtt/" + "model_%d.h5" %
                       (ite - 1))

model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

model.fit(imgs_d_train, final_label_train, batch_size=100, epochs=3)

test_acc = model.evaluate(imgs_d_test, final_label_test)  # returns [loss, accuracy]
print(test_acc)
log_file.write("iter" + str(ite) + "\n")
log_file.write("acc" + str(test_acc) + "\n")
log_file.close()

Example #2

# Imports assumed by this snippet; mini_XCEPTION's module path is a guess.
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import CSVLogger, EarlyStopping, ReduceLROnPlateau
from models.cnn import mini_XCEPTION

num_classes = 7
patience = 50
base_path = '../trained_models/emotion_models/'

# data generator
data_generator = ImageDataGenerator(
                        featurewise_center=False,
                        featurewise_std_normalization=False,
                        rotation_range=10,
                        width_shift_range=0.1,
                        height_shift_range=0.1,
                        zoom_range=.1,
                        horizontal_flip=True)

# model parameters/compilation
# input_shape is defined in lines truncated from this snippet (the companion
# Example #3 uses (48, 48, 1) for the same emotion task).
model = mini_XCEPTION(input_shape, num_classes)
model.compile(optimizer='adam', loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()


datasets = ['fer2013']
for dataset_name in datasets:
    print('Training dataset:', dataset_name)

    # callbacks
    log_file_path = base_path + dataset_name + '_emotion_training.log'
    csv_logger = CSVLogger(log_file_path, append=False)
    early_stop = EarlyStopping('val_loss', patience=patience)
    reduce_lr = ReduceLROnPlateau('val_loss', factor=0.1,
                                  patience=int(patience/4), verbose=1)
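The snippet is cut off after reduce_lr, before the callbacks reach training; Example #4 below shows the full pattern. A minimal sketch of the usual continuation, where train_faces, train_emotions, val_faces, val_emotions, and num_epochs are hypothetical names not defined in this snippet (recent tf.keras accepts the generator directly in fit()):

    # Assumed continuation: collect the callbacks and hand them to fit().
    callbacks = [csv_logger, early_stop, reduce_lr]
    model.fit(data_generator.flow(train_faces, train_emotions, batch_size=32),
              epochs=num_epochs,
              callbacks=callbacks,
              validation_data=(val_faces, val_emotions))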

Example #3
# Imports assumed by this snippet; mini_XCEPTION's module path is a guess.
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import (CSVLogger, EarlyStopping, ReduceLROnPlateau,
                             ModelCheckpoint)
from models.cnn import mini_XCEPTION

num_epochs = 10000
input_shape = (48, 48, 1)
validation_split = .2
verbose = 1
num_labels = 7
patience = 40
base_path = 'models/'

datagen = ImageDataGenerator(featurewise_center=True,
                             featurewise_std_normalization=True,
                             rotation_range=20,
                             width_shift_range=0.2,
                             height_shift_range=0.2,
                             horizontal_flip=True)

model = mini_XCEPTION(input_shape, num_labels)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()

log_file_path = base_path + '_emotion_training.log'
csv_logger = CSVLogger(log_file_path, append=False)
early_stop = EarlyStopping('val_loss', patience=patience)
reduce_lr = ReduceLROnPlateau('val_loss',
                              factor=0.1,
                              patience=int(patience / 4),
                              verbose=1)
trained_models_path = base_path + '_mini_XCEPTION'
model_names = trained_models_path + '.{epoch:02d}-{val_acc:.2f}.hdf5'
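# ModelCheckpoint expands the braces from the training logs at save time,
# producing names like 'models/_mini_XCEPTION.05-0.61.hdf5' (illustrative).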
# The call was truncated in the source; the remaining arguments are assumed
# to mirror the parallel ModelCheckpoint call in Example #4.
model_checkpoint = ModelCheckpoint(model_names,
                                   monitor='val_loss',
                                   verbose=1,
                                   save_best_only=True)

Example #4

# numpy and the Keras imports below are assumed; DataManager, ImageGenerator,
# split_imdb_data, args and mini_XCEPTION are project-specific helpers whose
# modules the snippet does not show.
import numpy as np
from keras.models import load_model
from keras.callbacks import (CSVLogger, EarlyStopping, ReduceLROnPlateau,
                             ModelCheckpoint)


def main():
    # parameters
    param = args()
    batch_size = param.batch_size
    num_epochs = param.num_epochs
    validation_split = param.val_ratio
    do_random_crop = False
    patience = param.patience
    dataset_name = param.dataset_name
    grayscale = param.graymode
    mode = param.mode
    anno_file = param.anno_file
    if mode == "gender":
        num_classes = 2
    elif mode == "age":
        num_classes = 101
    elif mode == "emotion":
        num_classes = 7
    else:
        num_classes = 5
    if grayscale:
        input_shape = (64, 64, 1)
    else:
        input_shape = (64, 64, 3)
    images_path = param.img_dir
    log_file_path = '../trained_models/%s_models/%s_model/training.log' % (
        mode, dataset_name)
    trained_models_path = '../trained_models/%s_models/%s_model/%s_mini_XCEPTION' % (
        mode, dataset_name, mode)
    pretrained_model = param.load_model
    print("-------begin to load data------", input_shape)
    # data loader
    data_loader = DataManager(dataset_name, anno_file)
    ground_truth_data = data_loader.get_data()
    train_keys, val_keys = split_imdb_data(ground_truth_data, validation_split)
    print('Number of training samples:', len(train_keys))
    print('Number of validation samples:', len(val_keys))
    train_image_generator = ImageGenerator(ground_truth_data,
                                           batch_size,
                                           input_shape[:2],
                                           train_keys,
                                           path_prefix=images_path,
                                           grayscale=grayscale)
    val_image_generator = ImageGenerator(ground_truth_data,
                                         batch_size,
                                         input_shape[:2],
                                         val_keys,
                                         path_prefix=images_path,
                                         grayscale=grayscale)

    # model parameters/compilation
    if pretrained_model is not None:
        model = load_model(pretrained_model, compile=False)
        print("pretrained model:", model.input_shape)
    else:
        model = mini_XCEPTION(input_shape, num_classes)
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.summary()

    # model callbacks
    early_stop = EarlyStopping('val_acc', patience=patience)
    reduce_lr = ReduceLROnPlateau('val_acc',
                                  factor=0.1,
                                  patience=int(patience),
                                  verbose=1,
                                  min_lr=0.0000001)
    csv_logger = CSVLogger(log_file_path, append=False)
    model_names = trained_models_path + '.{epoch:02d}-{val_acc:.2f}.hdf5'
    model_checkpoint = ModelCheckpoint(model_names,
                                       monitor='val_acc',
                                       verbose=1,
                                       save_best_only=True,
                                       save_weights_only=False)
    callbacks = [model_checkpoint, csv_logger, early_stop, reduce_lr]

    # training model
    print("-----begin to train model----")
    model.fit_generator(
        train_image_generator.flow(),
        steps_per_epoch=int(np.ceil(len(train_keys) / batch_size)),
        epochs=num_epochs,
        verbose=1,
        callbacks=callbacks,
        validation_data=val_image_generator.flow(),
        validation_steps=int(np.ceil(len(val_keys) / batch_size)))
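
Example #4 depends on an args() helper that the snippet does not show. A minimal sketch of what it presumably looks like, with flag names inferred from the attribute accesses above and every default a guess:

import argparse

def args():
    # Hypothetical reconstruction: only the attribute names are taken from
    # the snippet; flag spellings and defaults are assumptions.
    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', type=int, default=32)
    parser.add_argument('--num_epochs', type=int, default=1000)
    parser.add_argument('--val_ratio', type=float, default=0.2)
    parser.add_argument('--patience', type=int, default=50)
    parser.add_argument('--dataset_name', default='imdb')
    parser.add_argument('--graymode', action='store_true')
    parser.add_argument('--mode', default='gender')
    parser.add_argument('--anno_file', required=True)
    parser.add_argument('--img_dir', required=True)
    parser.add_argument('--load_model', default=None)
    return parser.parse_args()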