early_stopping_callback = EarlyStoppingCallback(monitor='accuracy',
                                                min_delta=0,
                                                patience=patience,
                                                verbose=verbose)
learning_rate_callback = LearningRateCallback(monitor='accuracy',
                                              min_delta=0,
                                              patience=patience_lr,
                                              verbose=verbose)
callbacks = [metrics_callback,
             checkpoint_callback,
             early_stopping_callback,
             learning_rate_callback]

# Generate the architecture
model = model_generate(2)

# Configure the learning process by compiling the network
model.compile(optimizer='adam', loss='categorical_crossentropy')
model.summary()  # summary() already prints; wrapping it in print() would just print None

# Train the model for a fixed number of epochs
model.fit(X_train, Y_train,
          batch_size=batch,
          verbose=verbose,
          callbacks=callbacks,
          validation_data=(X_test, Y_test),
          epochs=epochs)
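# EarlyStoppingCallback and LearningRateCallback are not Keras built-ins, so
# they would be custom classes defined before the code above. What follows is
# a minimal sketch of what they might look like, assuming they subclass
# keras.callbacks.Callback and that the monitored 'accuracy' value is present
# in `logs` at epoch end (e.g., written there by metrics_callback or by
# compiling with metrics=['accuracy']). The class names match the usage
# above; the implementation details are assumptions, not the author's code.
import math
from keras.callbacks import Callback
from keras import backend as K

class EarlyStoppingCallback(Callback):
    """Stop training when the monitored metric stops improving."""
    def __init__(self, monitor='accuracy', min_delta=0, patience=0, verbose=0):
        super().__init__()
        self.monitor = monitor
        self.min_delta = min_delta
        self.patience = patience
        self.verbose = verbose
        self.best = -math.inf
        self.wait = 0

    def on_epoch_end(self, epoch, logs=None):
        current = (logs or {}).get(self.monitor)
        if current is None:
            return
        if current > self.best + self.min_delta:
            self.best = current
            self.wait = 0
        else:
            self.wait += 1
            if self.wait >= self.patience:
                if self.verbose:
                    print('Epoch %d: early stopping' % epoch)
                self.model.stop_training = True

class LearningRateCallback(Callback):
    """Halve the learning rate when the monitored metric plateaus."""
    def __init__(self, monitor='accuracy', min_delta=0, patience=0,
                 verbose=0, factor=0.5):
        super().__init__()
        self.monitor = monitor
        self.min_delta = min_delta
        self.patience = patience
        self.verbose = verbose
        self.factor = factor
        self.best = -math.inf
        self.wait = 0

    def on_epoch_end(self, epoch, logs=None):
        current = (logs or {}).get(self.monitor)
        if current is None:
            return
        if current > self.best + self.min_delta:
            self.best = current
            self.wait = 0
        else:
            self.wait += 1
            if self.wait >= self.patience:
                old_lr = float(K.get_value(self.model.optimizer.lr))
                K.set_value(self.model.optimizer.lr, old_lr * self.factor)
                if self.verbose:
                    print('Epoch %d: lr reduced to %f' % (epoch, old_lr * self.factor))
                self.wait = 0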
Y_valid_age = '../data/Age/Valid/final_labels_data.npy'
Y_valid_age = np.load(Y_valid_age)

if __name__ == '__main__':
    # Parameters
    batch = 256
    epochs = 10000
    verbose = 1
    input_shape = (64, 64, 1)
    learning_rate = 0.01
    decay = 1e-6
    momentum = 0.9

    # Generate the architecture
    emotion_model, gender_model, age_model = model_generate()

    # Stochastic gradient descent optimizer
    sgd = optimizers.SGD(lr=learning_rate, decay=decay,
                         momentum=momentum, nesterov=True)

    # Configure the learning process by compiling each network
    emotion_model.compile(optimizer=sgd, loss='categorical_crossentropy')
    gender_model.compile(optimizer='adam', loss='categorical_crossentropy')
    age_model.compile(optimizer='adam', loss='mean_absolute_error')

    emotion_accuracy = -math.inf
    gender_accuracy = -math.inf
    age_mae = math.inf
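    # The -inf / +inf initializations above track the best validation score
    # seen so far, which points to a manual best-model checkpoint step later
    # in the training loop. Below is a minimal sketch of that step for two of
    # the models, under stated assumptions: X_valid_emotion, Y_valid_emotion
    # and X_valid_age are hypothetical validation arrays (only Y_valid_age is
    # loaded above), the .h5 file names are illustrative, and gender would
    # follow the same pattern as emotion. This is not the author's code.
    emotion_preds = emotion_model.predict(X_valid_emotion, batch_size=batch)
    emotion_acc = np.mean(np.argmax(emotion_preds, axis=1) ==
                          np.argmax(Y_valid_emotion, axis=1))
    if emotion_acc > emotion_accuracy:   # accuracy: higher is better
        emotion_accuracy = emotion_acc
        emotion_model.save('emotion_model_best.h5')

    age_preds = age_model.predict(X_valid_age, batch_size=batch)
    age_err = float(np.mean(np.abs(age_preds.ravel() - Y_valid_age.ravel())))
    if age_err < age_mae:                # MAE: lower is better
        age_mae = age_err
        age_model.save('age_model_best.h5')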