# Compare gradient descent, momentum, and Adam on a 3-layer model.
import plot_service, data_service
import optimization, opt_utils

plot_service.visualize_dataset()
train_X, train_Y = data_service.load_dataset()

# train 3-layer model
layers_dims = [train_X.shape[0], 5, 2, 1]
learning_rate = 0.0007
optimizers = ['gd', 'momentum', 'adam']

for optimizer in optimizers:
    parameters, costs = optimization.model(train_X, train_Y, layers_dims, optimizer=optimizer)
    plot_service.plot_loss_per_iteration_for_learning_rate(costs, learning_rate)

    # evaluate on the training set and visualize the learned decision boundary
    predictions = opt_utils.predict(train_X, train_Y, parameters)
    plot_service.plot_decision_boundary(
        lambda x: opt_utils.predict_dec(parameters, x.T),
        train_X, train_Y,
        "Model with {0} optimization".format(optimizer))
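# For reference, a minimal sketch of the three parameter-update rules the
# `optimizer` flag selects between. This is not the optimization module's
# actual API; function names and hyperparameter defaults (beta, beta1, beta2,
# epsilon) follow the common textbook convention and are assumptions.
import numpy as np

def update_gd(w, dw, lr):
    # plain gradient descent step
    return w - lr * dw

def update_momentum(w, dw, v, lr, beta=0.9):
    # exponentially weighted average of past gradients
    v = beta * v + (1 - beta) * dw
    return w - lr * v, v

def update_adam(w, dw, v, s, t, lr, beta1=0.9, beta2=0.999, epsilon=1e-8):
    # first moment (momentum-like) and second moment (RMSprop-like), bias-corrected
    v = beta1 * v + (1 - beta1) * dw
    s = beta2 * s + (1 - beta2) * np.square(dw)
    v_hat = v / (1 - beta1 ** t)
    s_hat = s / (1 - beta2 ** t)
    return w - lr * v_hat / (np.sqrt(s_hat) + epsilon), v, s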
# Train and evaluate a ResNet-50 classifier on 64x64 RGB images with 6 classes.
from res_net import ResNet50
import keras.backend as K
import data_service
from keras.utils import plot_model
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot

K.set_image_data_format('channels_last')
K.set_learning_phase(1)

# build and compile the model
model = ResNet50(input_shape=(64, 64, 3), classes=6)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# load and preprocess the dataset
X_train_orig, Y_train_orig, X_test_orig, Y_test_orig, classes = data_service.load_dataset()
X_train, Y_train, X_test, Y_test = data_service.preprocess_data(
    X_train_orig, Y_train_orig, X_test_orig, Y_test_orig)

# train, then report loss and accuracy on the test set
model.fit(X_train, Y_train, epochs=2, batch_size=32)
preds = model.evaluate(X_test, Y_test)
print("Loss = " + str(preds[0]))
print("Test Accuracy = " + str(preds[1]))

# inspect and visualize the architecture
model.summary()
plot_model(model, to_file='plots/model.png')
SVG(model_to_dot(model).create(prog='dot', format='svg'))
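# The res_net module itself is not shown here. For orientation, a minimal
# sketch of the identity block that a ResNet-50 stacks: a three-convolution
# main path plus a skip connection added back before the final activation.
# Layer hyperparameters and the function name are illustrative assumptions,
# not taken from the res_net module.
from keras.layers import Conv2D, BatchNormalization, Activation, Add

def identity_block(X, f, filters):
    """Main path of three convolutions; the input is added back via a shortcut."""
    F1, F2, F3 = filters
    X_shortcut = X

    # main path: 1x1 -> fxf -> 1x1 convolutions, each followed by batch norm
    X = Conv2D(F1, (1, 1), strides=(1, 1), padding='valid')(X)
    X = BatchNormalization(axis=3)(X)
    X = Activation('relu')(X)

    X = Conv2D(F2, (f, f), strides=(1, 1), padding='same')(X)
    X = BatchNormalization(axis=3)(X)
    X = Activation('relu')(X)

    X = Conv2D(F3, (1, 1), strides=(1, 1), padding='valid')(X)
    X = BatchNormalization(axis=3)(X)

    # skip connection: add the shortcut, then apply the final ReLU
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)
    return X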
# Build and compile the HappyModel binary classifier.
import keras.backend as K
import data_service, plot_service
from happy_model import HappyModel
from keras.utils import plot_model
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot

K.set_image_data_format('channels_last')

# load the dataset and show an example training image
X_train, Y_train, X_test, Y_test, classes = data_service.load_dataset()
plot_service.plot_training_image(2, X_train, Y_train)

# normalize pixel values and reshape the label arrays
X_train, Y_train, X_test, Y_test = data_service.norm_and_reshape(
    X_train, Y_train, X_test, Y_test)

print("number of training examples = " + str(X_train.shape[0]))
print("number of test examples = " + str(X_test.shape[0]))
print("X_train shape: " + str(X_train.shape))
print("Y_train shape: " + str(Y_train.shape))
print("X_test shape: " + str(X_test.shape))
print("Y_test shape: " + str(Y_test.shape))

# build and compile the model on the training image shape
m, rows, cols, channels = X_train.shape
happyModel = HappyModel((rows, cols, channels))
happyModel.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
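# The script above stops after compile. A plausible continuation, mirroring
# the training/evaluation/visualization calls in the ResNet script; the epoch
# count, batch size, and output path are illustrative assumptions.
happyModel.fit(X_train, Y_train, epochs=10, batch_size=16)
preds = happyModel.evaluate(X_test, Y_test)
print("Loss = " + str(preds[0]))
print("Test Accuracy = " + str(preds[1]))

happyModel.summary()
plot_model(happyModel, to_file='plots/happy_model.png')
SVG(model_to_dot(happyModel).create(prog='dot', format='svg'))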