def main():
    (X_train, y_train), (X_val, y_val), (X_test, y_test) = load_data()
    print(X_train.shape, y_train.shape, X_val.shape, y_val.shape,
          X_test.shape, y_test.shape)

    if DO_TRAINING:
        print('Building model...')
        model = build_model(13, 20, 3)
        # # define the loss function & optimizer that model should use
        # criterion = nn.CrossEntropyLoss()
        # optimizer = torch.optim.SGD(model.parameters(), lr=LEARNING_RATE, nesterov=True,
        #                             momentum=0.9, dampening=0, weight_decay=L2_REG)
        # model.compile(loss=criterion, optimizer=optimizer, metrics=['acc'])
        print(model.summary())

        # train model
        print('Training model...')
        hist = model.fit(X_train, y_train, validation_data=(X_val, y_val),
                         epochs=NUM_EPOCHS, batch_size=BATCH_SIZE)
        kru.show_plots(hist.history, metric='accuracy', plot_title='Training metrics')

        # evaluate model performance on train/eval & test datasets
        print('\nEvaluating model performance...')
        loss, acc = model.evaluate(X_train, y_train, verbose=0)
        print('  Training dataset  -> loss: %.4f - acc: %.4f' % (loss, acc))
        loss, acc = model.evaluate(X_val, y_val, verbose=0)
        print('  Cross-val dataset -> loss: %.4f - acc: %.4f' % (loss, acc))
        loss, acc = model.evaluate(X_test, y_test, verbose=0)
        print('  Test dataset      -> loss: %.4f - acc: %.4f' % (loss, acc))

        # save model state
        kru.save_model(model, MODEL_SAVE_NAME)
        del model

    if DO_PREDICTION:
        print('\nRunning predictions...')
        # load model state from .pt file
        model = kru.load_model(MODEL_SAVE_NAME)
        print(model.summary())

        print('\nEvaluating model performance...')
        loss, acc = model.evaluate(X_train, y_train)
        print('  Training dataset  -> loss: %.4f - acc: %.4f' % (loss, acc))
        loss, acc = model.evaluate(X_val, y_val)
        print('  Cross-val dataset -> loss: %.4f - acc: %.4f' % (loss, acc))
        loss, acc = model.evaluate(X_test, y_test)
        print('  Test dataset      -> loss: %.4f - acc: %.4f' % (loss, acc))

        y_preds = np.argmax(model.predict(X_test), axis=1)
        # display all predictions
        print(f'Sample labels: {y_test}')
        print(f'Sample predictions: {y_preds}')
        print(f'We got {(y_preds == y_test).sum()}/{len(y_test)} correct!!')
def main(): (X_train, y_train), (X_val, y_val), (X_test, y_test) = load_data() print(f"X_train.shape = {X_train.shape} - y_train.shape = {y_train.shape} " + f"- X_val.shape = {X_val.shape} - y_val.shape = {y_val.shape} " + f"- X_test.shape = {X_test.shape} - y_test.shape = {y_test.shape}") if SHOW_SAMPLE: print(f"Displaying sample of {SAMPLE_SIZE} images...") rand_indexes = np.random.randint(0, len(X_test), SAMPLE_SIZE) sample_images = X_test[rand_indexes] sample_labels = y_test[rand_indexes] display_sample(sample_images, sample_labels, plot_title='Sample of %d images' % SAMPLE_SIZE) if DO_TRAINING: model = build_model(l2_loss_lambda=L2_REG) print(model.summary()) lr_scheduler = LearningRateScheduler(step_lr) hist = model.fit(X_train, y_train, epochs=NUM_EPOCHS, batch_size=BATCH_SIZE, validation_data=(X_val, y_val), callbacks=[lr_scheduler]) kru.show_plots(hist.history, metric='sparse_categorical_accuracy') # evaluate model performance print('\nEvaluating model performance...') loss, acc = model.evaluate(X_train, y_train) print(f' Training dataset -> loss: {loss:.4f} - acc: {acc:.4f}') loss, acc = model.evaluate(X_val, y_val) print(f' Cross-val dataset -> loss: {loss:.4f} - acc: {acc:.4f}') loss, acc = model.evaluate(X_test, y_test) print(f' Test dataset -> loss: {loss:.4f} - acc: {acc:.4f}') kru.save_model(model, MODEL_SAVE_PATH) del model if DO_PREDICTION: model = kru.load_model(MODEL_SAVE_PATH) print(model.summary()) y_pred = model.predict(X_test) y_pred = np.argmax(y_pred, axis=1) print('Sample labels (50): ', y_test[:50]) print('Sample predictions (50): ', y_pred[:50]) print('We got %d/%d incorrect!' % ((y_pred != y_test).sum(), len(y_test))) if SHOW_SAMPLE: # display sample predictions rand_indexes = np.random.randint(0, len(X_test), SAMPLE_SIZE) sample_images = X_test[rand_indexes] sample_labels = y_test[rand_indexes] sample_predictions = y_pred[rand_indexes] model_type = 'CNN' if USE_CNN else 'ANN' display_sample(sample_images, sample_labels, sample_predictions, num_rows=5, num_cols=10, plot_title=f'Keras {model_type} - {SAMPLE_SIZE} random predictions') del model
def main(): (X_train, y_train), (X_val, y_val), (X_test, y_test) = load_data() print( f"X_train.shape = {X_train.shape} - y_train.shape = {y_train.shape} " + f"- X_val.shape = {X_val.shape} - y_val.shape = {y_val.shape} " + f"- X_test.shape = {X_test.shape} - y_test.shape = {y_test.shape}") y_train, y_val, y_test = y_train.astype(np.float), y_val.astype( np.float), y_test.astype(np.float) if DO_TRAINING: model = build_model(NUM_FEATURES, 32, 32, NUM_CLASSES, L2_REG) print(model.summary()) print('Training model...') hist = model.fit(X_train, y_train, validation_split=0.20, epochs=NUM_EPOCHS, batch_size=BATCH_SIZE) kru.show_plots(hist.history, metric='acc') # evaluate model performance print('\nEvaluating model performance...') loss, acc = model.evaluate(X_train, y_train) print(f' Training dataset -> loss: {loss:.4f} - acc: {acc:.4f}') loss, acc = model.evaluate(X_val, y_val) print(f' Cross-val dataset -> loss: {loss:.4f} - acc: {acc:.4f}') loss, acc = model.evaluate(X_test, y_test) print(f' Test dataset -> loss: {loss:.4f} - acc: {acc:.4f}') kru.save_model(model, MODEL_SAVE_NAME) del model if DO_PREDICTION: print('\nRunning predictions...') model = kru.load_model(MODEL_SAVE_NAME) print(model.summary()) y_pred = np.argmax(np.round(model.predict(X_test)), axis=1) y_true = np.argmax(y_test, axis=1) # display output print('Sample labels: ', y_true) print('Sample predictions: ', y_pred) print('We got %d/%d correct!' % ((y_true == y_pred).sum(), len(y_true)))
def main():
    # generate data with noise
    M, C = 1.8, 32.0
    X, y = generate_temp_data(M, C, numelems=500, std=25)
    print(f"X.shape = {X.shape}, y.shape = {y.shape}")

    # display plot of generated data
    plt.figure(figsize=(8, 6))
    plt.scatter(X, y, s=40, c='steelblue')
    plt.title(f'Original Data -> $y = {M:.2f} * X + {C:.2f}$')
    plt.show()

    model = build_model()
    print(model.summary())
    print('Before training: ')
    print(f'   Weight: {model.layers[0].get_weights()[0]} ' +
          f'Bias: {model.layers[0].get_weights()[1]}')

    # train the model
    print('Training....', flush=True)
    hist = model.fit(X, y, epochs=5000, batch_size=32, verbose=2)
    kru.show_plots(hist.history, metric='r2_score', plot_title="Performance Metrics")

    print('After training: ')
    print(f'   Weight: {model.layers[0].get_weights()[0]} ' +
          f'Bias: {model.layers[0].get_weights()[1]}')

    # display plot of predictions against the generated data
    plt.figure(figsize=(8, 6))
    y_pred = model.predict(X)
    plt.scatter(X, y, s=40, c='steelblue')
    plt.plot(X, y_pred, lw=2, c='firebrick')
    Mp, Cp = model.layers[0].get_weights()[0][0], \
        model.layers[0].get_weights()[1][0]
    plt.title(f'Prediction -> $y = {Mp:.2f} * X + {Cp:.2f}$')
    plt.show()
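# NOTE: generate_temp_data() is not shown in this listing. Below is a minimal
# sketch of a noisy linear-data generator consistent with the call above
# (M = 1.8, C = 32.0 is the Celsius-to-Fahrenheit relation); the Celsius range
# and the fixed seed are assumptions made for illustration only.
import numpy as np


def generate_temp_data(m, c, numelems=100, std=10, seed=42):
    """Return X (Celsius values) and y = m * X + c + Gaussian noise."""
    rng = np.random.default_rng(seed)
    X = rng.uniform(-50.0, 50.0, size=numelems).astype(np.float32)
    noise = rng.normal(0.0, std, size=numelems).astype(np.float32)
    y = m * X + c + noise
    return X, y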
X_test = X_test / 255.0

model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation=tf.nn.relu),
    tf.keras.layers.Dense(64, activation=tf.nn.relu),
    tf.keras.layers.Dense(10, activation=tf.nn.softmax)
])
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
print(model.summary())

hist = model.fit(X_train, y_train, validation_split=0.2, epochs=25, batch_size=32)
kru.show_plots(hist.history, metric='accuracy')

# evaluate performance
loss, acc = model.evaluate(X_train, y_train)
print(f"Training data -> loss: {loss:.3f} - acc: {acc:.3f}")
loss, acc = model.evaluate(X_test, y_test)
print(f"Testing data  -> loss: {loss:.3f} - acc: {acc:.3f}")

# save model
kru.save_model(model, 'kr_fashion2')
del model
          class_weight=class_weight, batch_size=256,
          validation_data=(X_test, y_test),
          callbacks=[
              EarlyStopping(monitor="val_f1", mode='max', patience=5,
                            restore_best_weights=True)
          ])

results = model.evaluate(X_test, y_test)
print(results)

import kr_helper_funcs as kr
from sklearn.metrics import classification_report, confusion_matrix

kr.show_plots(history.history)

predictions = model.predict_classes(X_test)
print(classification_report(y_test, predictions))
kr.plot_cm(y_test, predictions, ["unpaid", "paid"])
plt.show()

# from threading import Lock
# lock = Lock()

import os


def save_model(name='without_postcode', model=model):
    # lock.acquire()
    # try:
plt.show()

from tensorflow.keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=5)

model.fit(x=X_train_rus, y=y_train_rus, epochs=25, class_weight=class_weight,
          batch_size=256, validation_data=(X_test, y_test), callbacks=[early_stop])

import kr_helper_funcs as kr
from sklearn.metrics import classification_report, confusion_matrix

kr.show_plots(model.history.history)

predictions = model.predict_classes(X_test)
print(classification_report(y_test, predictions))
kr.plot_cm(y_test, predictions, ["unpaid", "paid"])
plt.show()

from tensorflow.keras.models import load_model
# model = tf.keras.models.load_model('lending-club.h5')

import os

name = 'without_postcode'
if not os.path.exists(name):
    os.mkdir(name)
tf.keras.models.save_model(model, '{}/lending-club.h5'.format(name))
pd.DataFrame.from_dict(model.history.history).to_csv(
    '{}/lending-club-history.csv'.format(name), index=False)
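# NOTE: Sequential.predict_classes() was deprecated and then removed from
# tf.keras in newer TensorFlow releases (2.6+). If the predict_classes() calls
# above fail on a recent version, an equivalent for a model ending in a single
# sigmoid unit (assumed here from the binary "unpaid"/"paid" labels) is to
# threshold the raw predictions:
# predictions = (model.predict(X_test) > 0.5).astype("int32").ravel()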
def show_plots(history):
    kru.show_plots(history)