Example #1
        # File paths used to save the model architecture (JSON) and weights (HDF5) after training
        model_name_json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                            'data', 'Keras_best_model.json')
        model_name_h5_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                          'data', 'Keras_best_model.h5')

        y_train = train_labels.numpy()
        X_train = data_loaders_original["train"].dataset.imgs
        X_test = data_loaders_original["test"].dataset.imgs

        keras_model = KerasModel(model_name_json_path=model_name_json_path,
                                 model_name_h5_path=model_name_h5_path,
                                 X=X_train)
        keras_model.fit(X=X_train, y=y_train)

        y_pred = keras_model.predict(X_train)
        tr_error = np.mean(y_pred != y_train[:, None])
        print(f"Keras Model Training Error is: {tr_error}")
        test_labels = keras_model.predict(X_test)
        save_results_in_csv(test_labels)

    elif model_to_run == "TRANSFER_LEARNING":
        # Load the pre-trained ResNet-152 model
        model_conv = torchvision.models.resnet152(pretrained=True)

        model = TransferLearningModel(model_conv)

        # Train the model on top of the pre-trained ResNet backbone
        print("Training model...")
        model_conv = model.fit(data_loaders_transfer_learning,
                               dataset_sizes_tl,
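The `KerasModel` wrapper used in this example is not shown in the snippet. Below is a minimal sketch of what such a wrapper could look like, assuming a scikit-learn-style `fit`/`predict` interface, that `X` is an array-like of numeric features, and that the JSON/HDF5 paths hold the serialized architecture and weights; the layer sizes, loss, and training settings are illustrative assumptions, not the original implementation.

import numpy as np
from tensorflow import keras

class KerasModel:
    """Illustrative stand-in for the KerasModel wrapper assumed above."""

    def __init__(self, model_name_json_path, model_name_h5_path, X):
        self.model_name_json_path = model_name_json_path
        self.model_name_h5_path = model_name_h5_path
        # Size a simple fully connected classifier to the flattened input features.
        n_features = np.asarray(X).reshape(len(X), -1).shape[1]
        self.model = keras.Sequential([
            keras.layers.Input(shape=(n_features,)),
            keras.layers.Dense(128, activation="relu"),
            keras.layers.Dense(1, activation="sigmoid"),
        ])
        self.model.compile(loss="binary_crossentropy", optimizer="adam",
                           metrics=["accuracy"])

    def fit(self, X, y):
        X = np.asarray(X).reshape(len(X), -1)
        self.model.fit(X, y, epochs=10, batch_size=32, verbose=0)
        # Keras 2-style persistence: architecture as JSON, weights as HDF5.
        with open(self.model_name_json_path, "w") as f:
            f.write(self.model.to_json())
        self.model.save_weights(self.model_name_h5_path)
        return self

    def predict(self, X):
        X = np.asarray(X).reshape(len(X), -1)
        # Threshold the sigmoid output into hard 0/1 labels, shape (n_samples, 1).
        return (self.model.predict(X, verbose=0) > 0.5).astype(int)

With this interface, `predict` returns a column vector of labels, which is why the training-error line above compares against `y_train[:, None]`.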
Example #2
print("train_x", train_x.shape, train_x.dtype)

input_shape = (img_width, img_height, img_num_channels)

logdir = "logs/" + datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir)

model = KerasModel(input_shape)
# tf.keras.utils.plot_model(model, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
model.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer=keras.optimizers.Adam(0.001),
    metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])

# Fit data to model
history = model.fit(train_x,
                    train_y,
                    callbacks=[tensorboard_callback],
                    batch_size=50,
                    epochs=6,
                    verbose=True,
                    validation_split=0.2)
model.summary()
score = model.evaluate(test_x, test_y, verbose=0)

model.save('saved_model/model')

# With from_logits=True above, predict returns raw logits, not probabilities
logits = model.predict(test_x, verbose=True)
print(logits)
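Because the loss is configured with `from_logits=True`, the model's outputs are unnormalized logits. The sketch below shows one way to reload the model saved by `model.save('saved_model/model')` and convert its outputs into probabilities and class labels; it reuses `test_x` and `test_y` from the snippet above, and the accuracy line is an illustrative assumption rather than part of the original script.

import numpy as np
import tensorflow as tf
from tensorflow import keras

# Reload the model saved by the training script above.
restored = keras.models.load_model('saved_model/model')

logits = restored.predict(test_x, verbose=0)             # shape: (n_samples, n_classes)
probabilities = tf.nn.softmax(logits, axis=-1).numpy()   # normalize logits into probabilities
predicted_classes = np.argmax(probabilities, axis=-1)    # integer class labels

accuracy = np.mean(predicted_classes == test_y)
print("Test accuracy:", accuracy)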
Example #3
    return train_feats, train_labels

X, Y, X_test, Y_test = get_data()

# PART 2 FIT MODEL

model = KerasModel()

model.fit(X, Y)
    
print("predicting on kfold validation")

# PART 5 EVALUATE ON UNSEEN
X_real, Y_real = get_real_data()

real_predict = model.predict(X_real)
print(f"Average f1s on unseen: {f1_score(Y_real, real_predict, average='micro')}")

# PART 6 PREPARE SUBMISSION
def get_data_for_submitting():
    #df_test = util.load_data_to_dataframe('dataset/test.json')
    #prepared_df = create_features(df_test)
    #prepared_df.to_csv('cache/test.csv', index=False)
    prepared_df = filterout_mac_features(pd.read_csv('cache/test.csv'))
    test_feats, _, test_ids = get_feats_labels_ids(prepared_df)
    return test_feats, test_ids

def dump_for_submitting():
    X_submit, test_ids = get_data_for_submitting()
    test_predictions = model.predict(X_submit)
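The `dump_for_submitting` function is cut off above. One plausible way to finish the step, assuming the submission is a two-column CSV of ids and predicted labels, is sketched below; the column names and output path are hypothetical, and `get_data_for_submitting` and `model` are taken from the snippet above.

import numpy as np
import pandas as pd

def dump_for_submitting_sketch():
    """Illustrative completion: write predictions next to their ids as a CSV."""
    X_submit, test_ids = get_data_for_submitting()
    test_predictions = model.predict(X_submit)
    submission = pd.DataFrame({
        "id": test_ids,
        "prediction": np.ravel(test_predictions),  # flatten in case predict returns a column vector
    })
    submission.to_csv("cache/submission.csv", index=False)  # hypothetical output path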