def train(model, model_name):
    """Pretrain, fine-tune, save, and evaluate *model*.

    Runs two fit phases: a pretraining pass on the pretrain datasets
    (saved as ``pretrained_<model_name>.h5``), then a fine-tuning pass on
    the train/test split (saved as ``fine_tuned_<model_name>.h5``).

    Args:
        model: a compiled Keras model supporting fit/save/evaluate.
        model_name: suffix used to build the saved-model filenames.

    Returns:
        The scores list from ``model.evaluate`` on the test set.
    """
    loader = DataLoader()
    pretrain_data, pretrain_labels, pretrain_names = loader.load_pretrain_datasets()

    # pretrain model
    model.fit(pretrain_data, pretrain_labels,
              batch_size=BATCH_SIZE, epochs=PRETRAIN_EPOCHS)

    # fix: create the output directory once — the original called
    # deep_utils.create_directory("../models") a second time before the
    # fine-tuned save, which was redundant.
    deep_utils.create_directory("../models")
    model_filename = "../models/pretrained_" + model_name + ".h5"
    model.save(model_filename)

    train_data, train_labels, train_names = loader.load_train_datasets()
    test_data, test_labels, test_names = loader.load_test_datasets()

    # train model (fine-tune on the labelled split, validating on test data)
    model.fit(train_data, train_labels,
              validation_data=(test_data, test_labels),
              batch_size=BATCH_SIZE, epochs=TRAIN_EPOCHS)

    model_filename = "../models/fine_tuned_" + model_name + ".h5"
    model.save(model_filename)

    # evaluate model
    scores = model.evaluate(test_data, test_labels, verbose=1)
    return scores
def train(model, model_name, train_datasets, test_datasets):
    """Fit *model* on the given datasets, optionally save it, and return
    its evaluation scores on the test split.

    Args:
        model: a compiled Keras model.
        model_name: suffix used in the saved-model filename.
        train_datasets: (data, labels, names) triple for training.
        test_datasets: (data, labels, names) triple for validation/evaluation.

    Returns:
        The scores list from ``model.evaluate`` on the test data.
    """
    # unpack the (data, labels, names) triples in one step each
    train_data, train_labels, train_names = train_datasets[0], train_datasets[1], train_datasets[2]
    test_data, test_labels, test_names = test_datasets[0], test_datasets[1], test_datasets[2]

    # train model
    model.fit(
        train_data,
        train_labels,
        validation_data=(test_data, test_labels),
        batch_size=BATCH_SIZE,
        epochs=EPOCHS,
    )

    # save model (only when the module-level flag asks for it)
    if SAVE_MODEL:
        deep_utils.create_directory("../models")
        model.save("../models/allmirbase_" + model_name + ".h5")

    return model.evaluate(test_data, test_labels, verbose=1)
def pretrain(model, model_name, pretrain_datasets):
    """Pretrain *model* on the given datasets and save it to disk.

    Args:
        model: a compiled Keras model.
        model_name: suffix used in the saved-model filename.
        pretrain_datasets: sequence whose first two items are the
            pretraining data and labels.

    Returns:
        The path of the saved pretrained model file.
    """
    features, labels = pretrain_datasets[0], pretrain_datasets[1]

    # pretrain model (verbose=2: one log line per epoch)
    model.fit(features, labels,
              batch_size=BATCH_SIZE,
              epochs=PRETRAIN_EPOCHS,
              verbose=2)

    deep_utils.create_directory("../models")
    saved_path = "../models/kfold_hsa_pretrain_" + model_name + ".h5"
    model.save(saved_path)
    return saved_path
def train(model, model_name):
    """Train *model* on the loaded train/test split, save it, and return
    its evaluation scores.

    Args:
        model: a compiled Keras model.
        model_name: suffix used in the saved-model filename.

    Returns:
        The scores list from ``model.evaluate`` on the test data.
    """
    data_loader = DataLoader()
    x_train, y_train, names_train = data_loader.load_train_datasets()
    x_test, y_test, names_test = data_loader.load_test_datasets()

    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        epochs=TRAIN_EPOCHS,
        validation_data=(x_test, y_test),
        shuffle=True,
    )

    # save trained model
    deep_utils.create_directory("../models")
    model.save("../models/base_" + model_name + ".h5")

    return model.evaluate(x_test, y_test, verbose=1)
def train(model, model_name):
    """Train *model* on the allmirbase datasets, optionally save it, and
    return its evaluation scores on the test split.

    Args:
        model: a compiled Keras model.
        model_name: suffix used in the saved-model filename.

    Returns:
        The scores list from ``model.evaluate`` on the test data.
    """
    # load data
    data_loader = DataLoaderAllmirbase()
    x_train, y_train, names_train = data_loader.load_train_datasets()
    x_test, y_test, names_test = data_loader.load_test_datasets()

    # train model
    model.fit(
        x_train,
        y_train,
        validation_data=(x_test, y_test),
        batch_size=BATCH_SIZE,
        epochs=EPOCHS,
    )

    # save model (only when the module-level flag asks for it)
    if SAVE_MODEL:
        deep_utils.create_directory("../models")
        model.save("../models/allmirbase_" + model_name + ".h5")

    # evaluate model
    return model.evaluate(x_test, y_test, verbose=1)
def train(model, pretrain_datasets, train_datasets, test_datasets, model_name):
    """Pretrain then fine-tune *model*, optionally save it, and return
    its evaluation scores on the test split.

    Args:
        model: a compiled Keras model.
        pretrain_datasets: (data, labels, names) triple for pretraining.
        train_datasets: (data, labels, names) triple for fine-tuning.
        test_datasets: (data, labels, names) triple for validation/evaluation.
        model_name: suffix used in the saved-model filename.

    Returns:
        The scores list from ``model.evaluate`` on the test data.
    """
    # unpack the (data, labels, names) triples; the name components are unused
    x_pre, y_pre, _ = pretrain_datasets[0], pretrain_datasets[1], pretrain_datasets[2]
    x_train, y_train, _ = train_datasets[0], train_datasets[1], train_datasets[2]
    x_test, y_test, _ = test_datasets[0], test_datasets[1], test_datasets[2]

    # pretraining phase
    model.fit(x_pre, y_pre,
              batch_size=BATCH_SIZE,
              epochs=PRETRAIN_EPOCHS,
              shuffle=True)

    # train_model (fine-tuning phase, validating on the test data)
    model.fit(x_train, y_train,
              batch_size=BATCH_SIZE,
              epochs=TRAIN_EPOCHS,
              validation_data=(x_test, y_test),
              shuffle=True)

    # save trained model
    if SAVE_MODEL:
        deep_utils.create_directory("../models")
        model.save("../models/hsa_" + model_name + ".h5")

    return model.evaluate(x_test, y_test, verbose=1)