from tensorflow import keras

# Repo-local modules assumed to provide the helpers used below
import datagen
import unet
import utils


def train(
    model_type,
    path_args,
    training_args,
    model_args,
    output_args,
    load_dataset_args,
    checkpoint_args,
    extension_args,
):
    """
    ---------------------------------------------
    Input: configuration dicts for paths, training,
           model, output, data loading, checkpoints,
           and extensions
    Output: (history, model) from the training run
    Run the training harness for fitting a model
    ---------------------------------------------
    """
    pretrained = training_args.pop("pretrained")
    # Popped so it is not forwarded to model.fit below
    results_folder = training_args.pop("results_folder")
    data_format = load_dataset_args["data_format"]
    checkpoint_path = checkpoint_args["checkpoint_path"]

    paths = utils.get_paths(**path_args)
    utils.check_folders(paths, **extension_args)
    keras.backend.set_image_data_format(data_format)

    callbacks = [
        utils.get_early_stopping_callback(),
        utils.get_tensorboard_directory_callback(),
        utils.get_checkpoint_callback(checkpoint_path),
    ]

    if pretrained:
        model = keras.models.load_model(checkpoint_path)
    elif model_type == "unet":
        model = unet.define_model(output_args, **model_args)
    else:
        raise ValueError("Unsupported model_type: {}".format(model_type))

    # Renamed from `train`/`val` so the generators do not shadow this function
    train_gen, val_gen = datagen.load_dataset(paths, load_dataset_args)

    # fit_generator is deprecated; model.fit accepts generators in TF 2.x
    history = model.fit(
        train_gen,
        validation_data=val_gen,
        callbacks=callbacks,
        **training_args,
    )
    return (history, model)
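A minimal sketch of how this harness might be invoked. Everything below is a placeholder: the keys shown for training_args, load_dataset_args, and checkpoint_args are the ones train() actually reads, while the contents of the other dicts depend on utils.get_paths, unet.define_model, and datagen.load_dataset, which are not shown here.

# Hypothetical invocation; all values are placeholders.
history, model = train(
    model_type="unet",
    path_args={},                      # consumed by utils.get_paths
    training_args={
        "pretrained": False,           # popped: build rather than load
        "results_folder": "results",   # popped before kwargs reach model.fit
        "epochs": 50,
    },
    model_args={},                     # forwarded to unet.define_model
    output_args={},                    # first positional arg of unet.define_model
    load_dataset_args={"data_format": "channels_last"},
    checkpoint_args={"checkpoint_path": "checkpoints/unet.h5"},
    extension_args={},                 # forwarded to utils.check_folders
)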
import tensorflow as tf
import numpy as np
from models import build_model
from datagen import load_dataset, genxy

net_name = "Net"
output_path = 'output'
anno_file_path = '/Users/baulhoa/Documents/PythonProjects/datasets/faceali/train.txt'
image_dir_path = '/Users/baulhoa/Documents/PythonProjects/datasets/faceali/train'
ishape = [112, 112, 1]
total_epoches = 1000
batch_size = 100

dataset = load_dataset(anno_file_path=anno_file_path)
total_examples = len(dataset)
total_batches = total_examples // batch_size

model = build_model(ishape=ishape, mode='train', net_name=net_name)
# model.summary()
# model.load_weights('{}/weights_.h5'.format(output_path), by_name=True)

min_loss = 2**32

for epoch in range(total_epoches):
    # tf.keras.backend.set_value(model.optimizer.learning_rate, 0.001)
    gen = genxy(
        dataset=dataset,
        image_dir_path=image_dir_path,
        ishape=ishape,
        total_batches=total_batches,
        # remaining keyword arguments are cut off in the source
    )
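The script breaks off inside the epoch loop. A hypothetical continuation, grounded only in the variables already defined above (min_loss, total_batches, output_path): it assumes genxy yields (batch_x, batch_y) pairs and that build_model compiled the model with a single scalar loss.

    # Hypothetical continuation of the epoch loop above.
    loss_sum = 0.0
    for batch_x, batch_y in gen:
        # train_on_batch returns the batch-mean loss when the model is
        # compiled with one loss and no extra metrics
        loss_sum += float(model.train_on_batch(batch_x, batch_y))

    mean_loss = loss_sum / total_batches
    print('Epoch {}: mean loss {:.6f}'.format(epoch, mean_loss))

    # Keep the best weights, mirroring the min_loss bookkeeping above
    if mean_loss < min_loss:
        min_loss = mean_loss
        model.save_weights('{}/weights_.h5'.format(output_path))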
model = build_model(
    ishape=ishape,
    resnet_settings=resnet_settings,
    k=len(asizes),
    total_classes=total_classes,
    net_name=net_name,
)
# model.summary()
model.load_weights('{}/weights_.h5'.format(output_path), by_name=True)

min_loss = 2**32
max_precision = 0
max_recall = 0
max_precision_recall = 0

gendata = genxy_com if combine else genxy
train_dataset = load_dataset(anno_file_path=train_anno_file_path)
test_dataset = load_dataset(anno_file_path=test_anno_file_path)

for epoch in range(total_epoches):
    # tf.keras.backend.set_value(model.optimizer.learning_rate, 0.001)
    gen = gendata(
        dataset=train_dataset,
        image_dir=train_image_dir,
        ishape=ishape,
        abox_2dtensor=abox_2dtensor,
        iou_thresholds=iou_thresholds,
        total_examples=total_train_examples,
        total_classes=total_classes,
        anchor_sampling=anchor_sampling,
    )

    print('\nTrain epoch {}'.format(epoch))
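The loop body is truncated after the epoch banner. A speculative sketch of what the bookkeeping variables above imply, with compute_precision_recall as an explicitly hypothetical stand-in for whatever evaluation this repo actually runs on test_dataset:

    # Hypothetical continuation; compute_precision_recall is a stand-in,
    # not a function from this repo.
    loss_sum, batches = 0.0, 0
    for batch_x, batch_y in gen:
        loss_sum += float(model.train_on_batch(batch_x, batch_y))
        batches += 1
    print('Mean train loss: {:.6f}'.format(loss_sum / max(batches, 1)))

    precision, recall = compute_precision_recall(model, test_dataset)
    if precision > max_precision:
        max_precision = precision
    if recall > max_recall:
        max_recall = recall
    if precision + recall > max_precision_recall:
        max_precision_recall = precision + recall
        model.save_weights('{}/weights_.h5'.format(output_path))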
from datagen import load_dataset  # Global
from gan import Gan  # assumed location of the Gan class; its module is not shown in the source

import numpy as np
import random as rd
import matplotlib.pyplot as plt
from tensorflow import keras as ks
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LeakyReLU, Reshape, Conv2DTranspose, Conv2D, Flatten, Dropout

## Parameters and dataset
Ldim = 100
P = 10
Shape = (28, 28, 1)

X, Y = load_dataset()

## Gan
# Instance renamed to `gan` so it no longer shadows the Gan class
gan = Gan(ldim=Ldim, p=P, shape=Shape)
gan.load('C:/Users/meri2/Documents/Projects/MNSIT_GAN/Attempt_0')
gan.make_gan()

losses, accuracies, times = gan.train(
    X, Y,
    epochs=0,  # as in the source: no further training before sampling
    batch_size=256,
)

gan.samples(7)
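matplotlib is imported in this script but never used. A small follow-up sketch that plots the curves returned by gan.train, assuming losses and accuracies are flat sequences of per-step values (with epochs=0 above they would be empty, so raise epochs to populate them):

# Hypothetical follow-up: visualize the returned training curves.
fig, (ax_loss, ax_acc) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(losses)
ax_loss.set_title('Loss')
ax_loss.set_xlabel('step')
ax_acc.plot(accuracies)
ax_acc.set_title('Accuracy')
ax_acc.set_xlabel('step')
fig.tight_layout()
plt.show()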