# mvtec_ad, len = MVTecAd.get_train_dataset("bottle")
# mvtec_ad = mvtec_ad.map(ImageDatasetUtil.resize(IMAGE_SIZE, IMAGE_SIZE))
# (train, len), (validation, validation_len) = ImageDatasetUtil.devide_train_validation(mvtec_ad, len, 0.9)

# Load the cats_vs_dogs training set, crop each image to its largest square and resize it.
cats_vs_dogs, total_len = CatsVsDogs.get_train_dataset()
cats_vs_dogs = cats_vs_dogs.map(
    ImageDatasetUtil.map_max_square_crop_and_resize(IMAGE_SIZE, IMAGE_SIZE))

# Split into 90% training and 10% validation data.
(train, train_len), (validation, validation_len) = ImageDatasetUtil.devide_train_validation(
    cats_vs_dogs, total_len, 0.9)

# Normalize pixel values; the unnormalized validation set is kept for the result visualization below.
train = train.map(ImageDatasetUtil.image_reguralization(),
                  num_parallel_calls=tf.data.experimental.AUTOTUNE)  # .map(ImageDatasetUtil.resize(64,64))
validation_r = validation.map(ImageDatasetUtil.image_reguralization(),
                              num_parallel_calls=tf.data.experimental.AUTOTUNE)  # .map(ImageDatasetUtil.resize(64,64))

# Build the SSIM autoencoder and its training components.
model = SSIMAutoEncoderModel.get_model(input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3))
optimizer = OptimizerBuilder.get_optimizer("rmsprop")
callback = CallbackBuilder.get_callbacks()
loss = ssim_color_loss

# Train the autoencoder, then visualize reconstructions and compute reconstruction errors.
ImageTrain.train_image_autoencoder(train, train_len, BATCH_SIZE, validation_r, validation_len,
                                   100, model, callback, optimizer, loss, EPOCHS, False)
# model.load_weights(Context.get_model_path())
ImageTrain.show_autoencoder_results(model, validation, 15)
ImageTrain.calucurate_reconstruction_error(model, validation, 10)
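# The autoencoder example above assumes that the tftk helper classes and the
# IMAGE_SIZE, BATCH_SIZE, and EPOCHS constants are already in scope. A minimal
# setup sketch follows; the module paths and the constant values are assumptions
# (as is the origin of ssim_color_loss), so adjust them to the actual tftk layout.
import tensorflow as tf

from tftk.image.dataset import CatsVsDogs, MVTecAd, ImageDatasetUtil  # assumed paths
from tftk.image.model import SSIMAutoEncoderModel                     # assumed path
from tftk.callback import CallbackBuilder                             # assumed path
from tftk.optimizer import OptimizerBuilder                           # assumed path
from tftk.train.image import ImageTrain                               # assumed path

IMAGE_SIZE = 128   # assumed input resolution for this example
BATCH_SIZE = 16    # assumed batch size
EPOCHS = 100       # assumed maximum number of epochs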
IMAGE_SIZE = 28
EPOCHS = 20
SHUFFLE_SIZE = 1000

# Load MNIST, normalize the pixel values, and one-hot encode the labels.
train, train_len = Mnist.get_train_dataset()
validation, validation_len = Mnist.get_test_dataset()
train = train.map(ImageDatasetUtil.image_reguralization()).map(
    ImageDatasetUtil.one_hot(CLASS_NUM))
validation = validation.map(ImageDatasetUtil.image_reguralization()).map(
    ImageDatasetUtil.one_hot(CLASS_NUM))

# Optimizer, model, and callbacks (learning-rate reduction on plateau plus early stopping).
optimizer = OptimizerBuilder.get_optimizer(name="rmsprop")
model = SimpleClassificationModel.get_model(input_shape=(IMAGE_SIZE, IMAGE_SIZE, 1),
                                            classes=CLASS_NUM)
callbacks = CallbackBuilder.get_callbacks(tensorboard=False, reduce_lr_on_plateau=True,
                                          reduce_patience=3, reduce_factor=0.25,
                                          early_stopping_patience=5)

# Train the classifier with categorical cross-entropy.
ImageTrain.train_image_classification(train_data=train, train_size=train_len,
                                      batch_size=BATCH_SIZE,
                                      validation_data=validation, validation_size=validation_len,
                                      shuffle_size=SHUFFLE_SIZE,
                                      model=model, callbacks=callbacks,
                                      optimizer=optimizer,
                                      loss="categorical_crossentropy",
                                      max_epoch=EPOCHS)
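# As with the autoencoder example, the MNIST snippet above assumes the tftk
# imports plus CLASS_NUM and BATCH_SIZE are already defined. A minimal sketch,
# with the module paths and the batch size as assumptions (MNIST has 10 classes):
from tftk.image.dataset import Mnist, ImageDatasetUtil     # assumed paths
from tftk.image.model import SimpleClassificationModel     # assumed path
from tftk.callback import CallbackBuilder                  # assumed path
from tftk.optimizer import OptimizerBuilder                # assumed path
from tftk.train.image import ImageTrain                    # assumed path

CLASS_NUM = 10     # MNIST has 10 digit classes
BATCH_SIZE = 100   # assumed batch size for illustration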