def train_vae():
    # Instantiate and train the variational autoencoder
    vae_kwargs = {}
    vae_kwargs["latent_dim"] = 2
    vae_kwargs["hidden_dim"] = [15, 7]
    vae_kwargs["epochs"] = 8
    vae_kwargs["batch_size"] = 128
    # vae_kwargs["model_path"] = ae_model_path
    vae = VAEModel(in_train.shape[1], **vae_kwargs)
    vae.train(in_train, in_test)
    vae.save_model()

    inlier_scores = vae.compute_anomaly_score(in_test)
    outlier_scores = vae.compute_anomaly_score(out_test)
    print(inlier_scores)
    print(outlier_scores)

    metrics = eval_utils.evaluate_model(
        inlier_scores, outlier_scores, model_name="vae", show_plot=False)
    print(metrics)
    return metrics
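# --- Hedged sketch (not from this codebase) ---------------------------------
# eval_utils.evaluate_model is not shown above. A common way to summarize how
# well anomaly scores separate inliers from outliers is ROC AUC over the
# concatenated scores, assuming higher scores mean "more anomalous". The two
# score arrays below are synthetic stand-ins for the real model outputs.
import numpy as np
from sklearn.metrics import roc_auc_score

inlier_scores = np.random.normal(loc=0.5, scale=0.1, size=100)   # stand-in scores
outlier_scores = np.random.normal(loc=1.5, scale=0.3, size=100)  # stand-in scores

scores = np.concatenate([inlier_scores, outlier_scores])
labels = np.concatenate([np.zeros(len(inlier_scores)),   # 0 = inlier
                         np.ones(len(outlier_scores))])  # 1 = outlier
print("ROC AUC:", roc_auc_score(labels, scores))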
try:
    os.mkdir(log_dir)
    logging.info('============================================================')
    logging.info(' Logging Directory: %s' % log_dir)
    logging.info('============================================================\n')
except OSError:
    pass

# ==================================================================================================
# ============== LOAD THE MODELS ===================================================================
# ==================================================================================================
vae_network = VariationalAutoencoder
model = VAEModel(vae_network, config, model_name, log_dir)

# Load the VAE model as our baseline
path = os.path.join(project_code_root, config["model_directory"])
model.load_from_path(path, config["model_name"], config["latest_model_epoch"])

# ==================================================================================================
# ============== LOAD THE DATA =====================================================================
# ==================================================================================================
if preprocess_enabled == "slice":
    logging.info('============================================================')
    logging.info('Loading training data from: ' + project_data_root)
    data_tr = data_freiburg_numpy_to_preprocessed_hdf5.load_cropped_data_sliced(
        basepath=project_data_root,
        idx_start=config['train_data_start_idx'],
        idx_end=config['train_data_end_idx'],
from models.vae import VAEModel

from .bert import BERTModel

MODELS = {
    BERTModel.code(): BERTModel,
    VAEModel.code(): VAEModel,
}


def model_factory(args, dataset_meta):
    model = MODELS[args.model_code]
    return model(args, dataset_meta)
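# --- Hedged usage sketch (not part of the original module) ------------------
# model_factory dispatches on args.model_code, which must match one of the
# keys produced by the models' code() classmethods in MODELS. SimpleNamespace
# is used here as a stand-in for the real argparse Namespace and dataset
# metadata; the actual objects would carry many more fields.
from types import SimpleNamespace

args = SimpleNamespace(model_code=VAEModel.code())  # select the VAE entry
dataset_meta = SimpleNamespace(num_items=1000)      # hypothetical metadata
model = model_factory(args, dataset_meta)           # instantiates VAEModel(args, dataset_meta)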
end = one_subject.shape[0] - (one_subject.shape[0] - num_of_y_slices) / 2
temp_reshape[i * num_of_y_slices:(i + 1) * num_of_y_slices, :, :, :, :] = \
    one_subject[int(start):int(end), :, :, :, :]

images_vl = temp_reshape

# ====================================================================================
# Initialize the network architecture, training parameters, model_name, and logging directory
# ====================================================================================
# Initialize the VAE
vae_network = VariationalAutoencoder
model = VAEModel(vae_network, config, model_name, log_dir)
model.initialize()
model.summarize()

# Should we continue training an existing model or is it a new model
if continue_training:
    path = os.path.join(project_code_root, config["model_directory"])
    model.load_from_path(path, config["model_name"], config["latest_model_epoch"])
    already_completed_epochs = config["latest_model_epoch"]
else:
    model.summarize()
    already_completed_epochs = 0
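# --- Hedged illustration (not from the original script) ---------------------
# The `end` computation at the top of this excerpt keeps a centered block of
# num_of_y_slices slices per subject; `start` is assumed here to be the
# symmetric counterpart (its definition is not shown in the excerpt). With 40
# slices per subject and num_of_y_slices = 32, for example:
num_slices_in_subject = 40  # stand-in for one_subject.shape[0]
num_of_y_slices = 32
start = (num_slices_in_subject - num_of_y_slices) / 2                        # 4.0
end = num_slices_in_subject - (num_slices_in_subject - num_of_y_slices) / 2  # 36.0
print(int(end) - int(start))  # 32 slices, centered in the subject volume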