import traja
from traja.dataset import dataset
from traja.dataset.example import jaguar
from traja.models.generative_models.vae import MultiModelVAE
from traja.models.predictive_models.ae import MultiModelAE
from traja.models.predictive_models.lstm import LSTM
from traja.models.train import HybridTrainer
from traja.plotting import plot_prediction


def test_ae():
    """Test the autoencoder model on forecasting and classification tasks."""
    # Sample data
    df = jaguar()

    # Hyperparameters
    batch_size = 10
    num_past = 10
    num_future = 5

    # Prepare the dataloader
    data_loaders = dataset.MultiModalDataLoader(
        df,
        batch_size=batch_size,
        n_past=num_past,
        n_future=num_future,
        num_workers=1,
        train_split_ratio=0.5,
        validation_split_ratio=0.2,
    )

    model_save_path = "./model.pt"

    model = MultiModelAE(
        input_size=2,
        num_past=num_past,
        batch_size=batch_size,
        num_future=num_future,
        lstm_hidden_size=32,
        num_lstm_layers=2,
        output_size=2,
        latent_size=10,
        batch_first=True,
        dropout=0.1,
        reset_state=True,
        bidirectional=False,
        num_classifier_layers=4,
        classifier_hidden_size=32,
        num_classes=9,
    )

    # Model trainer
    trainer = HybridTrainer(model=model, optimizer_type="Adam", loss_type="huber")

    # Train the model on both tasks
    trainer.fit(data_loaders, model_save_path, epochs=10, training_mode="forecasting")
    trainer.fit(data_loaders, model_save_path, epochs=10, training_mode="classification")
def test_lstm_jaguar():
    """Test the LSTM forecasting model and check that training reduces the loss."""
    # Sample data
    df = jaguar()

    # Hyperparameters
    batch_size = 10
    num_past = 10
    num_future = 10

    # For timeseries prediction
    assert num_past == num_future

    # Prepare the dataloader
    data_loaders = dataset.MultiModalDataLoader(
        df,
        batch_size=batch_size,
        n_past=num_past,
        n_future=num_future,
        num_workers=1,
    )

    model_save_path = "./model.pt"

    # Model init
    model = LSTM(
        input_size=2,
        hidden_size=32,
        num_layers=2,
        output_size=2,
        dropout=0.1,
        batch_size=batch_size,
        num_future=num_future,
        bidirectional=False,
        batch_first=True,
        reset_state=True,
    )

    # Model trainer
    trainer = HybridTrainer(model=model, optimizer_type="Adam", loss_type="huber")

    forecasting_loss_pre_training, _, _ = trainer.validate(data_loaders["train_loader"])
    print(f"Loss pre training: {forecasting_loss_pre_training}")

    # Train the model
    trainer.fit(
        data_loaders,
        model_save_path,
        epochs=2,
        training_mode="forecasting",
        validate_every=1,
        test_every=2,
    )

    forecasting_loss_post_training, _, _ = trainer.validate(data_loaders["train_loader"])
    print(f"Loss post training: {forecasting_loss_post_training}")

    # Training should reduce the forecasting loss
    assert forecasting_loss_post_training < forecasting_loss_pre_training
def test_ae_jaguar():
    """Test autoencoder forecasting with the Jaguar dataset."""
    # Sample data
    df = jaguar()

    # Hyperparameters
    batch_size = 10
    num_past = 10
    num_future = 5

    # Prepare the dataloader
    data_loaders = dataset.MultiModalDataLoader(
        df,
        batch_size=batch_size,
        n_past=num_past,
        n_future=num_future,
        num_workers=1,
        train_split_ratio=0.5,
        validation_split_ratio=0.2,
    )

    model_save_path = "./model.pt"

    model = MultiModelAE(
        input_size=2,
        num_past=num_past,
        batch_size=batch_size,
        num_future=num_future,
        lstm_hidden_size=32,
        num_lstm_layers=2,
        output_size=2,
        latent_size=10,
        batch_first=True,
        dropout=0.1,
        reset_state=True,
        bidirectional=False,
    )

    # Model trainer
    trainer = HybridTrainer(model=model, optimizer_type="Adam", loss_type="huber")

    # Train with different validate_every / test_every combinations,
    # including None, to exercise the scheduling branches
    trainer.fit(
        data_loaders,
        model_save_path,
        epochs=5,
        training_mode="forecasting",
        validate_every=2,
        test_every=5,
    )
    trainer.fit(
        data_loaders,
        model_save_path,
        epochs=5,
        training_mode="forecasting",
        validate_every=None,
        test_every=5,
    )
    trainer.fit(
        data_loaders,
        model_save_path,
        epochs=5,
        training_mode="forecasting",
        validate_every=2,
        test_every=None,
    )

    trainer.validate(data_loaders["sequential_validation_loader"])
def test_plot_prediction():
    """Test plotting of model predictions on the sequential test set."""
    # Hyperparameters
    batch_size = 10
    num_past = 10
    num_future = 10
    input_size = 2
    lstm_hidden_size = 512
    lstm_num_layers = 4
    batch_first = True
    reset_state = True
    output_size = 2
    num_classes = 9
    latent_size = 20
    dropout = 0.1
    bidirectional = False

    # Prepare the dataloader
    df = jaguar()
    data_loaders = dataset.MultiModalDataLoader(
        df,
        batch_size=batch_size,
        n_past=num_past,
        n_future=num_future,
        num_workers=1,
    )

    model = MultiModelVAE(
        input_size=input_size,
        output_size=output_size,
        lstm_hidden_size=lstm_hidden_size,
        num_lstm_layers=lstm_num_layers,
        num_classes=num_classes,
        latent_size=latent_size,
        dropout=dropout,
        num_classifier_layers=4,
        classifier_hidden_size=32,
        batch_size=batch_size,
        num_future=num_future,
        num_past=num_past,
        bidirectional=bidirectional,
        batch_first=batch_first,
        reset_state=reset_state,
    )

    trainer = HybridTrainer(model=model, optimizer_type="Adam", loss_type="huber")

    model_save_path = "./model.pt"

    # Plotting should work even on an untrained model
    plot_prediction(model, data_loaders["sequential_test_loader"], 1)
def test_lstm():
    """Test the LSTM model used for forecasting."""
    # Sample data
    df = jaguar()

    # Hyperparameters
    batch_size = 10
    num_past = 10
    num_future = 10

    # For timeseries prediction
    assert num_past == num_future

    # Prepare the dataloader
    data_loaders = dataset.MultiModalDataLoader(
        df,
        batch_size=batch_size,
        n_past=num_past,
        n_future=num_future,
        num_workers=1,
    )

    model_save_path = "./model.pt"

    # Model init
    model = LSTM(
        input_size=2,
        hidden_size=32,
        num_layers=2,
        output_size=2,
        dropout=0.1,
        batch_size=batch_size,
        num_future=num_future,
        bidirectional=False,
        batch_first=True,
        reset_state=True,
    )

    # Model trainer
    trainer = HybridTrainer(model=model, optimizer_type="Adam", loss_type="huber")

    # Train the model
    trainer.fit(data_loaders, model_save_path, epochs=10, training_mode="forecasting")
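# A minimal reload sketch (not part of the original suite; the helper name
# _reload_weights is hypothetical), assuming HybridTrainer saved a state_dict
# checkpoint at model_save_path. If your traja version pickles the whole
# module instead, torch.load returns the module itself and load_state_dict
# is unnecessary.
import torch


def _reload_weights(model, model_save_path="./model.pt"):
    checkpoint = torch.load(model_save_path, map_location="cpu")
    if isinstance(checkpoint, dict):
        # state_dict checkpoint: copy the tensors into the existing module
        model.load_state_dict(checkpoint)
        return model
    # full pickled module: use it directly
    return checkpoint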
def test_aevae_jaguar():
    """Test variational autoencoder forecasting with the Jaguar dataset."""
    # Sample data
    df = jaguar()

    # Hyperparameters
    batch_size = 10
    num_past = 10
    num_future = 5

    # Prepare the dataloader
    data_loaders = dataset.MultiModalDataLoader(
        df,
        batch_size=batch_size,
        n_past=num_past,
        n_future=num_future,
        train_split_ratio=0.5,
        num_workers=1,
        split_by_id=False,
    )

    model_save_path = "./model.pt"

    model = MultiModelVAE(
        input_size=2,
        output_size=2,
        lstm_hidden_size=32,
        num_lstm_layers=2,
        latent_size=10,
        dropout=0.1,
        batch_size=batch_size,
        num_future=num_future,
        num_past=num_past,
        bidirectional=False,
        batch_first=True,
        reset_state=True,
    )

    # Check that the latent-output toggles run without error
    model.disable_latent_output()
    model.enable_latent_output()

    # Model trainer
    trainer = HybridTrainer(model=model, optimizer_type="Adam", loss_type="huber")

    # Train the model
    trainer.fit(
        data_loaders,
        model_save_path,
        epochs=10,
        training_mode="forecasting",
        validate_every=5,
        test_every=10,
    )

    scaler = data_loaders["train_loader"].dataset.scaler

    # Load the trained model given the path
    model_path = "./model.pt"
    hyperparams = "./hypers.json"
    model_hyperparameters = traja.models.read_hyperparameters(hyperparams)

    # For prebuilt traja generative models
    generator = traja.models.inference.Generator(
        model_type="vae",
        model_hyperparameters=model_hyperparameters,
        model_path=model_path,
        model=None,
    )
    out = generator.generate(num_future, classify=False, scaler=scaler, plot_data=False)

    trainer.validate(data_loaders["validation_loader"])
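# The tests above leave "./model.pt" (and "./hypers.json" for the generator
# step) in the working directory. A minimal cleanup sketch, assuming these
# are the only artifacts written; the fixture name _cleanup_artifacts is
# hypothetical, and pytest applies an autouse fixture to every test in the
# module regardless of where it is defined.
import os

import pytest


@pytest.fixture(autouse=True)
def _cleanup_artifacts():
    yield
    for artifact in ("./model.pt", "./hypers.json"):
        if os.path.exists(artifact):
            os.remove(artifact)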