def _integration(
    data_with_covariates,
    tmp_path,
    gpus,
    cell_type="LSTM",
    data_loader_kwargs=None,
    clip_target: bool = False,
    **kwargs,
):
    """Run a short end-to-end DeepAR training cycle and verify checkpointing.

    Builds dataloaders from ``data_with_covariates``, trains for a few batches,
    reloads the best checkpoint and runs prediction. Any extra ``**kwargs`` are
    forwarded to ``DeepAR.from_dataset``.

    Args:
        data_with_covariates: dataframe-like fixture with a ``volume`` column.
        tmp_path: temporary directory for logs/checkpoints; removed on exit.
        gpus: passed through to the Lightning ``Trainer``.
        cell_type: RNN cell type for DeepAR (e.g. "LSTM" or "GRU").
        data_loader_kwargs: optional overrides merged over the default
            dataloader settings.
        clip_target: if True, clip the target into ``[1e-3, 1.0]``.
    """
    # NOTE(fix): was a mutable default argument (``data_loader_kwargs={}``) —
    # a shared dict would leak state between calls. Use None-sentinel instead.
    if data_loader_kwargs is None:
        data_loader_kwargs = {}
    if clip_target:
        data_with_covariates["target"] = data_with_covariates["volume"].clip(1e-3, 1.0)
    else:
        data_with_covariates["target"] = data_with_covariates["volume"]
    data_loader_default_kwargs = dict(
        target="target",
        time_varying_known_reals=["price_actual"],
        time_varying_unknown_reals=["target"],
        static_categoricals=["agency"],
        add_relative_time_idx=True,
    )
    # Caller-supplied settings take precedence over the defaults above.
    data_loader_default_kwargs.update(data_loader_kwargs)
    dataloaders_with_covariates = make_dataloaders(data_with_covariates, **data_loader_default_kwargs)
    train_dataloader = dataloaders_with_covariates["train"]
    val_dataloader = dataloaders_with_covariates["val"]
    early_stop_callback = EarlyStopping(monitor="val_loss", min_delta=1e-4, patience=1, verbose=False, mode="min")
    logger = TensorBoardLogger(tmp_path)
    trainer = pl.Trainer(
        max_epochs=3,
        gpus=gpus,
        weights_summary="top",
        gradient_clip_val=0.1,
        callbacks=[early_stop_callback],
        checkpoint_callback=True,
        default_root_dir=tmp_path,
        # keep the test fast: only a couple of batches per phase
        limit_train_batches=2,
        limit_val_batches=2,
        logger=logger,
    )
    net = DeepAR.from_dataset(
        train_dataloader.dataset,
        cell_type=cell_type,
        learning_rate=0.15,
        log_gradient_flow=True,
        log_interval=1000,
        n_plotting_samples=100,
        **kwargs,
    )
    net.size()
    try:
        trainer.fit(
            net,
            train_dataloader=train_dataloader,
            val_dataloaders=val_dataloader,
        )
        # check loading from the best checkpoint reported by the callback
        net = DeepAR.load_from_checkpoint(trainer.checkpoint_callback.best_model_path)
        # check prediction
        net.predict(val_dataloader, fast_dev_run=True, return_index=True, return_decoder_lengths=True)
    finally:
        # always clean up the temporary directory, even if training failed
        shutil.rmtree(tmp_path, ignore_errors=True)
    # NOTE(fix): removed an accidental duplicate ``net.predict(...)`` call that
    # repeated, after cleanup, the exact prediction already performed above.
def model(dataloaders_with_covariates):
    """Build a DeepAR network from the training dataloader's dataset.

    Args:
        dataloaders_with_covariates: mapping with a ``"train"`` dataloader.

    Returns:
        A ``DeepAR`` instance configured for the test suite.
    """
    training_dataset = dataloaders_with_covariates["train"].dataset
    return DeepAR.from_dataset(
        training_dataset,
        learning_rate=0.15,
        log_gradient_flow=True,
        log_interval=1000,
    )
def test_pickle(dataloaders_with_covariates, loss):
    """Verify that a DeepAR model survives a pickle round-trip."""
    training_dataset = dataloaders_with_covariates["train"].dataset
    net = DeepAR.from_dataset(
        training_dataset,
        hidden_size=5,
        learning_rate=0.15,
        log_gradient_flow=True,
        log_interval=1000,
        loss=loss,
    )
    serialized = pickle.dumps(net)
    # deserialization must not raise
    pickle.loads(serialized)
def _integration(dataloaders_with_covariates, tmp_path, gpus, cell_type="LSTM"):
    """Run a fast-dev DeepAR training cycle and verify checkpoint reload.

    Trains on the supplied dataloaders with ``fast_dev_run=True``, reloads the
    model from the checkpoint callback's best path, and runs prediction.

    Args:
        dataloaders_with_covariates: mapping with ``"train"`` and ``"val"``
            dataloaders.
        tmp_path: temporary directory for logs/checkpoints; removed on exit.
        gpus: passed through to the Lightning ``Trainer``.
        cell_type: RNN cell type for DeepAR (e.g. "LSTM" or "GRU").
    """
    train_dataloader = dataloaders_with_covariates["train"]
    val_dataloader = dataloaders_with_covariates["val"]
    early_stop_callback = EarlyStopping(monitor="val_loss", min_delta=1e-4, patience=1, verbose=False, mode="min")
    logger = TensorBoardLogger(tmp_path)
    checkpoint = ModelCheckpoint(filepath=tmp_path)
    trainer = pl.Trainer(
        checkpoint_callback=checkpoint,
        max_epochs=3,
        gpus=gpus,
        weights_summary="top",
        gradient_clip_val=0.1,
        callbacks=[early_stop_callback],
        fast_dev_run=True,
        logger=logger,
    )
    net = DeepAR.from_dataset(
        train_dataloader.dataset,
        cell_type=cell_type,
        learning_rate=0.15,
        log_gradient_flow=True,
        log_interval=1000,
    )
    net.size()
    try:
        trainer.fit(
            net,
            train_dataloader=train_dataloader,
            val_dataloaders=val_dataloader,
        )
        # NOTE(fix): previously hardcoded "{dirpath}/epoch=0.ckpt", which breaks
        # whenever Lightning's checkpoint naming scheme changes; ask the
        # checkpoint callback for the best model path instead (consistent with
        # the other integration helper in this file).
        net = DeepAR.load_from_checkpoint(trainer.checkpoint_callback.best_model_path)
        # check prediction
        net.predict(val_dataloader, fast_dev_run=True, return_index=True, return_decoder_lengths=True)
    finally:
        # always clean up the temporary directory, even if training failed
        shutil.rmtree(tmp_path, ignore_errors=True)
    # NOTE(fix): removed an accidental duplicate ``net.predict(...)`` call that
    # repeated, after cleanup, the exact prediction already performed above.