def _optimizer(c: Configs):
    optimizer = OptimizerConfigs()
    optimizer.parameters = c.model.parameters()
    optimizer.optimizer = 'Adam'
    optimizer.d_model = c.d_model
    return optimizer
def _generator_optimizer(c: Configs):
    opt_conf = OptimizerConfigs()
    opt_conf.optimizer = 'Adam'
    opt_conf.parameters = c.generator.parameters()
    opt_conf.learning_rate = 2.5e-4
    # Setting the exponential decay rate for the first moment of the gradient,
    # $\beta_1$, to `0.5` is important.
    # The default of `0.9` fails.
    opt_conf.betas = (0.5, 0.999)
    return opt_conf
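# Illustrative sketch (not part of the original code): assuming that
# `OptimizerConfigs` with `optimizer = 'Adam'` resolves to `torch.optim.Adam`,
# the configuration above builds roughly the optimizer shown here.
# The stand-in generator network is hypothetical and only used for this example.
import torch
import torch.nn as nn

# A stand-in generator network for the example
_example_generator = nn.Sequential(nn.Linear(100, 784), nn.Tanh())
# Adam with beta_1 = 0.5, matching the configuration above
_example_generator_optimizer = torch.optim.Adam(_example_generator.parameters(),
                                                lr=2.5e-4, betas=(0.5, 0.999))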
def transformer_optimizer(c: Configs):
    """
    Create a configurable optimizer.

    Parameters like learning rate can be changed by passing a dictionary when starting the experiment.
    """
    optimizer = OptimizerConfigs()
    optimizer.parameters = c.model.parameters()
    optimizer.d_model = c.transformer.d_model
    optimizer.optimizer = 'Noam'
    return optimizer
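# Illustrative usage sketch (not part of the original code): the docstring above refers
# to labml's experiment API, where calculated configs can be overridden with a dictionary
# when the experiment is started. The experiment name and the overridden learning rate
# below are hypothetical values, and the surrounding `Configs` class is assumed.
def _example_transformer_run():
    from labml import experiment

    conf = Configs()
    experiment.create(name='transformer_example')
    # Override calculated configs with a dictionary, e.g. change the learning rate
    experiment.configs(conf, {'optimizer.learning_rate': 1.0})
    with experiment.start():
        conf.run()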
def init(self):
    # Initialize encoder & decoder
    self.encoder = EncoderRNN(self.d_z, self.enc_hidden_size).to(self.device)
    self.decoder = DecoderRNN(self.d_z, self.dec_hidden_size, self.n_distributions).to(self.device)

    # Set optimizer. Things like type of optimizer and learning rate are configurable
    optimizer = OptimizerConfigs()
    optimizer.parameters = list(self.encoder.parameters()) + list(self.decoder.parameters())
    self.optimizer = optimizer

    # Create sampler
    self.sampler = Sampler(self.encoder, self.decoder)

    # `npz` file path is `data/sketch/[DATASET NAME].npz`
    path = lab.get_data_path() / 'sketch' / f'{self.dataset_name}.npz'
    # Load the numpy file
    dataset = np.load(str(path), encoding='latin1', allow_pickle=True)

    # Create training dataset
    self.train_dataset = StrokesDataset(dataset['train'], self.max_seq_length)
    # Create validation dataset
    self.valid_dataset = StrokesDataset(dataset['valid'], self.max_seq_length, self.train_dataset.scale)

    # Create training data loader
    self.train_loader = DataLoader(self.train_dataset, self.batch_size, shuffle=True)
    # Create validation data loader
    self.valid_loader = DataLoader(self.valid_dataset, self.batch_size)

    # Add hooks to monitor layer outputs on Tensorboard
    hook_model_outputs(self.mode, self.encoder, 'encoder')
    hook_model_outputs(self.mode, self.decoder, 'decoder')

    # Configure the tracker to print the total train/validation loss
    tracker.set_scalar("loss.total.*", True)

    self.state_modules = []
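# Illustrative usage sketch (not part of the original code): this assumes labml's
# experiment workflow, in which `init()` is expected to be invoked by the experiment
# framework before training begins. The experiment name, dataset name, and override
# keys below are hypothetical values.
def _example_sketch_run():
    from labml import experiment

    configs = Configs()
    experiment.create(name='sketch_rnn_example')
    # Override dataset and training settings with a dictionary
    experiment.configs(configs, {'dataset_name': 'bicycle'})
    with experiment.start():
        configs.run()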
def init(self):
    # Create a configurable optimizer.
    # Parameters like learning rate can be changed by passing a dictionary when starting the experiment.
    optimizer = OptimizerConfigs()
    optimizer.parameters = self.model.parameters()
    optimizer.d_model = self.transformer.d_model
    optimizer.optimizer = 'Noam'
    self.optimizer = optimizer

    # Create a sequential data loader for training
    self.train_loader = SequentialDataLoader(text=self.text.train,
                                             dataset=self.text,
                                             batch_size=self.batch_size,
                                             seq_len=self.seq_len)
    # Create a sequential data loader for validation
    self.valid_loader = SequentialDataLoader(text=self.text.valid,
                                             dataset=self.text,
                                             batch_size=self.batch_size,
                                             seq_len=self.seq_len)

    self.state_modules = [self.accuracy]
def _optimizer(c: Configs):
    from labml_helpers.optimizer import OptimizerConfigs
    opt_conf = OptimizerConfigs()
    opt_conf.parameters = c.model.parameters()
    return opt_conf
def _default_optimizer(c: SimpleTrainValidConfigs):
    from labml_helpers.optimizer import OptimizerConfigs
    opt_conf = OptimizerConfigs()
    opt_conf.parameters = c.model.parameters()
    return opt_conf
def optimizer(c: Configs):
    conf = OptimizerConfigs()
    conf.parameters = c.model.parameters()
    return conf
def _generator_optimizer(c: Configs):
    opt_conf = OptimizerConfigs()
    opt_conf.parameters = c.generator.parameters()
    return opt_conf
def _discriminator_optimizer(c: Configs):
    opt_conf = OptimizerConfigs()
    opt_conf.parameters = c.discriminator.parameters()
    return opt_conf
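# Illustrative sketch (not part of the original code): calculated-config functions like
# the ones above are typically registered on a `Configs` class with labml's `option`
# decorator, so the framework knows which attribute each function computes. This toy
# example uses hypothetical names (`_ExampleConfigs`, `greeting`) to show the pattern.
from labml.configs import BaseConfigs, option


class _ExampleConfigs(BaseConfigs):
    # A value computed lazily by a calculated-config function
    greeting: str


# Register `_default_greeting` as the default calculator for `_ExampleConfigs.greeting`
@option(_ExampleConfigs.greeting)
def _default_greeting(c: _ExampleConfigs):
    return 'hello'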