def sequential_valid_loader(c: NLPAutoRegressionConfigs):
    """
    ### Sequential validation data loader
    """
    return SequentialDataLoader(text=c.text.valid,
                                dataset=c.text,
                                batch_size=c.batch_size,
                                seq_len=c.seq_len)


def sequential_train_loader(c: NLPAutoRegressionConfigs):
    """
    ### Sequential training data loader
    """
    return SequentialDataLoader(text=c.text.train,
                                dataset=c.text,
                                batch_size=c.batch_size,
                                seq_len=c.seq_len)


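# `SequentialDataLoader` comes from the library; as a rough illustration of what a
# sequential loader does, the sketch below (a simplified stand-in, not the library
# implementation) lays the token stream out in `batch_size` contiguous columns and
# yields `(input, target)` pairs where the target is the input shifted by one token.
import torch


class _SketchSequentialLoader:
    """
    A hypothetical, simplified sequential loader for illustration only.
    """

    def __init__(self, text: torch.Tensor, batch_size: int, seq_len: int):
        # Trim the flat token tensor so it splits evenly into `batch_size` streams
        n_steps = text.shape[0] // batch_size
        text = text[:n_steps * batch_size]
        # Shape `[n_steps, batch_size]`; each column is a contiguous stream of tokens
        self.data = text.view(batch_size, -1).t().contiguous()
        self.seq_len = seq_len

    def __iter__(self):
        # Step through the streams `seq_len` tokens at a time
        for i in range(0, self.data.shape[0] - 1, self.seq_len):
            seq_len = min(self.seq_len, self.data.shape[0] - 1 - i)
            # Inputs and the next-token targets
            yield self.data[i:i + seq_len], self.data[i + 1:i + 1 + seq_len]

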
def valid_loader(c: Configs):
    """
    Create a sequential data loader for validation
    """
    return SequentialDataLoader(text=c.text.valid,
                                dataset=c.text,
                                batch_size=c.batch_size,
                                seq_len=c.seq_len)


def train_loader(c: Configs):
    """
    Create a sequential data loader for training
    """
    return SequentialDataLoader(text=c.text.train,
                                dataset=c.text,
                                batch_size=c.batch_size,
                                seq_len=c.seq_len)


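# These functions are presumably registered as calculators for the corresponding
# config options. With labml that is done with the `@option` decorator; the wiring
# below is an assumed sketch (it takes for granted that `Configs` declares
# `train_loader` and `valid_loader` options).
from labml.configs import option


@option(Configs.train_loader)
def _sequential_train_loader(c: Configs):
    # Delegate to the sequential training loader defined above
    return train_loader(c)


@option(Configs.valid_loader)
def _sequential_valid_loader(c: Configs):
    # Delegate to the sequential validation loader defined above
    return valid_loader(c)

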
def init(self):
    # Create a configurable optimizer.
    # Parameters like learning rate can be changed by passing a dictionary when starting the experiment.
    optimizer = OptimizerConfigs()
    optimizer.parameters = self.model.parameters()
    optimizer.d_model = self.transformer.d_model
    optimizer.optimizer = 'Noam'
    self.optimizer = optimizer

    # Create a sequential data loader for training
    self.train_loader = SequentialDataLoader(text=self.text.train,
                                             dataset=self.text,
                                             batch_size=self.batch_size,
                                             seq_len=self.seq_len)
    # Create a sequential data loader for validation
    self.valid_loader = SequentialDataLoader(text=self.text.valid,
                                             dataset=self.text,
                                             batch_size=self.batch_size,
                                             seq_len=self.seq_len)

    self.state_modules = [self.accuracy]


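# Because the optimizer is built through `OptimizerConfigs`, its hyper-parameters can
# be overridden from the experiment launcher. The function below is a usage sketch
# with assumed names (the experiment title and override keys are illustrative, and it
# assumes `Configs` exposes a `run()` training loop as labml's `TrainValidConfigs` does).
def _example_experiment():
    from labml import experiment

    conf = Configs()
    # Create an experiment and override nested optimizer options, e.g. the learning rate
    experiment.create(name='sequential_lm')
    experiment.configs(conf, {'optimizer.learning_rate': 1.0})
    # Start the experiment and run the training loop
    with experiment.start():
        conf.run()

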
def train_loader(c: Configs):
    """
    Create a sequential data loader for training
    """
    return SequentialDataLoader(text=c.text.train,
                                dataset=c.text,
                                batch_size=c.batch_size,
                                seq_len=c.seq_len)