def train(self, dataset):
    """Run the training loop over `dataset` until the step controller stops it.

    Each iteration feeds one batch, runs the train op, and periodically
    prints progress, evaluates on the validation set, and checkpoints.
    Early-stops when either the controller's max_step is reached or
    `evaluate` signals convergence.

    Args:
        dataset: project dataset object exposing `training_generator`
            (yielding dicts with 'x', 'dev', 'y') and `val_set`.
            # NOTE(review): exact batch schema inferred from feed_dict keys —
            # confirm against the dataset implementation.
    """
    # batch * evaluation_span = dataset size = one epoch
    self.training_control = utils.training_control(
        self.global_step,
        print_span=10,
        evaluation_span=100,
        max_step=self.max_step,
    )
    # portion=0.5: only half of the training data is drawn per epoch —
    # presumably intentional subsampling; verify against the generator.
    for batch in dataset.training_generator(batch_size=self.batch_size,
                                            portion=0.5):
        accuracy, loss, wm, _ = self.run(
            [self.accuracy, self.loss, self.warmup, self.train_op],
            feed_dict={
                self.input_x: batch['x'],
                self.input_dev: batch['dev'],
                self.input_y: batch['y'],
                self.is_training: True,
            })
        # Ask the controller (a graph op) what to do at this step.
        step_control = self.run(self.training_control)
        if step_control['time_to_print']:
            print(f"train_loss = {loss} train_acc= {accuracy} "
                  f"step {step_control['step']} warmup{wm}")
        if step_control['time_to_stop']:
            break
        if step_control['time_to_evaluate']:
            # Evaluate first, then checkpoint, then honor early stopping.
            if_stop = self.evaluate(dataset.val_set)
            self.save_checkpoint()
            if if_stop:
                break
def train(self, dataset, lr):
    """Run the training loop over `dataset` until the step controller stops it.

    Evaluation is scheduled once per epoch (evaluation_span = number of
    batches per epoch). Prints loss periodically, evaluates on the
    validation set, checkpoints, and early-stops when `evaluate` says so
    or max_step (100000) is reached.

    Args:
        dataset: project dataset object exposing `train_set` (array-like
            with `.shape`), `training_generator` (yielding dicts with
            'x', 'y'), and `val_set`.
        lr: learning rate.
            # NOTE(review): `lr` is never read in this body — either wire it
            # into the feed_dict/optimizer or drop it at the call sites.
    """
    # batch * evaluation_span = dataset size = one epoch
    self.training_control = utils.training_control(
        self.global_step,
        print_span=10,
        evaluation_span=round(dataset.train_set.shape[0] / self.batch_size),
        max_step=100000,
    )
    for batch in dataset.training_generator(batch_size=self.batch_size):
        # Logits are fetched but unused; kept in the fetch list to avoid
        # changing what the session executes.
        results, loss, _ = self.run(
            [self.logits, self.loss, self.train_op],
            feed_dict={
                self.input_x: batch['x'],
                self.input_y: batch['y'],
                self.is_training: True,
            })
        # Ask the controller (a graph op) what to do at this step.
        step_control = self.run(self.training_control)
        if step_control['time_to_print']:
            print(f"train_loss= {loss} round{step_control['step']}")
        if step_control['time_to_stop']:
            break
        if step_control['time_to_evaluate']:
            # Evaluate first, then checkpoint, then honor early stopping.
            if_stop = self.evaluate(dataset.val_set)
            self.save_checkpoint()
            if if_stop:
                break