def __init__(self, model, **kwargs):
    """Wire up a TF language-model trainer around ``model``.

    Builds the training and test losses from the model, creates the
    optimizer/train op, and records ``nsteps`` (reporting cadence,
    default 500).
    """
    super(LanguageModelTrainerTf, self).__init__()
    self.loss = model.create_loss()
    self.test_loss = model.create_test_loss()
    self.model = model
    # `optimizer` returns the global-step tensor and the training op.
    step, train = optimizer(self.loss, **kwargs)
    self.global_step = step
    self.train_op = train
    self.nsteps = kwargs.get('nsteps', 500)
def __init__(self, model, **kwargs):
    """Construct a TF tagger trainer with span-level evaluation.

    ``span_type`` (default ``'iob'``) and ``verbose`` (default False)
    configure the evaluator; ``nsteps`` defaults to ``six.MAXSIZE``.
    """
    super(TaggerTrainerTf, self).__init__()
    self.model = model
    self.loss = model.create_loss()
    self.evaluator = TaggerEvaluatorTf(
        model,
        kwargs.get('span_type', 'iob'),
        kwargs.get('verbose', False),
    )
    self.global_step, self.train_op = optimizer(self.loss, **kwargs)
    self.nsteps = kwargs.get('nsteps', six.MAXSIZE)
def __init__(self, model, **kwargs):
    """Build a seq2seq trainer: losses, optimizer, and decode settings.

    Requires ``tgt_rlut`` (reverse lookup table for the target vocab)
    and ``basedir`` in ``kwargs``; ``nsteps`` defaults to 500 and the
    beam width to 10.
    """
    super(Seq2SeqTrainerTf, self).__init__()
    self.model = model
    self.sess = model.sess
    self.loss = model.create_loss()
    self.test_loss = model.create_test_loss()
    self.tgt_rlut = kwargs['tgt_rlut']
    self.base_dir = kwargs['basedir']
    # Colocate gradients with their ops to keep multi-device placement sane.
    self.global_step, self.train_op = optimizer(
        self.loss, colocate_gradients_with_ops=True, **kwargs
    )
    self.nsteps = kwargs.get('nsteps', 500)
    self.beam = kwargs.get('beam', 10)
def __init__(self, model, **kwargs):
    """Wire up a TF language-model trainer around ``model``.

    When ``eval_mode`` is truthy in ``kwargs``, the training op (and the
    global step) are stubbed out with no-ops, because creating a real
    training op on a reloaded model would break things.

    ``nsteps`` (default 500) controls reporting cadence.
    """
    super(LanguageModelTrainerTf, self).__init__()
    self.model = model
    self.loss = model.create_loss()
    self.test_loss = model.create_test_loss()
    if kwargs.get('eval_mode', False):
        # When a model is reloaded, creating the training op will break things.
        # BUG FIX: the original left self.global_step unset in this branch,
        # so any later access raised AttributeError; stub it like train_op.
        self.global_step = tf.no_op()
        self.train_op = tf.no_op()
    else:
        self.global_step, self.train_op = optimizer(self.loss, **kwargs)
    self.nsteps = kwargs.get('nsteps', 500)
def __init__(self, model, **kwargs):
    """Seq2seq trainer setup.

    Pulls the session and losses off ``model``, stores the target
    reverse lookup table (``tgt_rlut``) and ``basedir``, and builds the
    optimizer with gradient colocation enabled.
    """
    super(Seq2SeqTrainerTf, self).__init__()
    self.sess = model.sess
    self.model = model
    train_loss = model.create_loss()
    self.loss = train_loss
    self.test_loss = model.create_test_loss()
    self.tgt_rlut = kwargs['tgt_rlut']
    self.base_dir = kwargs['basedir']
    step, train_op = optimizer(
        train_loss, colocate_gradients_with_ops=True, **kwargs)
    self.global_step = step
    self.train_op = train_op
    self.nsteps = kwargs.get('nsteps', 500)
    self.beam = kwargs.get('beam', 10)
def __init__(self, model, **kwargs):
    """Classifier trainer; optionally maintains an EMA of the weights.

    If ``ema_decay`` is given, an exponential-moving-average update op
    is attached as a control dependency of every training step, and the
    load/restore ops for swapping EMA weights in and out are recorded.
    """
    super(ClassifyTrainerTf, self).__init__()
    self.sess = model.sess
    self.model = model
    self.loss = model.create_loss()
    self.test_loss = model.create_test_loss()
    self.global_step, raw_train_op = optimizer(
        self.loss, colocate_gradients_with_ops=True, **kwargs
    )
    self.nsteps = kwargs.get('nsteps', six.MAXSIZE)
    decay = kwargs.get('ema_decay', None)
    self.ema = decay is not None
    if self.ema:
        ema_op, self.ema_load, self.ema_restore = _add_ema(model, float(decay))
        # Make the EMA update run as a side effect of each training step.
        with tf.control_dependencies([ema_op]):
            self.train_op = tf.identity(raw_train_op)
    else:
        self.train_op = raw_train_op
def __init__(self, model, **kwargs):
    """Seq2seq trainer setup, with an evaluation-only mode.

    When ``eval_mode`` is truthy, the losses, global step, and train op
    are all stubbed with no-ops: on a reloaded model, things like
    ``decoder.preds`` used by the loss are not present, so creating the
    real loss or training op would break.

    Requires ``tgt_rlut`` and ``basedir`` in ``kwargs``; ``nsteps``
    defaults to 500 and the beam width to 10.
    """
    super(Seq2SeqTrainerTf, self).__init__()
    self.sess = model.sess
    self.model = model
    self.tgt_rlut = kwargs['tgt_rlut']
    self.base_dir = kwargs['basedir']
    # FIX: eval_mode was queried twice, driving two separate branches of
    # the same condition; evaluate once and branch once.
    if kwargs.get('eval_mode', False):
        self.loss = tf.no_op()
        self.test_loss = tf.no_op()
        # BUG FIX: self.global_step was previously left unset in eval
        # mode, so any later access raised AttributeError.
        self.global_step = tf.no_op()
        self.train_op = tf.no_op()
    else:
        self.loss = model.create_loss()
        self.test_loss = model.create_test_loss()
        self.global_step, self.train_op = optimizer(
            self.loss, colocate_gradients_with_ops=True, **kwargs)
    self.nsteps = kwargs.get('nsteps', 500)
    self.beam = kwargs.get('beam', 10)