Example #1
def test_adam(self):
    """Test creating an Adam optimizer."""
    opt = optimizers.Adam(learning_rate=0.01)
    with self.test_session() as sess:
        global_step = tf.Variable(0)
        tfopt = opt._create_optimizer(global_step)
        assert isinstance(tfopt, tf.train.AdamOptimizer)
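The test goes through the private `_create_optimizer` hook, which the framework normally calls for you with its own global step. As a rough sketch of that same low-level path under TensorFlow 1.x graph mode (assuming `optimizers` here is DeepChem's `deepchem.models.optimizers` module, as in the training function at the end of this page; the `loss` tensor is a made-up stand-in for whatever a model would define):

import tensorflow as tf
from deepchem.models import optimizers

# Stand-in loss; in practice the model defines this tensor.
weights = tf.Variable([1.0, 2.0])
loss = tf.reduce_sum(tf.square(weights))

global_step = tf.Variable(0, trainable=False)
opt = optimizers.Adam(learning_rate=0.01)
tfopt = opt._create_optimizer(global_step)  # yields a tf.train.AdamOptimizer
train_op = tfopt.minimize(loss, global_step=global_step)

In ordinary use the wrapper is simply handed to a model (see `model.set_optimizer(...)` in the training function below) rather than unwrapped by hand.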
Example #2
def test_linearCosine_decay(self):
  """Test creating an optimizer with a linear cosine decay applied to the learning rate."""
  rate = optimizers.LinearCosineDecay(initial_rate=0.1, decay_steps=10000)
  opt = optimizers.Adam(learning_rate=rate)
  with self.session() as sess:
    global_step = tf.Variable(0)
    tfopt = opt._create_optimizer(global_step)
Example #3
def test_polynomial_decay(self):
  """Test creating an optimizer with a polynomially decaying learning rate."""
  rate = optimizers.PolynomialDecay(
      initial_rate=0.001, final_rate=0.0001, decay_steps=10000)
  opt = optimizers.Adam(learning_rate=rate)
  with self.test_session() as sess:
    global_step = tf.Variable(0)
    tfopt = opt._create_optimizer(global_step)
Example #4
def test_exponential_decay(self):
  """Test creating an optimizer with an exponentially decaying learning rate."""
  rate = optimizers.ExponentialDecay(
      initial_rate=0.001, decay_rate=0.99, decay_steps=10000)
  opt = optimizers.Adam(learning_rate=rate)
  with self.session() as sess:
    global_step = tf.Variable(0)
    tfopt = opt._create_optimizer(global_step)
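For intuition, the schedule above multiplies the learning rate by `decay_rate` once per `decay_steps` training steps. A tiny pure-Python sketch of the continuous form, `initial_rate * decay_rate ** (step / decay_steps)`, which is assumed here and may differ from the wrapper's exact behavior (for example, if it applies the decay in discrete "staircase" jumps):

def exponential_rate(step, initial_rate=0.001, decay_rate=0.99, decay_steps=10000):
    # One factor of decay_rate for every decay_steps steps taken.
    return initial_rate * decay_rate ** (step / decay_steps)

for step in (0, 10000, 100000):
    print(step, exponential_rate(step))
# 0 -> 0.001, 10000 -> 0.00099, 100000 -> ~0.000904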
import deepchem as dc
import deepchem.models.optimizers as dcopt


def train_seqtoseq(train_data, embedding_dimension, tokens, max_length, encoder_layers=1,
                   decoder_layers=1, dropout=0.1, tb_folder='fingerprint',
                   batch_size=32, n_epochs=100, steps_per_epoch=None):
    """Train a SeqToSeq model on train_data and return the fitted model."""
    train_generator = generate_sequences(train_data, n_epochs)
    model = dc.models.SeqToSeq(tokens, tokens, max_length,
                               encoder_layers=encoder_layers,
                               decoder_layers=decoder_layers,
                               embedding_dimension=embedding_dimension,
                               batch_size=batch_size,
                               verbose=True,
                               tensorboard=True,
                               tensorboard_log_frequency=1,
                               model_dir=tb_folder)
    if steps_per_epoch is None:
        # THRESHOLD_SIZE is assumed to be a module-level constant capping the
        # number of training sequences counted per epoch.
        steps_per_epoch = min(len(train_data), THRESHOLD_SIZE) // model.batch_size

    # Decay the learning rate by 5% per epoch and save a checkpoint once per epoch.
    model.set_optimizer(
        dcopt.Adam(learning_rate=dcopt.ExponentialDecay(0.001, 0.95, steps_per_epoch)))
    model.fit_sequences(train_generator, checkpoint_interval=steps_per_epoch)
    return model
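`train_seqtoseq` depends on a `generate_sequences` helper that is not shown on this page. Because the model uses the same token set for input and output, it is presumably a sequence autoencoder, so a plausible sketch of that helper plus a toy training call is given below; `THRESHOLD_SIZE`, the SMILES strings, and all numeric values here are illustrative assumptions, not part of the original code.

THRESHOLD_SIZE = 16384  # assumed module-level cap on sequences counted per epoch

def generate_sequences(sequences, epochs):
    # Yield (input, target) pairs; for an autoencoder both are the same sequence.
    for _ in range(epochs):
        for s in sequences:
            yield (s, s)

# Toy usage: a few SMILES strings, a character-level vocabulary, and an
# explicit steps_per_epoch (the default computation assumes a large dataset).
train_smiles = ['CCO', 'c1ccccc1', 'CC(=O)O']
tokens = sorted(set(c for s in train_smiles for c in s))
max_length = max(len(s) for s in train_smiles)
model = train_seqtoseq(train_smiles, embedding_dimension=128, tokens=tokens,
                       max_length=max_length, n_epochs=5, steps_per_epoch=1)

With a realistically sized dataset, steps_per_epoch can be left as None so the function derives it from the dataset size and the batch size.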