Example #1
0
    def __init__(self, freq, activation, input, target_idx, task_loss, surrogate_loss,
                 hyperparameter, learning_rate, batch_generator, n_batches,
                 factor=1.5, n_updates=10):
        """Register an 'adapt_zloss' extension and compile its theano helpers.

        Stores the adaptation settings, then builds two compiled functions:
        ``fun_activation`` maps a raw ``input`` batch to its ``activation``
        values, and ``fun_update_task_loss`` applies one scaled gradient step
        on a given activation matrix w.r.t. ``surrogate_loss`` and returns
        the resulting ``task_loss`` together with the updated activation.
        """
        Extension.__init__(self, 'adapt_zloss', freq)

        # Plain configuration state kept for later use by the extension.
        self.hyperparameter = hyperparameter
        self.learning_rate = learning_rate
        self.batch_generator = batch_generator
        self.n_batches = n_batches
        self.factor = factor
        self.n_updates = n_updates

        # Compiled map: raw input batch -> activation values.
        self.fun_activation = theano.function([input], activation)

        # Re-express the surrogate loss in terms of a free activation matrix
        # so we can differentiate with respect to the activation itself.
        act_in = tensor.matrix()
        surr_on_act = theano.clone(surrogate_loss,
                                   replace={activation: act_in})
        d_surr = theano.grad(surr_on_act, act_in)
        # NOTE(review): the step is scaled by a hard-coded 100x factor on top
        # of learning_rate -- confirm this magnitude is intentional.
        stepped_act = act_in - 100 * learning_rate * d_surr

        # Task loss evaluated at the post-step activation.
        task_after_step = theano.clone(task_loss,
                                       replace={activation: stepped_act})

        self.fun_update_task_loss = theano.function(
                [act_in, target_idx], [task_after_step, stepped_act])
Example #2
0
 def __init__(self, freq, words, embedding_matrix, knn, vocab, inv_vocab):
     """Extension reporting the nearest neighbours of selected words.

     Caches the monitored ``words`` together with the embedding matrix,
     neighbour count and the (inverse) vocabulary maps, and resolves each
     monitored word to its vocabulary id once, up front.
     """
     Extension.__init__(self, 'Nearest neighbours of words', freq)
     self.words = words
     self.embedding_matrix = embedding_matrix
     self.knn = knn
     self.vocab = vocab
     self.inv_vocab = inv_vocab
     # One id per monitored word; raises KeyError for out-of-vocab words.
     self.word_ids = [self.vocab[w] for w in words]
Example #3
0
 def __init__(self, freq, words, embedding_matrix, knn, vocab, inv_vocab):
     """Track a fixed set of words whose nearest neighbours are reported.

     All constructor arguments are stored as-is; additionally the
     vocabulary id of every monitored word is looked up once here so that
     later reporting needs no further dictionary lookups.
     """
     Extension.__init__(self, 'Nearest neighbours of words', freq)
     self.words = words
     self.embedding_matrix = embedding_matrix
     self.knn = knn
     self.vocab = vocab
     self.inv_vocab = inv_vocab
     # Map each word through the vocabulary to its integer id.
     self.word_ids = list(map(vocab.__getitem__, words))
    def __init__(self,
                 freq,
                 activation,
                 input,
                 target_idx,
                 task_loss,
                 surrogate_loss,
                 hyperparameter,
                 learning_rate,
                 batch_generator,
                 n_batches,
                 factor=1.5,
                 n_updates=10):
        """Set up the 'adapt_zloss' extension and compile its theano functions.

        Parameters
        ----------
        freq : firing frequency, forwarded to ``Extension.__init__``.
        activation : theano variable for the model activation.
        input : theano variable for the raw model input.
        target_idx : theano variable selecting the targets of ``task_loss``.
        task_loss, surrogate_loss : theano loss expressions built on
            ``activation``.
        hyperparameter, learning_rate, batch_generator, n_batches, factor,
            n_updates : stored unchanged for later use by the extension.
        """
        Extension.__init__(self, 'adapt_zloss', freq)

        # Configuration state kept for later use.
        self.batch_generator = batch_generator
        self.n_batches = n_batches
        self.learning_rate = learning_rate
        self.hyperparameter = hyperparameter
        self.factor = factor
        self.n_updates = n_updates

        # Earlier variant differentiated directly w.r.t. ``activation``:
        # grad = theano.grad(surrogate_loss, activation)
        # new_activation = activation - learning_rate * grad
        # Compiled map: raw input batch -> activation values.
        self.fun_activation = theano.function([input], activation)

        # Substitute a free matrix for ``activation`` so the surrogate loss
        # can be differentiated w.r.t. the activation values themselves.
        activation_bis = tensor.matrix()
        surr_loss_bis = theano.clone(surrogate_loss,
                                     replace={activation: activation_bis})
        grad = theano.grad(surr_loss_bis, activation_bis)
        # One gradient step on the activation; NOTE(review): the hard-coded
        # 100x scaling on learning_rate looks intentional but verify.
        new_activation = activation_bis - 100 * learning_rate * grad

        # Task loss evaluated at the post-step activation.
        task_loss_bis = theano.clone(task_loss,
                                     replace={activation: new_activation})

        # Compiled map: (activation matrix, targets) ->
        # (task loss after the step, updated activation).
        self.fun_update_task_loss = theano.function(
            [activation_bis, target_idx], [task_loss_bis, new_activation])