Example #1
def test_softmax0(self):
    # Build the same softmax graph with the custom ad library and with
    # TensorFlow, then compare the two via utils.custom_test.
    # is this a correct test? Should the gradients always be this small?
    my_graph = ad.Softmax(self.my_w4)
    tf_graph = tf.nn.softmax(self.tf_w4)
    wrt_vars = [self.my_w4]
    tf_vars = [self.tf_w4]
    utils.custom_test(self, my_graph, wrt_vars, tf_graph, tf_vars)
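
As to the question in the comment: yes, small softmax gradients are expected. Each entry of the softmax Jacobian is s_i * (delta_ij - s_j), so no entry can exceed 1/4 in absolute value, and entries shrink further as the distribution saturates. A quick standalone NumPy check of that bound (the function names here are illustrative, not part of the test suite):

import numpy as np

def softmax(x):
    # Numerically stable softmax for a 1-D vector.
    e = np.exp(x - x.max())
    return e / e.sum()

def softmax_jacobian(x):
    # J[i, j] = s[i] * ((i == j) - s[j]); the diagonal s_i * (1 - s_i) is
    # at most 1/4, and the off-diagonal -s_i * s_j is also bounded by 1/4.
    s = softmax(x)
    return np.diag(s) - np.outer(s, s)

x = np.random.randn(8)
print(np.abs(softmax_jacobian(x)).max())  # never exceeds 0.25
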
Example #2
def sample_char(logits):
    # Sample one character from the softmax distribution over the logits,
    # sharpened (T < 1) or flattened (T > 1) by the global `temperature`.
    next_char_onehot = np.random.multinomial(
        n=1, pvals=ad.Softmax(logits / temperature)()[0])
    next_char = text_loader.ind_to_char[np.argmax(next_char_onehot)]
    next_char_onehot = np.expand_dims(next_char_onehot, axis=0)
    return next_char_onehot, next_char
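
For context, dividing the logits by the temperature before the softmax controls how peaked the sampling distribution is. A minimal NumPy-only sketch of the same sampling step (the function name and the numerically stable softmax are illustrative assumptions, not taken from the original code):

import numpy as np

def sample_index(logits, temperature=1.0):
    # Temperature-scaled, numerically stable softmax over 1-D logits.
    scaled = logits / temperature
    scaled = scaled - scaled.max()
    probs = np.exp(scaled) / np.exp(scaled).sum()
    # multinomial with n=1 returns a one-hot count vector; argmax
    # recovers the sampled index.
    onehot = np.random.multinomial(n=1, pvals=probs)
    return int(np.argmax(onehot))

print(sample_index(np.array([2.0, 1.0, 0.1]), temperature=0.5))
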
Example #3
def test_softmax2(self):
    my_graph = ad.Softmax(self.my_w6)
    tf_graph = tf.nn.softmax(self.tf_w6)
    wrt_vars = [self.my_w6]
    tf_vars = [self.tf_w6]
    utils.custom_test(self, my_graph, wrt_vars, tf_graph, tf_vars)
Example #4
def network_output(x):
    # Wrap the network's raw outputs in a Softmax node and evaluate it.
    probs = ad.Softmax(nn(x))
    return probs()
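
Note the call pattern shared with Example #2: constructing ad.Softmax(...) only builds a graph node, and calling the node afterwards (probs()) evaluates it to a concrete probability array.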