def build_graph(self):
    """Construct the TF1 training graph for the embedding model.

    Defines the input placeholders, the embedding/output variables, a full
    (non-sampled) softmax cross-entropy loss, and an SGD optimizer, then
    opens a session, initializes all variables, and creates a Saver.

    Exposed attributes (read by callers): train_inputs, train_labels,
    learning_rate, gathered, loss, optimizer, sess, saver.
    """
    # Per-step feeds: token ids for the batch, their target ids, and the
    # (schedulable) learning rate.
    self.train_inputs = tf.placeholder(tf.int32, shape=[self.batch_size])
    self.train_labels = tf.placeholder(tf.int32, shape=[self.batch_size])
    self.learning_rate = tf.placeholder(tf.float32)

    # Trainable parameters: the embedding table plus the output projection.
    embedding_table = tf.Variable(
        tf.random_uniform(
            [self.vocabulary_size, self.embedding_size], -1.0, 1.0))
    out_weights = tf.Variable(
        tf.truncated_normal(
            [self.embedding_size, self.vocabulary_size],
            # 1/sqrt(d) keeps initial logits at unit scale.
            stddev=1.0 / math.sqrt(self.embedding_size)))
    out_biases = tf.Variable(tf.zeros([self.vocabulary_size]))

    # Look up the input embeddings; kept on self so callers can fetch them.
    self.gathered = tf.gather(embedding_table, self.train_inputs)

    # Full softmax over the whole vocabulary (no negative sampling).
    logits = tf.matmul(self.gathered, out_weights) + out_biases
    self.loss = tf.reduce_mean(
        tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=self.train_labels, logits=logits))
    self.optimizer = tf.train.GradientDescentOptimizer(
        self.learning_rate).minimize(self.loss)

    # Session management: initialize variables and prepare checkpointing.
    self.sess = tf.Session()
    self.sess.run(tf.global_variables_initializer())
    self.saver = tf.train.Saver()
def make_node(self):
    """Build an Apply node with no inputs and two outputs.

    The first output is a generic (opaque) variable; the second is a
    tensor whose dtype/broadcastable pattern come from this Op instance.
    """
    generic_out = theano.Variable(Generic())
    tensor_out = tensor(self.dtype, broadcastable=self.broadcastable)
    return gof.Apply(self, [], [generic_out, tensor_out])
def make_node(self, request, data):
    """Build an Apply node taking (request, data) and yielding one generic output."""
    output = theano.Variable(Generic())
    return gof.Apply(self, [request, data], [output])
def make_node(self, data):
    """Build an Apply node over `data` with two outputs.

    Outputs are a generic (opaque) variable and a fresh variable of the
    same type as the `data` input.
    """
    generic_out = theano.Variable(Generic())
    mirrored_out = data.type()
    return gof.Apply(self, [data], [generic_out, mirrored_out])
def test_get_output_input_is_variable(self, layer):
    """Passing a theano Variable to layer.get_output returns that same object."""
    var = theano.Variable("myvariable")
    result = layer.get_output(var)
    assert result is var
def test_get_output_input_is_variable(self, layer, get_output):
    """The get_output helper returns an input theano Variable unchanged."""
    var = theano.Variable("myvariable")
    result = get_output(layer, var)
    assert result is var
def test_get_output_input_is_variable(self, layer, get_outputs):
    """get_outputs wraps an input theano Variable in a one-element tuple."""
    var = theano.Variable("myvariable")
    result = get_outputs(layer, var)
    assert result == (var, )