Example #1
def _get_b(self):
    # Log the name of the method being executed (project-specific helper).
    common.print_func_name(self._get_b)
    # Evaluate and print the bias of the first weighted layer through the session.
    layer = self.weighted_layers[0]
    print(self.sess.run(layer.b))
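Example #1 reads the bias variable of the first weighted layer by evaluating it through the model's TensorFlow session. The sketch below shows the same pattern in isolation, assuming the TF 1.x session API; the variable here is a hypothetical stand-in for layer.b, not taken from the original project.

    import tensorflow as tf  # TF 1.x API (tf.Session, tf.global_variables_initializer)

    b = tf.Variable(tf.zeros([10]), name="b")  # hypothetical stand-in for layer.b
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # Evaluating the variable returns its current value as a NumPy array.
        print(sess.run(b))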
Example #2
def _init_weight_values(n_inputs, n_outputs, shape):
    # Log the name of the function being executed (project-specific helper).
    common.print_func_name(Relu._init_weight_values)
    print(shape)
    # Sample initial weights from a truncated normal scaled by 1/sqrt(fan_in).
    w_values = tf.truncated_normal(shape=shape, stddev=1. / math.sqrt(float(shape[0])))
    return w_values
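Example #2 samples initial weight values from a truncated normal distribution whose standard deviation is scaled by 1/sqrt(fan_in), with shape[0] acting as the number of inputs. A minimal sketch of how the returned tensor is typically used follows, assuming the TF 1.x API; the layer shape and variable name are illustrative assumptions, not taken from the original project.

    import math
    import tensorflow as tf  # TF 1.x API (tf.truncated_normal)

    shape = [784, 128]  # hypothetical layer shape: [n_inputs, n_outputs]
    w_values = tf.truncated_normal(shape=shape,
                                   stddev=1. / math.sqrt(float(shape[0])))
    # The sampled values are usually wrapped in a Variable to become trainable weights.
    W = tf.Variable(w_values, name="weights")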