Example #1
  def __init__(self, name, inputs, targets, n_classes, tower_setup, global_average_pooling=False, dropout=0.0,
               loss="ce", l2=L2_DEFAULT):
    super(Softmax, self).__init__()
    self.measures = {}
    # Either pool over the spatial dimensions or flatten the input
    # (optionally applying dropout) to get a [batch, features] tensor.
    if global_average_pooling:
      inp, n_features_inp = prepare_input(inputs)
      inp = global_avg_pool(inp)
    else:
      inp, n_features_inp = prepare_collapsed_input_and_dropout(inputs, dropout)

    with tf.variable_scope(name):
      W = self.create_weight_variable("W", [n_features_inp, n_classes], l2, tower_setup)
      b = self.create_bias_variable("b", [n_classes], tower_setup)
      y_ref = tf.cast(targets, tf.int64)
      # Linear classifier logits; the layer output is the softmax over classes.
      y_pred = tf.matmul(inp, W) + b
      self.outputs = [tf.nn.softmax(y_pred, -1, 'softmax')]
      # Number of misclassified examples in the batch.
      errors = tf.not_equal(tf.argmax(y_pred, 1), y_ref)
      errors = tf.reduce_sum(tf.cast(errors, tower_setup.dtype))
      self.measures['errors'] = errors

      if loss == "ce":
        cross_entropy_per_example = tf.nn.sparse_softmax_cross_entropy_with_logits(
          logits=y_pred, labels=y_ref, name='cross_entropy_per_example')
        self.loss = tf.reduce_sum(cross_entropy_per_example, name='cross_entropy_sum')
      else:
        assert False, "Unknown loss " + loss

      self.add_scalar_summary(self.loss, "loss")
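
For reference, the following is a minimal, self-contained sketch of the same pattern (a linear layer followed by softmax, a summed sparse cross-entropy loss, and an error count), assuming TensorFlow 1.x. The feature size, class count, and variable names are illustrative and not taken from the code above.

import tensorflow as tf

# Illustrative sizes; the real layer infers the feature count from its inputs.
n_features, n_classes = 128, 10
inp = tf.placeholder(tf.float32, [None, n_features], name="features")
targets = tf.placeholder(tf.int64, [None], name="targets")

with tf.variable_scope("softmax_head"):
  W = tf.get_variable("W", [n_features, n_classes],
                      initializer=tf.glorot_uniform_initializer())
  b = tf.get_variable("b", [n_classes], initializer=tf.zeros_initializer())
  logits = tf.matmul(inp, W) + b
  probs = tf.nn.softmax(logits, -1, "softmax")

  # Count of misclassified examples in the batch.
  errors = tf.reduce_sum(
    tf.cast(tf.not_equal(tf.argmax(logits, 1), targets), tf.float32))

  # Summed (not averaged) cross-entropy, as in the example above.
  loss = tf.reduce_sum(
    tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=targets),
    name="cross_entropy_sum")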
Example #2
  def __init__(self, name, inputs, tower_setup, activation="relu", batch_norm_decay=BATCH_NORM_DECAY_DEFAULT):
    super(Collapse, self).__init__()
    curr, n_features_inp = prepare_input(inputs)
    with tf.variable_scope(name):
      # Batch norm, then the chosen activation, then collapse the spatial
      # dimensions with global average pooling.
      inp = self.create_and_apply_batch_norm(curr, n_features_inp, batch_norm_decay, tower_setup)
      h_act = get_activation(activation)(inp)
      out = global_avg_pool(h_act)
    self.outputs = [out]
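
For comparison, a self-contained sketch of the same collapse pattern (batch normalization, activation, then global average pooling over the spatial dimensions), again assuming TensorFlow 1.x; the input shape, momentum value, and names are assumptions for illustration.

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 32, 32, 64], name="feature_map")  # illustrative shape
is_training = tf.placeholder(tf.bool, [], name="is_training")

with tf.variable_scope("collapse"):
  # Batch norm (momentum plays the role of batch_norm_decay above), then ReLU.
  h = tf.layers.batch_normalization(x, momentum=0.95, training=is_training)
  h = tf.nn.relu(h)
  # Global average pooling over height and width -> [batch, channels].
  out = tf.reduce_mean(h, axis=[1, 2], name="global_avg_pool")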