Example #1
def _dualloss(logits, mid_logits, labels, class_hist, num_labels, is_training=True):
  """Combines the main and mid-level classification losses."""
  max_weight = 1
  loss1 = losses.weighted_cross_entropy_loss(logits, labels, class_hist,
                                             max_weight=max_weight)
  loss2 = losses.weighted_cross_entropy_loss(mid_logits, labels, class_hist,
                                             max_weight=max_weight)
  wgt = 0.3  # weight of the mid-level loss; 0.3 worked best
  xent_loss = (1 - wgt) * loss1 + wgt * loss2
  all_losses = [xent_loss]

  # total loss = cross-entropy terms + regularization
  total_loss = losses.total_loss_sum(all_losses)

  if is_training:
    # keep a moving average of the loss for the summaries
    loss_averages_op = losses.add_loss_summaries(total_loss)
    with tf.control_dependencies([loss_averages_op]):
      total_loss = tf.identity(total_loss)

  return total_loss
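
Every example on this page calls a weighted_cross_entropy_loss helper that is not shown. For reference, here is a minimal sketch of what such a helper could look like, assuming sparse integer labels and inverse-frequency class weights derived from class_hist and clipped at max_weight; the weighting scheme and all names below are assumptions, not the original implementation:

import tensorflow as tf

def weighted_cross_entropy_loss(logits, labels, class_hist=None, max_weight=1.0):
  # logits: [N, num_classes] float tensor; labels: [N] int class ids
  xent = tf.nn.sparse_softmax_cross_entropy_with_logits(
      labels=labels, logits=logits)
  if class_hist is not None:
    # Inverse-frequency class weights, clipped so that rare classes
    # cannot dominate the loss (assumed scheme).
    class_hist = tf.cast(class_hist, tf.float32)
    freq = class_hist / tf.reduce_sum(class_hist)
    class_weights = tf.minimum(1.0 / (freq + 1e-8), max_weight)
    xent *= tf.gather(class_weights, labels)
  return tf.reduce_mean(xent)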
Example #2
def _multiloss(logits, aux_logits, labels, num_labels, class_hist, is_training):
  """Main loss plus equally weighted auxiliary losses."""
  max_weight = FLAGS.max_weight
  if len(aux_logits) > 0:
    main_wgt = 0.7
    aux_wgt = (1 - main_wgt) / len(aux_logits)
  else:
    main_wgt = 1.0
    aux_wgt = 0
  xent_loss = main_wgt * losses.weighted_cross_entropy_loss(
      logits, labels, class_hist, max_weight=max_weight)
  for i, l in enumerate(aux_logits):
    print('loss{} --> {}'.format(i, l))
    xent_loss += aux_wgt * losses.weighted_cross_entropy_loss(
        l, labels, class_hist, max_weight=max_weight)

  all_losses = [xent_loss]
  # total loss = cross-entropy terms + regularization
  total_loss = losses.total_loss_sum(all_losses)
  if is_training:
    loss_averages_op = losses.add_loss_summaries(total_loss)
    with tf.control_dependencies([loss_averages_op]):
      total_loss = tf.identity(total_loss)
  return total_loss
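
Note the arithmetic here: with main_wgt = 0.7 and two auxiliary heads, each auxiliary loss gets aux_wgt = 0.15, so the weights always sum to 1 no matter how many auxiliary outputs the network produces.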
Example #3
def inference(image, labels=None, constant_shape=True, is_training=False):
  global known_shape
  known_shape = constant_shape
  x = normalize_input(image)
  logits, aux_logits = _build(x, is_training=is_training)
  if labels is not None:
    main_wgt = 0.7
    xent_loss = main_wgt * losses.weighted_cross_entropy_loss(logits, labels)
    xent_loss += (1 - main_wgt) * losses.weighted_cross_entropy_loss(aux_logits, labels)
    return logits, aux_logits, xent_loss
  return logits, aux_logits
Example #4
def get_loss(logits, labels, weights, is_training):
    xent_loss = losses.weighted_cross_entropy_loss(logits, labels, weights)
    total_loss = losses.total_loss_sum([xent_loss])
    if is_training:
        loss_averages_op = losses.add_loss_summaries(total_loss)
        with tf.control_dependencies([loss_averages_op]):
            total_loss = tf.identity(total_loss)

    return total_loss
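
Examples #1 through #4 all end with the same total_loss_sum / add_loss_summaries pair, whose definitions are not shown. A plausible TF1-style sketch, assuming total_loss_sum adds the graph's regularization losses and add_loss_summaries maintains an exponential moving average for the summaries (both bodies are assumptions):

import tensorflow as tf

def total_loss_sum(loss_list):
  # cross-entropy terms plus any regularization losses in the graph
  reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
  return tf.add_n(loss_list + reg_losses, name='total_loss')

def add_loss_summaries(total_loss):
  # Moving average of the loss, exposed as a summary. The returned op
  # must run before the loss is read, hence the control_dependencies
  # blocks in the examples above.
  ema = tf.train.ExponentialMovingAverage(0.9, name='loss_avg')
  averages_op = ema.apply([total_loss])
  tf.summary.scalar('total_loss_avg', ema.average(total_loss))
  return averages_op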
Example #5
    def train_step(x, y_true_labels):
        print('Tracing training step...')

        # Forward pass
        with tf.GradientTape() as tape:
            # logits shape: (batch_size, img_h, img_w, num_classes)
            y_pred_logits = network(x)
            loss = weighted_cross_entropy_loss(y_true_labels, y_pred_logits,
                                               dataset.class_weights)

        # Backward pass
        grads = tape.gradient(loss, network.trainable_variables)
        opt.apply_gradients(zip(grads, network.trainable_variables))

        return loss
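
The 'Tracing training step...' print suggests this function is wrapped in tf.function, so the message appears once per trace rather than once per step. A typical way to drive it, assuming network, opt, and dataset exist as in the snippet; the loop and the train_ds attribute below are illustrative, not from the original:

step = tf.function(train_step)  # compile; retraces per new input signature
for x, y in dataset.train_ds:   # hypothetical dataset iterator
    loss = step(x, y)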