Example 1
def solve(global_step):
    """add solver to losses"""
    # learning rate
    lr = _configure_learning_rate(82783, global_step)
    optimizer = _configure_optimizer(lr)
    tf.summary.scalar('learning_rate', lr)

    # compute and apply gradient
    losses = tf.get_collection(tf.GraphKeys.LOSSES)
    regular_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    regular_loss = tf.add_n(regular_losses)
    out_loss = tf.add_n(losses)
    total_loss = tf.add_n(losses + regular_losses)

    tf.summary.scalar('total_loss', total_loss)
    tf.summary.scalar('out_loss', out_loss)
    tf.summary.scalar('regular_loss', regular_loss)
    ### add the center loss into the summary

    update_ops = []
    variables_to_train = _get_variables_to_train()
    # update_op = optimizer.minimize(total_loss)
    gradients = optimizer.compute_gradients(total_loss,
                                            var_list=variables_to_train)
    grad_updates = optimizer.apply_gradients(gradients,
                                             global_step=global_step)
    update_ops.append(grad_updates)

    # update moving mean and variance
    if FLAGS.update_bn:
        update_bns = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        update_bn = tf.group(*update_bns)
        update_ops.append(update_bn)

    return tf.group(*update_ops)
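
The op returned by solve() bundles the gradient update with the optional batch-norm updates, so a single sess.run call performs one full training step. A minimal driver sketch, assuming the losses have already been added to the graph (the iteration count below is an arbitrary placeholder, not taken from the example):

import tensorflow as tf  # TensorFlow 1.x

global_step = tf.train.get_or_create_global_step()
train_op = solve(global_step)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(10000):  # number of iterations is an assumption
        sess.run(train_op)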
Example 2
def solve(global_step):
    """add solver to losses"""
    # learning rate
    lr = _configure_learning_rate(82783, global_step)
    optimizer = _configure_optimizer(lr)
    tf.summary.scalar('learning_rate', lr)

    # compute and apply gradient
    losses = tf.get_collection(tf.GraphKeys.LOSSES)
    regular_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    regular_loss = tf.add_n(regular_losses)
    out_loss = tf.add_n(losses)
    total_loss = tf.add_n(losses + regular_losses)

    tf.summary.scalar('total_loss', total_loss)
    tf.summary.scalar('out_loss', out_loss)
    tf.summary.scalar('regular_loss', regular_loss)

    update_ops = []
    variables_to_train = _get_variables_to_train()
    # update_op = optimizer.minimize(total_loss)
    gradients = optimizer.compute_gradients(total_loss, var_list=variables_to_train)
    grad_updates = optimizer.apply_gradients(gradients,
                                             global_step=global_step)
    update_ops.append(grad_updates)
    
    # update moving mean and variance
    if FLAGS.update_bn:
        update_bns = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        update_bn = tf.group(*update_bns)
        update_ops.append(update_bn)

    return tf.group(*update_ops)
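
Both examples attach the batch-norm moving-average updates by grouping tf.GraphKeys.UPDATE_OPS alongside the gradient step, so the two run side by side. An equivalent and common TF1 pattern is to make the gradient step depend on those updates explicitly; a sketch reusing the optimizer, gradients and global_step names from the example above:

update_bns = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_bns):
    # the gradient update now runs only after the moving statistics are refreshed
    train_op = optimizer.apply_gradients(gradients, global_step=global_step)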
Example 3
        ## optimization
        learning_rate = _configure_learning_rate(82783, global_step)
        optimizer = _configure_optimizer(learning_rate)
        summaries.add(tf.summary.scalar('learning_rate', learning_rate))
        for loss in tf.get_collection(tf.GraphKeys.LOSSES):
            summaries.add(tf.summary.scalar('losses/%s' % loss.op.name, loss))

        loss = tf.get_collection(tf.GraphKeys.LOSSES)
        regular_loss = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
        total_loss = tf.add_n(loss + regular_loss)
        reg_loss = tf.add_n(regular_loss)
        summaries.add(tf.summary.scalar('total_loss', total_loss))
        summaries.add(tf.summary.scalar('regular_loss', reg_loss))

        variables_to_train = _get_variables_to_train()
        # NOTE: minimize() here updates all trainable variables (variables_to_train
        # is not passed) and receives no global_step, so the step counter is not
        # incremented by this op.
        update_op = optimizer.minimize(total_loss)
        # gradients = optimizer.compute_gradients(total_loss, var_list=variables_to_train)
        # grad_updates = optimizer.apply_gradients(gradients,
        #                                          global_step=global_step)
        # update_op = tf.group(grad_updates)

        # summary_op = tf.summary.merge(list(summaries), name='summary_op')
        summary_op = tf.summary.merge_all()
        logdir = os.path.join(FLAGS.train_dir,
                              strftime('%Y%m%d%H%M%S', gmtime()))
        if not os.path.exists(logdir):
            os.makedirs(logdir)
        summary_writer = tf.summary.FileWriter(logdir, graph=sess.graph)

        init_op = tf.group(tf.global_variables_initializer(),