Example #1
    def bernoulli_loss_with_logits(self, x_gt, y_target):
        '''
        Bernoulli (binary cross-entropy) reconstruction loss computed from raw
        logits, summed over pixels and averaged over the batch.
        '''

        x_f = tfutils.flatten(x_gt)
        y_f = tfutils.flatten(y_target)

        return tf.reduce_mean(
            tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(labels=x_f,
                                                                  logits=y_f),
                          axis=1))
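
A hedged usage sketch (TensorFlow 1.x); the names `model` and `decoder_logits` and the placeholder shape are illustrative assumptions, not from the source. Note that y_target must be raw, pre-sigmoid logits here: tf.nn.sigmoid_cross_entropy_with_logits applies the sigmoid internally in a numerically stable way.

import tensorflow as tf

# `decoder_logits` stands in for the raw (pre-sigmoid) network output,
# with the same shape as the ground-truth placeholder.
x_gt = tf.placeholder(tf.float32, [None, 28, 28, 1])
loss = model.bernoulli_loss_with_logits(x_gt, decoder_logits)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)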
Example #2
    def bernoulli_loss(self, x_gt, y_target):
        '''
        Bernoulli reconstruction loss computed from probabilities; the 1e-10
        offsets keep tf.log finite at exact 0 and 1.
        '''

        x_f = tfutils.flatten(x_gt)
        y_f = tfutils.flatten(y_target)

        return -tf.reduce_mean(
            tf.reduce_sum(x_f * tf.log(1e-10 + y_f) +
                          (1 - x_f) * tf.log(1e-10 + 1 - y_f),
                          axis=1))
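
Unlike Example #1, this variant expects y_target to already be probabilities, so a sigmoid has to be applied before calling it; the logits formulation above is generally the numerically safer choice. A minimal sketch, assuming a hypothetical raw-logit tensor `decoder_logits` and model instance `model`:

# Convert raw logits to probabilities before handing them to the loss.
y_probs = tf.nn.sigmoid(decoder_logits)
loss = model.bernoulli_loss(x_gt, y_probs)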
Example #3
    def gaussian_loss(self, x_gt, y_target, sigma_r):
        '''
        Gaussian negative log-likelihood with a per-pixel predicted variance.
        '''

        x_f = tfutils.flatten(x_gt)
        y_f = tfutils.flatten(y_target)
        sigma_r_f = tfutils.flatten(sigma_r)
        # The network is implicitly assumed to predict sigma squared directly
        # (for numerical stability), so the squares are omitted below.

        return tf.reduce_mean(
            tf.reduce_sum(0.5 * tf.log(2 * np.pi * (sigma_r_f)) +
                          tf.divide(tf.square((x_f - y_f)), 2 * (sigma_r_f)),
                          axis=1))
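
The per-pixel term being summed is 0.5 * log(2 * pi * sigma^2) + (x - y)^2 / (2 * sigma^2), i.e. the Gaussian negative log-likelihood. A small NumPy sanity sketch of a single term, with made-up values:

import numpy as np

x, y, sigma_sq = 0.8, 0.5, 0.25
nll = 0.5 * np.log(2 * np.pi * sigma_sq) + (x - y) ** 2 / (2 * sigma_sq)
# 0.5 * log(pi / 2) + 0.09 / 0.5  ~=  0.2258 + 0.18  =  0.4058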
Example #4
    def KL_two_gauss_with_diag_cov(self, mu0, sigma0, mu1, sigma1):
        '''
        Closed-form KL divergence between two Gaussians with diagonal
        covariance, KL(N(mu0, sigma0^2) || N(mu1, sigma1^2)).
        '''

        sigma0_fs = tf.square(tfutils.flatten(sigma0))
        sigma1_fs = tf.square(tfutils.flatten(sigma1))

        logsigma0_fs = tf.log(sigma0_fs + 1e-10)
        logsigma1_fs = tf.log(sigma1_fs + 1e-10)

        mu0_f = tfutils.flatten(mu0)
        mu1_f = tfutils.flatten(mu1)

        return tf.reduce_mean(0.5 * tf.reduce_sum(
            tf.divide(sigma0_fs + tf.square(mu1_f - mu0_f),
                      sigma1_fs + 1e-10) + logsigma1_fs - logsigma0_fs - 1,
            axis=1))
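
The expression is summed over latent dimensions and averaged over the batch. A hedged sketch of the common VAE special case, a standard normal prior; the posterior tensors `mu` and `sigma` are assumptions:

# With mu1 = 0 and sigma1 = 1 the summand reduces to
# sigma**2 + mu**2 - log(sigma**2) - 1 (times the leading 0.5).
prior_mu = tf.zeros_like(mu)
prior_sigma = tf.ones_like(sigma)
kl = model.KL_two_gauss_with_diag_cov(mu, sigma, prior_mu, prior_sigma)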
Example #5
def dense_layer(bottom,
                name,
                hidden_units=512,
                activation=tf.nn.relu,
                weight_init='he_normal',
                add_bias=True):
    '''
    Dense a.k.a. fully connected layer
    '''

    bottom_flat = utils.flatten(bottom)
    bottom_rhs_dim = utils.get_rhs_dim(bottom_flat)

    weight_shape = [bottom_rhs_dim, hidden_units]
    bias_shape = [hidden_units]

    with tf.variable_scope(name):

        weights = get_weight_variable(weight_shape,
                                      name='W',
                                      type=weight_init,
                                      regularize=True)

        op = tf.matmul(bottom_flat, weights)

        biases = None
        if add_bias:
            biases = get_bias_variable(bias_shape, name='b')
            op = tf.nn.bias_add(op, biases)
        op = activation(op)

        # Add Tensorboard summaries
        _add_summaries(op, weights, biases)

        return op
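
A minimal usage sketch for this layer (the tensor `inputs` is a hypothetical placeholder). Passing tf.identity as the activation gives a plain linear layer, the usual choice for a final logits layer:

h1 = dense_layer(inputs, 'fc1', hidden_units=256)
logits = dense_layer(h1, 'fc2', hidden_units=10, activation=tf.identity)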
Example #6
def dense_layer(bottom,
                name,
                hidden_units=512,
                activation=STANDARD_NONLINEARITY,
                normalisation=tfnorm.batch_norm,
                normalise_post_activation=False,
                dropout_p=None,
                weight_init='he_normal',
                add_bias=True,
                **kwargs):
    '''
    Dense a.k.a. fully connected layer
    '''

    bottom_flat = utils.flatten(bottom)
    bottom_rhs_dim = utils.get_rhs_dim(bottom_flat)

    weight_shape = [bottom_rhs_dim, hidden_units]
    bias_shape = [hidden_units]

    with tf.variable_scope(name):

        weights = utils.get_weight_variable(weight_shape,
                                            name='W',
                                            type=weight_init,
                                            regularize=True)

        op = tf.matmul(bottom_flat, weights)

        biases = None
        if add_bias:
            biases = utils.get_bias_variable(bias_shape, name='b')
            op = tf.nn.bias_add(op, biases)

        if not normalise_post_activation:
            op = activation(normalisation(op, **kwargs))
        else:
            op = normalisation(activation(op), **kwargs)

        if dropout_p is not None:
            op = dropout(op, keep_prob=dropout_p, **kwargs)

        # Add Tensorboard summaries
        _add_summaries(op, weights, biases)

        return op
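
Two things worth noting in this variant: **kwargs is forwarded to both the normalisation function and dropout, and dropout_p is passed through as keep_prob, i.e. it is the probability of keeping a unit, not of dropping it, despite the name. A hedged sketch; whether tfnorm.batch_norm accepts a `training` flag is an assumption, since its signature is not shown here:

# `inputs` and `is_training` are hypothetical; `training` travels via
# **kwargs to both tfnorm.batch_norm and dropout.
h = dense_layer(inputs, 'fc1', hidden_units=256, dropout_p=0.8,
                training=is_training)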
Example #7
def dense_layer(bottom,
                name,
                hidden_units=512,
                activation=tf.nn.relu,
                weight_init='he_normal'):
    '''
    Dense a.k.a. fully connected layer
    '''

    bottom_flat = utils.flatten(bottom)
    bottom_rhs_dim = utils.get_rhs_dim(bottom_flat)

    weight_shape = [bottom_rhs_dim, hidden_units]
    bias_shape = [hidden_units]

    with tf.name_scope(name):

        if weight_init == 'he_normal':
            N = bottom_rhs_dim
            weights = _weight_variable_he_normal(weight_shape,
                                                 N,
                                                 name=name + '_w')
        elif weight_init == 'simple':
            weights = _weight_variable_simple(weight_shape, name=name + '_w')
        else:
            raise ValueError('Unknown weight initialisation method %s' %
                             weight_init)

        biases = _bias_variable(bias_shape, name=name + '_b')

        op = tf.matmul(bottom_flat, weights)
        op = tf.nn.bias_add(op, biases)
        op = activation(op)

        # Tensorboard variables
        tf.summary.histogram(weights.name, weights)
        tf.summary.histogram(biases.name, biases)
        tf.summary.histogram(op.op.name + '/activations', op)

        return op
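
Compared with Examples #5 and #6, this variant always adds a bias, dispatches the weight initialisation by string inside the function, and uses tf.name_scope rather than tf.variable_scope, so it does not participate in TF1 variable sharing. A minimal call sketch (`inputs` is hypothetical):

out = dense_layer(inputs, 'fc_out', hidden_units=128, weight_init='simple')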