Example #1
    def __call__(self, prediction, target_is_real, is_updating_D=None):
        """Calculate loss given Discriminator's output and grount truth labels.

        Parameters:
            prediction (tensor) - - tpyically the prediction output from a discriminator
            target_is_real (bool) - - if the ground truth label is for real images or fake images
            is_updating_D (bool)  - - if we are in updating D step or not 

        Returns:
            the calculated loss.
        """
        if self.gan_mode in ['lsgan', 'vanilla']:
            target_tensor = self.get_target_tensor(prediction, target_is_real)
            loss = self.loss(prediction, target_tensor)
        elif 'wgan' in self.gan_mode:
            if target_is_real:
                loss = -prediction.mean()
            else:
                loss = prediction.mean()
        elif self.gan_mode == 'hinge':
            if target_is_real:
                loss = F.relu(1 - prediction) if is_updating_D else -prediction
            else:
                loss = F.relu(1 + prediction) if is_updating_D else prediction
            loss = loss.mean()
        elif self.gan_mode == 'logistic':
            if target_is_real:
                loss = F.softplus(-prediction).mean()
            else:
                loss = F.softplus(prediction).mean()
        else:
            raise NotImplementedError('gan_mode %s is not implemented' % self.gan_mode)
        return loss
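
A minimal usage sketch (hypothetical: the class name GANLoss, its constructor signature, and the discriminator/image variables below are assumptions, not shown above):

    criterion = GANLoss(gan_mode='hinge')           # hypothetical constructor
    d_real = discriminator(real_images)             # raw logits
    d_fake = discriminator(fake_images.detach())

    # Discriminator step: hinge penalties on real and fake logits.
    loss_d = criterion(d_real, True, is_updating_D=True) \
           + criterion(d_fake, False, is_updating_D=True)

    # Generator step: non-saturating -E[D(fake)].
    loss_g = criterion(discriminator(fake_images), True, is_updating_D=False)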
Example #2
import numpy as np
import paddle
import paddle.nn.functional as F


def get_positive_expectation(p_samples, measure, average=True):
    """Get the expectation over positive samples for the given measure."""
    if measure == 'GAN':
        Ep = -F.softplus(-p_samples)
    elif measure == 'JSD':
        Ep = np.log(2.0) - F.softplus(-p_samples)
    elif measure == 'X2':
        Ep = p_samples * p_samples
    elif measure == 'KL':
        Ep = p_samples + 1.
    elif measure == 'RKL':
        Ep = -paddle.exp(-p_samples)
    elif measure == 'DV':
        Ep = p_samples
    elif measure == 'H2':
        Ep = 1. - paddle.exp(-p_samples)
    elif measure == 'W1':
        Ep = p_samples
    else:
        raise ValueError('Unknown measure: {}'.format(measure))

    if average:
        return paddle.mean(Ep)
    else:
        return Ep
Example #3
    def forward(self, embedding, targets):
        if isinstance(embedding, dict):
            embedding = embedding['features']
        # Normalize embedding features
        embedding = F.normalize(embedding, axis=1)
        # Cosine similarity between all pairs (embeddings are L2-normalized)
        dist_mat = paddle.matmul(embedding, embedding, transpose_y=True)

        N = dist_mat.shape[0]
        targets_mat = targets.reshape([N, 1]).expand([N, N])
        is_pos = targets_mat.equal(paddle.t(targets_mat)).astype('float32')
        is_neg = targets_mat.not_equal(paddle.t(targets_mat)).astype('float32')

        # Mask scores related to itself
        is_pos = is_pos - paddle.eye(N, N)

        s_p = dist_mat * is_pos
        s_n = dist_mat * is_neg

        logit_p = -self.gamma * s_p + (-99999999.) * (1 - is_pos)
        logit_n = self.gamma * (s_n + self.margin) + (-99999999.) * (1 - is_neg)

        loss = F.softplus(
            paddle.logsumexp(logit_p, axis=1) +
            paddle.logsumexp(logit_n, axis=1)).mean()

        return {"PairwiseCosface": loss}
Example #4
    def _forward_log_det_jacobian(self, x):
        """We implicitly rely on _forward_log_det_jacobian rather than 
        explicitly implement ``_inverse_log_det_jacobian`` since directly using 
        ``-tf.math.log1p(-tf.square(y))`` has lower numerical precision.

        See details: https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/bijectors/tanh.py#L69-L80
        """
        return 2. * (math.log(2.) - x - F.softplus(-2. * x))
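
For reference, the identity behind the return value (a standard rewrite of the tanh log-derivative):

    \log\left|\tfrac{d}{dx}\tanh x\right| = \log(1 - \tanh^2 x)
        = \log\frac{4e^{-2x}}{(1 + e^{-2x})^2}
        = 2\big(\log 2 - x - \operatorname{softplus}(-2x)\big)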
Example #5
import numpy as np
import paddle
import paddle.nn.functional as F


def get_negative_expectation(q_samples, measure, average=True):
    """Get the expectation over negative samples for the given measure."""
    if measure == 'GAN':
        Eq = F.softplus(-q_samples) + q_samples
    elif measure == 'JSD':
        Eq = F.softplus(-q_samples) + q_samples - np.log(2.)
    elif measure == 'X2':
        tmp = paddle.sqrt(q_samples * q_samples) + 1.
        Eq = -0.5 * (tmp * tmp)
    elif measure == 'KL':
        Eq = paddle.exp(q_samples)
    elif measure == 'RKL':
        Eq = q_samples - 1.
    elif measure == 'H2':
        Eq = paddle.exp(q_samples) - 1.
    elif measure == 'W1':
        Eq = q_samples
    else:
        raise ValueError('Unknown measure: {}'.format(measure))

    if average:
        return paddle.mean(Eq)
    else:
        return Eq
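
A minimal sketch of how the two expectation helpers combine into a mutual-information-style objective (the critic score tensors below are hypothetical placeholders):

    import paddle

    # Hypothetical critic scores for positive (joint) and negative
    # (product-of-marginals) sample pairs.
    scores_pos = paddle.randn([128])
    scores_neg = paddle.randn([128])

    # Jensen-Shannon-based estimate: E_p[f] - E_q[f].
    mi_estimate = (get_positive_expectation(scores_pos, 'JSD') -
                   get_negative_expectation(scores_neg, 'JSD'))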
Example #6
    def forward(self, pred, label):
        label = label.reshape(pred.shape)
        sample_weight = label != self._ignore_label
        label = paddle.where(sample_weight, label, paddle.zeros_like(label))

        if not self._from_sigmoid:
            # Numerically stable sigmoid cross-entropy from logits:
            # max(x, 0) - x * y + log(1 + exp(-|x|))
            loss = F.relu(pred) - pred * label + F.softplus(-paddle.abs(pred))
        else:
            eps = 1e-12
            loss = -(paddle.log(pred + eps) * label +
                     paddle.log(1. - pred + eps) * (1. - label))
        loss = self._weight * (loss * sample_weight)
        return paddle.mean(loss,
                           axis=misc.get_dims_with_exclusion(
                               len(loss.shape), self._batch_axis))
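
For reference, the logits branch above is the standard numerically stable form of sigmoid cross-entropy, with the last term computed as softplus(-|x|):

    -\big[y\log\sigma(x) + (1 - y)\log(1 - \sigma(x))\big]
        = \max(x, 0) - xy + \log\big(1 + e^{-|x|}\big)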
Example #7
import paddle
import paddle.nn.functional as F


def mish(x):
    """Mish activation: x * tanh(softplus(x))."""
    return x * paddle.tanh(F.softplus(x))
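
A quick usage check (output values are rounded):

    x = paddle.to_tensor([-2.0, 0.0, 2.0])
    print(mish(x))  # approximately [-0.2525, 0.0000, 1.9440]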
Example #8
    def _forward_log_det_jacobian(self, x):
        # log sigmoid'(x) = log(sigmoid(x) * sigmoid(-x))
        return -F.softplus(-x) - F.softplus(x)
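
The return value matches the log-derivative of the sigmoid (so this is presumably a sigmoid bijector); it follows from sigma'(x) = sigma(x) * sigma(-x) and log sigma(x) = -softplus(-x):

    \log\sigma'(x) = \log\big(\sigma(x)\,\sigma(-x)\big)
        = -\operatorname{softplus}(-x) - \operatorname{softplus}(x)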
Example #9
    def __call__(self, x):
        # Mish activation again (cf. Example #7): x * tanh(softplus(x))
        return x * paddle.tanh(F.softplus(x))