# Example #1
# 0
 def forward(self, input):
     """Compute the mean-centered Gram matrix of *input* and the per-channel mean.

     Args:
         input: 4-D tensor of shape (b, c, h, w) — assumes batch size 1 for the
             final ``squeeze(0)`` to drop the batch dim; TODO confirm with callers.

     Returns:
         Tuple of (Gram matrix of shape (c, c) divided by h*w, detached per-channel
         spatial means). Both are returned via ``.data``, i.e. detached views.
     """
     b, c, h, w = input.size()
     feats = input.view(b, c, h * w)
     # Per-channel spatial mean, detached so no gradient flows through it.
     mean_ = feats.mean(dim=2, keepdim=True).detach()
     # Broadcasting (b, c, 1) against (b, c, h*w) replaces the original
     # torch.cat of h*w copies of the mean — numerically identical, but
     # avoids allocating an (b, c, h*w) tensor, and the extra .detach()
     # was redundant since mean_ is already detached.
     feats = feats - mean_
     G = torch.bmm(feats, feats.transpose(1, 2))
     # Normalize by the number of spatial positions (in place).
     G.div_(h * w)
     return G.squeeze(0).data, mean_.squeeze().data
# Example #2
# 0
def correlation_matrix(F: np.ndarray,
                       a_min: float = -1.,
                       a_max: float = 1.) -> np.ndarray:
    """Return the row-wise Pearson correlation matrix of *F*.

    Args:
        F: 2-D array whose rows are the observations to correlate.
        a_min: lower clip bound for the result.
        a_max: upper clip bound for the result.

    Returns:
        Square matrix of pairwise row correlations, clipped to
        [a_min, a_max] to counteract floating-point rounding.
    """
    # Center each row at zero mean.
    centered = F - F.mean(axis=1, keepdims=True)
    # Normalize the covariance by the outer product of the row L2 norms.
    norms = np.linalg.norm(centered, axis=1)
    corr = (centered @ centered.T) / np.outer(norms, norms)
    return np.clip(corr, a_min, a_max)
# Example #3
# 0
def correlation_matrix(F: np.ndarray,
                       a_min: float = -1.,
                       a_max: float = 1.) -> np.ndarray:
    """Compute the row-wise Pearson correlation matrix of *F*.

    NOTE: this returns the correlation (similarity) matrix itself, not a
    correlation *distance*/dissimilarity matrix — there is no ``1 - corr``
    step here, so the previous docstring was misleading.

    Args:
        F: 2-D array whose rows are the observations to correlate.
        a_min: lower clip bound for the result.
        a_max: upper clip bound for the result.

    Returns:
        Square matrix of pairwise row correlations, clipped to
        [a_min, a_max] to counteract potential rounding errors.
    """
    F_c = F - F.mean(axis=1)[:, np.newaxis]
    cov = F_c @ F_c.T
    # Vector L2 norm across rows; a constant row has norm 0 and will
    # produce NaN entries from the division below — callers presumably
    # never pass constant rows (TODO confirm).
    l2_norms = np.linalg.norm(F_c, axis=1)
    denom = np.outer(l2_norms, l2_norms)
    corr_mat = (cov / denom).clip(min=a_min, max=a_max)
    return corr_mat
def loss_dcgan_gen(dis_fake):
    """DCGAN generator loss: mean softplus of the negated fake logits.

    NOTE(review): ``F`` is a module-level functional namespace imported
    elsewhere in this file (it exposes both ``mean`` and ``softplus``) —
    confirm which framework it is against the file's imports.
    """
    # softplus(-x) == -log(sigmoid(x)); minimized when D scores fakes as real.
    neg_logits = -dis_fake
    return F.mean(F.softplus(neg_logits))
# Example #5
# 0
def bar_entropy(data):
    """Average of ``tilde_entropy`` over *data*.

    NOTE(review): ``tilde_entropy`` and ``F`` are defined elsewhere in
    this file; presumably this averages per-sample entropies — confirm
    against the sibling definitions.
    """
    entropies = tilde_entropy(data)
    return F.mean(entropies)