def _maximisation(self, x, indexes):
    """M-step: recompute one centroid per cluster as the barycenter
    (via ``pa.barycenter`` — presumably the Poincare/hyperbolic one,
    cf. the ``poincare_alg`` import used elsewhere in this file) of the
    points currently assigned to that cluster.

    x       : tensor of points, shape (n_points, dim)
    indexes : per-point cluster assignment, values in [0, self._n_c)

    Returns a (self._n_c, dim) tensor of centroids. A cluster holding at
    most ``self._mec`` points is re-seeded from a single random point of
    ``x`` instead, which keeps empty/tiny clusters alive.
    """
    centroids = x.new(self._n_c, x.size(-1))
    for cluster_id in range(self._n_c):
        members = x[indexes == cluster_id]
        if members.shape[0] <= self._mec:
            # Too few members: re-seed this cluster from one random point.
            members = x[random.randint(0, len(x) - 1)].unsqueeze(0)
        centroids[cluster_id] = pa.barycenter(members, normed=True)
    return centroids
 def update_mu(self, z, wik, lr_mu, tau_mu, g_index=-1, max_iter=50):
     """Update the component means by weighted barycenter.

     z        : data points, shape (N, D) (2-D — ``z.shape`` is unpacked
                into exactly N, D below)
     wik      : per-point component weights/responsibilities; last dim is
                M = number of components (rows assumed to match z —
                TODO confirm)
     lr_mu    : learning rate forwarded to ``pa.barycenter``
     tau_mu   : tolerance forwarded to ``pa.barycenter``
     g_index  : index of the single component to update, or -1 (default)
                to update every component at once
     max_iter : maximum barycenter iterations

     Side effect: writes the result into ``self._mu`` (one row, or all).
     """
     N, D, M = z.shape + (wik.shape[-1], )
     # BUG FIX: this was ``g_index > 0``, which silently fell through to
     # the "update everything" branch when asked to update component 0
     # alone. The sentinel for "all components" is -1 (the default), so
     # every index >= 0 denotes a single component.
     if g_index >= 0:
         # Refresh only the requested component, weighted by its column.
         self._mu[g_index] = pa.barycenter(z,
                                           wik[:, g_index],
                                           lr_mu,
                                           tau_mu,
                                           max_iter=max_iter,
                                           normed=True).squeeze()
     else:
         # Refresh all M components at once: broadcast each point of z
         # against every component's weight column.
         self._mu = pa.barycenter(z.unsqueeze(1).expand(N, M, D),
                                  wik,
                                  lr_mu,
                                  tau_mu,
                                  max_iter=max_iter,
                                  normed=True).squeeze()
# --- Esempio n. 3 ("Example no. 3", vote count 0) — residue from the
# code-listing site this file was scraped from; kept as a comment so the
# file stays parseable. ---
def accuracy_supervised(z, y, mu, nb_set=5, verbose=True):
    """Nearest-centroid classification accuracy, estimated by
    ``nb_set``-fold cross-validation.

    For each fold, one barycenter per class is computed from the training
    split (``pa.barycenter``), and every test point is assigned the label
    of its closest centroid under the Poincare distance
    (``poincare_function.distance``).

    z       : embedded points, indexable by a LongTensor of indices
    y       : per-example labels; each item must convert to a LongTensor
              row — the code reads column 0, so labels are expected 2-D
              (NOTE(review): confirm against callers)
    mu      : only ``len(mu)`` is used — the number of classes/centroids
    nb_set  : number of cross-validation folds
    verbose : print per-fold split sizes and accuracies

    Returns the mean accuracy over the folds.
    """
    # Hoisted out of the fold loop (was re-imported on every iteration).
    from function_tools import poincare_alg as pa

    n_example = len(z)
    n_distrib = len(mu)
    subset_index = torch.randperm(n_example)
    nb_value = n_example // nb_set
    # FIX: the last fold now absorbs the remainder. Previously the
    # ``min(nb_value * (i + 1), n_example)`` bound never triggered and the
    # trailing ``n_example % nb_set`` examples were dropped entirely.
    I_CV = [
        subset_index[nb_value * i:
                     (n_example if i == nb_set - 1 else nb_value * (i + 1))]
        for i in range(nb_set)
    ]
    acc_total = 0.
    for fold, test_index in enumerate(I_CV):
        # Train split = concatenation of every other fold.
        train_index = torch.cat(
            [subset for ci, subset in enumerate(I_CV) if ci != fold], 0)
        Z_train = z[train_index]
        Y_train = torch.LongTensor([y[ic.item()] for ic in train_index])

        # Test split.
        Z_test = z[test_index]
        Y_test = torch.LongTensor([y[ic.item()] for ic in test_index])

        if verbose:
            print("Set " + str(fold) + " :")
            print("\t train size -> " + str(len(Z_train)))
            print("\t test size -> " + str(len(Z_test)))
            print("Obtaining centroids for each classes")

        min_label = Y_train.min().item()

        # One barycenter per class; labels are assumed contiguous starting
        # at min_label. (FIX: loop variable renamed — it used to shadow the
        # outer fold index ``i``.)
        centroids = []
        for k in range(n_distrib):
            centroids.append(
                pa.barycenter(Z_train[Y_train[:, 0] == (min_label + k)],
                              normed=False).tolist())
        centroids = torch.Tensor(centroids).squeeze()

        # Predict: label of the nearest centroid in Poincare distance.
        Z_test_reshape = Z_test.unsqueeze(1).expand(Z_test.size(0), n_distrib,
                                                    Z_test.size(-1))
        centroid_reshape = centroids.unsqueeze(0).expand_as(Z_test_reshape)
        d2 = poincare_function.distance(Z_test_reshape, centroid_reshape)**2
        predicted_labels = d2.min(-1)[1] + min_label

        acc = (predicted_labels == Y_test.squeeze()).float().mean()
        acc_total += acc.item()
        if verbose:  # FIX: was printed unconditionally, ignoring verbose
            print(acc)
    return acc_total / len(I_CV)