Example #1
import numpy as np
import torch

# WitnessComplex and make_plot are assumed to be provided by the surrounding project.


def compute_wc_offline(dataset,
                       data_loader,
                       batch_size,
                       method_args,
                       name='',
                       verification=False):
    """Compute per-batch landmark distance matrices and kNN pairing masks
    from witness complexes built over the full dataset."""
    print('Compute Witness Complex Pairings {name}'.format(name=name))

    dist_X_all = torch.ones((len(data_loader), batch_size, batch_size))
    pair_mask_X_all = torch.ones((len(data_loader), batch_size, batch_size))

    for batch, (img, label) in enumerate(data_loader):
        # landmarks: the current batch; witnesses: the full dataset
        witness_complex = WitnessComplex(img, dataset[:][:][0])

        if method_args['n_jobs'] > 1:
            witness_complex.compute_simplicial_complex_parallel(
                d_max=1,
                r_max=method_args['r_max'],
                create_simplex_tree=False,
                create_metric=True,
                n_jobs=method_args['n_jobs'])
        else:
            witness_complex.compute_simplicial_complex(
                d_max=1,
                r_max=method_args['r_max'],
                create_simplex_tree=False,
                create_metric=True)

        # If the distance-matrix check fails, the chosen r_max was too small.
        if not witness_complex.check_distance_matrix:
            print('WARNING: choose higher r_max')
        landmarks_dist = torch.tensor(witness_complex.landmarks_dist)
        # Mark the k nearest landmarks of every point; column 0 of the sorted
        # indices is the point itself and is skipped.
        _, indices = torch.sort(landmarks_dist)
        kNN_mask = torch.zeros(
            (batch_size, batch_size),
            device='cpu').scatter(1, indices[:, 1:(method_args['k'] + 1)], 1)
        dist_X_all[batch, :, :] = landmarks_dist
        pair_mask_X_all[batch, :, :] = kNN_mask

        if method_args['match_edges'] == 'verification' and verification:
            ind_X = np.where(pair_mask_X_all[batch, :, :] == 1)
            ind_X = np.column_stack((ind_X[0], ind_X[1]))

            make_plot(img, ind_X, label, 'name', path_root=None, knn=False)

    return dist_X_all, pair_mask_X_all
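
A minimal sketch of how this helper might be invoked. Everything below (the dataset, batch size and method_args values) is a placeholder chosen for illustration, and the call still requires the project's WitnessComplex and make_plot to be importable.

# Hypothetical usage of compute_wc_offline; names and values are placeholders.
import torch
from torch.utils.data import DataLoader, TensorDataset

train_dataset = TensorDataset(torch.randn(512, 32), torch.randint(0, 10, (512,)))
batch_size = 64
# drop_last=True because compute_wc_offline pre-allocates
# (len(data_loader), batch_size, batch_size) tensors and expects equal batches.
loader = DataLoader(train_dataset, batch_size=batch_size, drop_last=True)

method_args = {
    'n_jobs': 1,           # >1 switches to the parallel simplicial-complex routine
    'r_max': 12,           # maximal filtration radius for the witness complex
    'k': 3,                # nearest landmarks kept in the pairing mask
    'match_edges': None,   # 'verification' (plus verification=True) plots the pairings
}

dist_X_all, pair_mask_X_all = compute_wc_offline(
    train_dataset, loader, batch_size, method_args, name='train')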
Example #2
        Xs = []
        ys = []
        for i, (X_batch, label_batch) in enumerate(dataloader):
            if i == 0:
                X0 = X_batch
            Xs.append(X_batch)
            ys.append(label_batch)

        data = torch.cat(Xs, dim=0)
        labels = torch.cat(ys, dim=0)

        # UMAP here appears to be a project wrapper exposing get_latent_train/
        # get_latent_test rather than the plain umap-learn estimator.
        model = UMAP(n_neighbors=NN, min_dist=m_dist)
        data_l, labels_l = model.get_latent_train(data.numpy(), labels.numpy())
        ind_plot = random.sample(range(data.shape[0]), 15000)
        make_plot(data_l[ind_plot, :],
                  None,
                  labels_l[ind_plot].astype(int).astype(str),
                  True,
                  path_to_save=os.path.join(root_to_save, 'all_base.pdf'))
        print(torch.all(torch.from_numpy(labels_l).eq(labels)))

        ks = [1, 2, 3, 4]
        for batch_i, (X_batch, label_batch) in enumerate(dataloader):
            if batch_i == 7 or batch_i == 33 or batch_i == 66:
                print('compute')
                try:
                    os.mkdir(os.path.join(root_to_save, '{}'.format(batch_i)))
                except FileExistsError:
                    pass
                # X, y = model.get_latent_test(X_batch.numpy(), label_batch.numpy())
                X, y = (data_l[(bs * batch_i):bs * (batch_i + 1), :],
                        labels_l[(bs * batch_i):bs * (batch_i + 1)])

        witnesses_tensor = torch.from_numpy(witnesses)
        landmarks_tensor = torch.from_numpy(landmarks)

        witness_complex = WitnessComplex(landmarks_tensor, witnesses_tensor)
        witness_complex.compute_simplicial_complex(d_max=1,
                                                   r_max=10,
                                                   create_simplex_tree=False,
                                                   create_metric=True)

        for k in ks:

            print('{} out of {}'.format(counter, ntot))

            landmarks_dist = torch.tensor(witness_complex.landmarks_dist)
            _, indices = torch.sort(landmarks_dist)
            kNN_mask = torch.zeros(
                (n_samples, n_samples),
                device='cpu').scatter(1, indices[:, 1:(k + 1)], 1)
            pairings_i = np.where(kNN_mask.numpy() == 1)
            pairings = np.column_stack((pairings_i[0], pairings_i[1]))

            name = 'wc{nw}_k{k}_seed{seed}'.format(nw=N_WITNESSES, k=k, seed=seed)

            make_plot(landmarks,
                      pairings,
                      color,
                      name=name,
                      path_root=path_to_save,
                      knn=False,
                      show=True,
                      dpi=400,
                      cmap=plt.cm.viridis)


            counter += 1
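
The torch.sort/scatter pattern used in both examples to turn a dense distance matrix into k-nearest-neighbour pairings can be factored into a small standalone helper. A sketch using only torch and numpy; knn_pairings is a hypothetical name, not part of the project.

import numpy as np
import torch

def knn_pairings(dist, k):
    """Index pairs (i, j) such that j is among the k nearest neighbours of i
    in the dense distance matrix `dist` (self-matches in column 0 are skipped)."""
    n = dist.shape[0]
    _, indices = torch.sort(dist, dim=1)                    # ascending per row
    mask = torch.zeros((n, n)).scatter(1, indices[:, 1:k + 1], 1)
    rows, cols = np.where(mask.numpy() == 1)
    return np.column_stack((rows, cols))

# e.g. on a random point cloud:
points = torch.randn(100, 2)
pairs = knn_pairings(torch.cdist(points, points), k=3)      # shape (100 * 3, 2)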
Example #4
        for seed in [30]:
            print('{} out of {}'.format(progress_count, tot_count))
            progress_count += 1
            name = 'vr_ns{}_seed{}'.format(n_samples, seed)

            data, color = dataset_sampler.sample(n_samples, seed=seed)
            data, pairings, color = make_data(data, color, name=name)

            # path_pairings = '{}pairings_{}.npy'.format(PATH_ROOT_SWISSROLL, name)
            # path_data = '{}data_{}.npy'.format(PATH_ROOT_SWISSROLL, name)
            # path_color = '{}color_{}.npy'.format(PATH_ROOT_SWISSROLL, name)
            # pairings, data, color = np.load(path_pairings), np.load(path_data), np.load(path_color)
            #
            make_plot(data,
                      pairings,
                      color,
                      name=name,
                      path_root=path_to_save,
                      cmap=plt.cm.viridis)

    # name = '512_1'
    # data, color = dataset_sampler.sample(512)
    # make_data(data, color, name = name)
    #
    # path_pairings = '{}pairings_{}.npy'.format(PATH_ROOT, name)
    # path_data = '{}data_{}.npy'.format(PATH_ROOT, name)
    # path_color = '{}color_{}.npy'.format(PATH_ROOT, name)
    # pairings, data, color = np.load(path_pairings), np.load(path_data), np.load(path_color)
    #
    # #
    # make_plot(data, pairings, color, name = name)
    #
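
The commented-out blocks above read cached pairings/data/color arrays back from .npy files. A minimal sketch of the matching save/load round-trip, assuming (as the format strings imply) that the path root already ends with a separator; both helper names are hypothetical.

import numpy as np

# Hypothetical helpers mirroring the '{}pairings_{}.npy'.format(PATH_ROOT, name)
# naming used in the commented-out code above.
def save_cached(path_root, name, data, pairings, color):
    np.save('{}data_{}.npy'.format(path_root, name), data)
    np.save('{}pairings_{}.npy'.format(path_root, name), pairings)
    np.save('{}color_{}.npy'.format(path_root, name), color)

def load_cached(path_root, name):
    pairings = np.load('{}pairings_{}.npy'.format(path_root, name))
    data = np.load('{}data_{}.npy'.format(path_root, name))
    color = np.load('{}color_{}.npy'.format(path_root, name))
    return data, pairings, color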