from scripts.ssc.wc_offline.config_libraries.global_register_definitions import (
    PATH_GR_SWISSROLL_EULER, PATH_GR_SWISSROLL_NOISE_EULER)
from src.data_preprocessing.witness_complex_offline.config import ConfigWC_Grid
from src.datasets.datasets import SwissRoll

# Grid of witness-complex precomputation configs for the noise-free Swiss Roll.
swissroll_nonoise = ConfigWC_Grid(
    dataset=[SwissRoll()],
    sampling_kwargs=[dict(n_samples=2560)],
    batch_size=[64, 128, 256, 512],
    wc_kwargs=[dict()],
    eval_size=[0.2],
    n_jobs=[2],
    seed=[36, 3851, 2570, 4304, 1935, 7954, 5095, 5310, 1577, 3288],
    global_register=PATH_GR_SWISSROLL_EULER,
    root_path='/cluster/home/schsimo/MT/output/WitnessComplexes/SwissRoll/nonoise',
    verbose=True)

# Same grid for the Swiss Roll with sampling noise (noise=0.05).
swissroll_noise005 = ConfigWC_Grid(
    dataset=[SwissRoll()],
    sampling_kwargs=[dict(n_samples=2560, noise=0.05)],
    batch_size=[64, 128, 256, 512],
    wc_kwargs=[dict()],
    eval_size=[0.2],
    n_jobs=[2],
    seed=[6973, 5305, 6233, 1503, 3947, 1425, 3391, 2941, 1218, 7946],
    global_register=PATH_GR_SWISSROLL_NOISE_EULER,
    root_path='/cluster/home/schsimo/MT/output/WitnessComplexes/SwissRoll/noise',
    verbose=True)
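# --- Illustration (not part of the original config file) ---------------------
# A minimal sketch of how a grid such as swissroll_nonoise is presumably
# expanded into individual runs: list-valued fields are swept with
# itertools.product (the mechanism visible in the ConfigWC_Grid fragment
# further below). Plain dicts stand in for ConfigWC_Grid/ConfigWC here;
# only itertools is assumed.
import itertools

grid = dict(batch_size=[64, 128, 256, 512],
            seed=[36, 3851, 2570],
            eval_size=[0.2])
keys = list(grid.keys())
single_runs = [dict(zip(keys, combo))
               for combo in itertools.product(*(grid[k] for k in keys))]
print(len(single_runs))   # 4 batch sizes x 3 seeds x 1 eval size = 12 runs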
                    if k not in tmp:
                        tmp[k] = {}
                    tmp = tmp[k]
                tmp[kc[-1]] = kc_v
            ret.append(Config_Competitors(**ret_i))
        return ret


placeholder_config_competitors = Config_Competitors(
    model_class=tSNE,
    model_kwargs=dict(),
    dataset=SwissRoll(),
    sampling_kwargs={'n_samples': [2560]},
    eval=[ConfigEval(
        active=True,
        evaluate_on=None,
        save_eval_latent=True,
        save_train_latent=True,
        online_visualization=False,
        k_min=5,
        k_max=20,
        k_step=5,
    )],
    uid='uid',
    verbose=False,
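# --- Illustration (not part of the original config file) ---------------------
# Rough sketch of what the tSNE competitor configured above amounts to, with
# scikit-learn's TSNE and make_swiss_roll as stand-ins for the project's
# internal tSNE wrapper and SwissRoll dataset (both are assumptions).
from sklearn.datasets import make_swiss_roll
from sklearn.manifold import TSNE

data, color = make_swiss_roll(n_samples=2560, random_state=0)
latent = TSNE(n_components=2, random_state=0).fit_transform(data)
print(latent.shape)   # (2560, 2)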
import random

import torch
import numpy as np

from scripts.ssc.persistence_pairings_visualization.utils_definitions import make_plot
from src.datasets.datasets import SwissRoll
from src.topology.witness_complex import WitnessComplex

if __name__ == "__main__":
    dataset_sampler = SwissRoll()
    N_WITNESSES = 2048
    n_samples = 128
    path_to_save = '/Users/simons/PycharmProjects/MT-VAEs-TDA/output/visualisation_nnsys/wc{}_w{}/'.format(
        n_samples, N_WITNESSES)

    N_sim = 100
    ks = [1, 2, 3, 4, 6, 8, 12, 16]
    ntot = int(len(ks) * N_sim)

    counter = 1
    for seed in list(set(np.random.randint(1, 100000, N_sim))):
        # Sample a dense witness set, then pick a random subset as landmarks.
        witnesses, color_ = dataset_sampler.sample(N_WITNESSES, seed=seed)
        ind = random.sample(range(N_WITNESSES), n_samples)
        landmarks, color = witnesses[ind, :], color_[ind]
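# --- Illustration (not part of the original script) --------------------------
# The script above draws landmarks uniformly at random from the witness set.
# A common alternative for witness complexes is greedy maxmin (farthest-point)
# landmark selection; the pure-numpy sketch below is independent of the
# project's WitnessComplex class and only illustrates that alternative.
import numpy as np

def maxmin_landmarks(points, n_landmarks, seed=0):
    """Greedily pick points, each maximising its distance to those already chosen."""
    rng = np.random.default_rng(seed)
    idx = [int(rng.integers(len(points)))]
    dist = np.linalg.norm(points - points[idx[0]], axis=1)
    for _ in range(n_landmarks - 1):
        idx.append(int(dist.argmax()))
        dist = np.minimum(dist, np.linalg.norm(points - points[idx[-1]], axis=1))
    return np.array(idx)

toy_witnesses = np.random.default_rng(1).normal(size=(2048, 3))
landmark_idx = maxmin_landmarks(toy_witnesses, n_landmarks=128)
print(landmark_idx.shape)   # (128,)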
# dataset = SwissRoll()
# small = [0.5, 0.8]
# large = [1, 1.3]
#
# area_l = math.pi*(large[1]**2-large[0]**2)
# area_s = math.pi*(small[1]**2-small[0]**2)
#
# sample_ratio = area_l/area_s
#
# infty_sign = tadasets.infty_sign(n=3000, noise=0.1)

# Vietoris-Rips diagrams for Swiss Roll samples of increasing size.
for seed in [11, 22, 33, 44]:
    for n in [16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 112, 128]:
        name_plot_vr = 'VR_SwissRoll_n{n}_seed{seed}.pdf'.format(n=n, seed=seed)
        computeVR(data=SwissRoll().sample(n, seed=seed)[0],
                  path_to_save=os.path.join(path, name_plot_vr))

        # df_an1 = annulus(int(sample_ratio*n), large[0], large[1], seed=seed)
        # df_an2 = annulus(n, small[0], small[1], label=1, seed=(seed+12))
        # df_an = df_an1.append(df_an2, ignore_index=True)
        # make_scatter(df_an,
        #              name='/annulus_manifold_ns{ns}_nl{nl}_s{s}'.format(
        #                  ns=n, nl=int(sample_ratio*n), s=seed),
        #              base_path=path)
        # data = df_an[['x','y']].to_numpy()
        #
        # name_plot = 'VR_ns{ns}_nl{nl}_seed{s}.pdf'.format(ns=n, nl=int(sample_ratio*n), s=seed)
        # data = tadasets.infty_sign(n=n, noise=noise)
        # labels = np.ones(n)
        # name_plot_m = 'manifold_infty_n{n}_noise{noise}.pdf'.format(n=n, noise=noise)
        # plot_2Dscatter(data, labels, path_to_save=os.path.join(path, name_plot_m), show=True)
        # name_plot_vr = 'VR_infty_n{n}_noise{noise}.pdf'.format(n=n, noise=noise)
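# --- Illustration (not part of the original script) --------------------------
# computeVR is project-internal; the sketch below shows roughly what such a
# Vietoris-Rips persistence plot could look like with the ripser and persim
# packages (an assumed substitute, not the repository's implementation).
import matplotlib.pyplot as plt
import numpy as np
from ripser import ripser
from persim import plot_diagrams

toy_data = np.random.default_rng(11).normal(size=(64, 3))  # stand-in for SwissRoll().sample(64)
dgms = ripser(toy_data, maxdim=1)['dgms']                   # H0 and H1 persistence diagrams
plot_diagrams(dgms)
plt.savefig('VR_example.pdf')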
        for v in itertools.product(*values):
            ret_i = dict(root_path=self.root_path,
                         global_register=self.global_register,
                         verbose=self.verbose)
            for kc, kc_v in zip(key_chains, v):
                tmp = ret_i
                # Walk/create the nested dict along the key chain, then set the leaf value.
                for k in kc[:-1]:
                    if k not in tmp:
                        tmp[k] = {}
                    tmp = tmp[k]
                tmp[kc[-1]] = kc_v
            ret.append(ConfigWC(**ret_i))
        return ret


placeholder_config_wc = ConfigWC(dataset=SwissRoll(),
                                 sampling_kwargs=dict(n_samples=2560),
                                 batch_size=64,
                                 wc_kwargs=dict(),
                                 eval_size=0.2,
                                 n_jobs=1,
                                 seed=1,
                                 global_register='',
                                 root_path='',
                                 verbose=False)
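# --- Illustration (not part of the original file) -----------------------------
# What the key-chain loop above does, on a toy input: nested grid keys such as
# ('sampling_kwargs', 'n_samples') are reassembled into nested keyword dicts
# for every combination produced by itertools.product.
key_chains = [('batch_size',), ('sampling_kwargs', 'n_samples')]
combo = (64, 2560)

ret_i = {}
for kc, kc_v in zip(key_chains, combo):
    tmp = ret_i
    for k in kc[:-1]:
        if k not in tmp:
            tmp[k] = {}
        tmp = tmp[k]
    tmp[kc[-1]] = kc_v
print(ret_i)   # {'batch_size': 64, 'sampling_kwargs': {'n_samples': 2560}}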
import os

import numpy as np
import torch
from torch.utils.data import TensorDataset, DataLoader

from src.data_preprocessing.witness_complex_offline.config import ConfigWC
from src.datasets.datasets import SwissRoll

if __name__ == "__main__":
    n_samples = 5
    # labels is needed below for the comparison with the loaded tensor.
    labels = torch.from_numpy(np.array(range(n_samples)))

    # dataset_train = TensorDataset(labels)
    # dataset_train_2 = TensorDataset(labels)
    #
    # train_loader = DataLoader(dataset_train, batch_size=1, shuffle=False,
    #                           pin_memory=True, drop_last=True)
    #
    # path = '/Users/simons/PycharmProjects/MT-VAEs-TDA/output/tests'
    # torch.save(labels, os.path.join(path, 'labels_tensor.pt'))

    path = '/Users/simons/PycharmProjects/MT-VAEs-TDA/output/tests'
    labels_loaded = torch.load(os.path.join(path, 'labels_tensor.pt'))
    print(torch.eq(labels_loaded, labels))

    config = ConfigWC(SwissRoll(), dict(), dict(), 1, 1, 'global_bla', 'root_bla')
    print(config.uid)
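# --- Illustration (not part of the original script) --------------------------
# The save half of the round trip is commented out above; here is a
# self-contained version of the same check that writes to a temporary
# directory instead of the hard-coded test path.
import os
import tempfile

import torch

with tempfile.TemporaryDirectory() as tmpdir:
    labels = torch.arange(5)
    target = os.path.join(tmpdir, 'labels_tensor.pt')
    torch.save(labels, target)
    labels_loaded = torch.load(target)
    print(torch.equal(labels, labels_loaded))   # True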
wcae = '/Users/simons/MT_data/sync/euler_sync_scratch/schsimo/output/WCAE_swissroll_nonoise/SwissRoll-n_samples2560-seed1935-Autoencoder_MLP_topoae-32-32-lr1_100-bs256-nep1000-rlw1-tlw8192-mepush_active21_20-k3-rmax10-seed1935-1edc2a73/'

eval_models_dict = {
    'TopoAE64': topoae_64,
    'TopoAE128': topoae_128,
    'TopoAE256': topoae_256,
    'WCTopoAE64': wctopoae_64,
    'WCTopoAE128': wctopoae_128,
    'WCTopoAE256': wctopoae_256,
}
eval_models_dict = {'wcae': wcae}

# sample data
n_samples = 2560
manifold = SwissRoll()
data_manifold, data, labels = manifold.sample_manifold(n_samples=n_samples, seed=1)

model_names = []
values = []
for model_name, path in eval_models_dict.items():
    # load WC-AE
    model_kwargs = dict(input_dim=3, latent_dim=2, size_hidden_layers=[32, 32])
    autoencoder = Autoencoder_MLP_topoae(**model_kwargs)
    model = WitnessComplexAutoencoder(autoencoder)
    state_dict = torch.load(os.path.join(path, 'model_state.pth'))
    model.load_state_dict(state_dict)
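# --- Illustration (not part of the original script) --------------------------
# After load_state_dict, the usual next step is to embed the sampled data with
# the trained encoder. The WCAE classes are project-internal, so a stand-in
# torch MLP is used below; only the generic eval()/no_grad() inference pattern
# is the point.
import torch
import torch.nn as nn

encoder = nn.Sequential(nn.Linear(3, 32), nn.ReLU(),
                        nn.Linear(32, 32), nn.ReLU(),
                        nn.Linear(32, 2))   # 3 -> 2, mirroring input_dim/latent_dim above
encoder.eval()                              # inference mode for dropout/batch-norm layers
with torch.no_grad():
    points = torch.randn(2560, 3)           # stand-in for the sampled Swiss Roll points
    latent = encoder(points)
print(latent.shape)                         # torch.Size([2560, 2])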