def test_upsampling_matrices(self):
    nodes_enc = [4, 16, 32, 64, 256]
    ups = gc.WEI
    # Check contiguous, alternated and random community orderings.
    for type_z in [ds.CONT, ds.ALT, ds.RAND]:
        self.G_params['type_z'] = type_z
        G = ds.create_graph(self.G_params, seed=SEED)
        cluster = gc.MultiResGraphClustering(G, nodes_enc, k=4,
                                             up_method=ups)
        # Expanding the unique labels through all Us must recover the
        # label of every node in the original graph.
        out = np.unique(cluster.labels[0])
        for U in cluster.Us:
            out = U.dot(out)
        self.assertTrue(np.array_equal(out, cluster.labels[0]))
def test_downsampling_matrices(self):
    nodes_enc = [256, 64, 32, 16, 4]
    ups = gc.WEI
    # Check contiguous, alternated and random community orderings.
    for type_z in [ds.CONT, ds.ALT, ds.RAND]:
        self.G_params['type_z'] = type_z
        G = ds.create_graph(self.G_params, seed=SEED)
        cluster = gc.MultiResGraphClustering(G, nodes_enc, k=4,
                                             up_method=ups)
        # Averaging the per-node labels through all Ds must collapse
        # them back to the unique label of each cluster.
        out = np.array(cluster.labels[-1])
        for D in cluster.Ds:
            out = D.dot(out) / np.sum(D, 1)
        self.assertTrue(np.array_equal(out, np.unique(cluster.labels[-1])))
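# Toy illustration (not part of the test suite) of the round-trip property the
# two tests above check. It does not use gc.MultiResGraphClustering: the binary
# membership matrix built by hand below is only an assumption about how the
# Us/Ds matrices behave (upsample by membership, downsample by row-normalized
# transposition).
import numpy as np

labels = np.array([0, 0, 1, 1, 1, 2])           # cluster label of each node
U = np.zeros((labels.size, labels.max() + 1))   # one column per cluster
U[np.arange(labels.size), labels] = 1           # binary membership matrix

up = U.dot(np.unique(labels))                   # expand unique labels to nodes
assert np.array_equal(up, labels)

D = U.T                                         # downsample: transpose...
down = D.dot(labels) / np.sum(D, 1)             # ...with row-wise averaging
assert np.array_equal(down, np.unique(labels))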
def run(id, Gs, signals, lrn, deltas):
    Gx = ds.create_graph(Gs['params'], seed=SEED)
    Gy = ds.create_graph(Gs['params_y'], seed=SEED)
    data = ds.LinearDS2GS(Gx, Gy, signals['samples'], signals['L'], deltas,
                          median=signals['median'],
                          same_coeffs=signals['same_coeffs'])
    data.to_unit_norm()
    data.add_noise(signals['noise'], test_only=signals['test_only'])
    data.to_tensor()

    epochs = 0
    mean_err = np.zeros(N_EXPS)
    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        if exp['type'] == 'Linear':
            model = LinearModel(exp['N'])
        elif exp['type'] == 'Enc_Dec':
            clust_x = gc.MultiResGraphClustering(Gx, exp['n_enc'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['downs'])
            clust_y = gc.MultiResGraphClustering(Gy, exp['n_dec'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['ups'])
            net = GraphEncoderDecoder(exp['f_enc'], clust_x.sizes, clust_x.Ds,
                                      exp['f_dec'], clust_y.sizes, clust_y.Us,
                                      exp['f_conv'], As_dec=clust_y.As,
                                      As_enc=clust_x.As, act_fn=lrn['af'],
                                      last_act_fn=lrn['laf'], ups=exp['ups'],
                                      downs=exp['downs'])
        elif exp['type'] == 'AutoConv':
            net = ConvAutoencoder(exp['f_enc'], exp['kernel_enc'],
                                  exp['f_dec'], exp['kernel_dec'])
        elif exp['type'] == 'AutoFC':
            net = FCAutoencoder(exp['n_enc'], exp['n_dec'], bias=exp['bias'])
        else:
            raise RuntimeError('Unknown experiment type')

        if exp['type'] != 'Linear':
            model = Model(net, learning_rate=lrn['lr'], decay_rate=lrn['dr'],
                          batch_size=lrn['batch'], epochs=lrn['epochs'],
                          eval_freq=EVAL_F, max_non_dec=lrn['non_dec'],
                          verbose=VERBOSE)
            epochs, _, _ = model.fit(data.train_X, data.train_Y,
                                     data.val_X, data.val_Y)
        mean_err[i], med_err[i], mse[i] = model.test(data.test_X, data.test_Y)
        print('G: {}, {}-{} ({}): epochs {} - mse {} - MedianErr: {}'
              .format(id, i, exp['type'], model.count_params(), epochs,
                      mse[i], med_err[i]))
    return mean_err, med_err, mse
def setUp(self):
    self.G_params = {}
    self.G_params['type'] = ds.SBM
    self.G_params['N'] = 256
    self.G_params['k'] = 4
    self.G_params['p'] = 0.15
    self.G_params['q'] = 0.01/4
    self.G_params['type_z'] = ds.CONT
    self.G = ds.create_graph(self.G_params, seed=SEED)
def setUp(self):
    np.random.seed(SEED)
    G_params = {}
    G_params['type'] = ds.SBM
    G_params['N'] = 256
    G_params['k'] = 4
    G_params['p'] = 0.15
    G_params['q'] = 0.01/4
    G_params['type_z'] = ds.RAND
    G = ds.create_graph(G_params, seed=SEED)
    nodes_dec = [4, 16, 32, 64, 256]
    ups = gc.WEI
    self.N = nodes_dec[-1]
    self.k = nodes_dec[0]
    self.cluster = gc.MultiResGraphClustering(G, nodes_dec, k=4,
                                              up_method=ups)
    # Stack one GraphUpsampling layer per upsampling matrix.
    self.model = Sequential()
    for i, U in enumerate(self.cluster.Us):
        self.add_layer(GraphUpsampling(U, self.cluster.As[i+1], gamma=1))
        {'type': 'AutoConv', 'f_enc': [1, 1, 1, 1, 1, 2, 2, 2, 2],
         'kernel_enc': 6, 'f_dec': [2, 2, 1], 'kernel_dec': 6}]

N_EXPS = len(EXPS)


def run(id, Gs, signals, lrn, p_n):
    # Gx, Gy = ds.perturbated_graphs(Gs['params'], Gs['pct_val'][0],
    #                                Gs['pct_val'][1], pct=Gs['pct'],
    #                                seed=SEED)
    Gx = ds.create_graph(Gs['params'], seed=SEED)
    Gy = ds.create_graph(Gs['params_y'], seed=SEED)
    data = ds.LinearDS2GS(Gx, Gy, signals['samples'], signals['L'],
                          signals['deltas'], median=signals['median'],
                          same_coeffs=signals['same_coeffs'])
    data.to_unit_norm()
    data.add_noise(p_n, test_only=signals['test_only'])
    data.to_tensor()

    epochs = 0
    mean_err = np.zeros(N_EXPS)
    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        if exp['type'] == 'Linear':
            model = LinearModel(exp['N'])
        elif exp['type'] == 'Enc_Dec':
def run(id, Gs, Signals, lrn, pert):
    if Gs['params']['type'] == ds.SBM:
        Gx, Gy = ds.nodes_perturbated_graphs(Gs['params'], pert, seed=SEED,
                                             perm=True)
    elif Gs['params']['type'] == ds.BA:
        Gx = ds.create_graph(Gs['params'], SEED)
        G_params_y = Gs['params'].copy()
        G_params_y['N'] = Gs['params']['N'] - pert
        Gy = ds.create_graph(G_params_y, 2*SEED)
    else:
        raise RuntimeError("Choose a valid graph type")

    data = ds.LinearDS2GSNodesPert(Gx, Gy, Signals['samples'], Signals['L'],
                                   Signals['deltas'], median=Signals['median'],
                                   same_coeffs=Signals['same_coeffs'],
                                   neg_coeffs=Signals['neg_coeffs'])
    data.to_unit_norm()
    data.add_noise(Signals['noise'], test_only=Signals['test_only'])
    data.to_tensor()

    epochs = 0
    params = np.zeros(N_EXPS)
    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        if exp['type'] == 'Linear':
            model = LinearModel(exp['N'])
        elif exp['type'] == 'Enc_Dec':
            # The decoder output size must match the perturbed graph Gy.
            exp['n_dec'][-1] = Gy.N
            clust_x = gc.MultiResGraphClustering(Gx, exp['n_enc'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['downs'])
            clust_y = gc.MultiResGraphClustering(Gy, exp['n_dec'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['ups'])
            net = GraphEncoderDecoder(exp['f_enc'], clust_x.sizes, clust_x.Ds,
                                      exp['f_dec'], clust_y.sizes, clust_y.Us,
                                      exp['f_conv'], As_dec=clust_y.As,
                                      K_dec=exp['K_dec'], K_enc=exp['K_enc'],
                                      As_enc=clust_x.As, act_fn=lrn['af'],
                                      last_act_fn=lrn['laf'], ups=exp['ups'],
                                      downs=exp['downs'])
        elif exp['type'] == 'AutoConv':
            conv = exp['convs'][PERT.index(pert)]
            net = ConvAutoencoder(conv['f_enc'], conv['kernel_enc'],
                                  conv['f_dec'], conv['kernel_dec'])
        elif exp['type'] == 'AutoFC':
            exp['n_dec'][-1] = Gy.N
            net = FCAutoencoder(exp['n_enc'], exp['n_dec'], bias=exp['bias'])
        else:
            raise RuntimeError('Unknown experiment type')

        if exp['type'] != 'Linear':
            model = Model(net, learning_rate=lrn['lr'], decay_rate=lrn['dr'],
                          batch_size=lrn['batch'], epochs=lrn['epochs'],
                          eval_freq=EVAL_F, max_non_dec=lrn['non_dec'],
                          verbose=VERBOSE, early_stop=exp['early_stop'])
            epochs, _, _ = model.fit(data.train_X, data.train_Y,
                                     data.val_X, data.val_Y)
        _, med_err[i], mse[i] = model.test(data.test_X, data.test_Y)
        params[i] = model.count_params()
        print('G: {}, {}-{} ({}): epochs {} - mse {} - MedianErr: {}'.format(
              id, i, exp['type'], params[i], epochs, mse[i], med_err[i]))
    return params, med_err, mse
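# Illustrative 'Enc_Dec' entry for EXPS (not the configuration actually used in
# the experiments): only the keys mirror what run() reads above; every value
# below is a placeholder, and the name EXAMPLE_ENC_DEC is hypothetical.
EXAMPLE_ENC_DEC = {
    'type': 'Enc_Dec',
    'n_enc': [256, 64, 32, 16, 4],    # node sizes of the encoder levels
    'n_dec': [4, 16, 32, 64, 256],    # node sizes of the decoder levels
    'f_enc': [1, 5, 5, 5, 5],         # features per encoder level
    'f_dec': [5, 5, 5, 5, 5],         # features per decoder level
    'f_conv': [5, 5, 1],              # features of the final conv layers
    'K_enc': 3,                       # filter order of the encoder
    'K_dec': 3,                       # filter order of the decoder
    'ups': gc.WEI,                    # upsampling method
    'downs': gc.WEI,                  # downsampling method
    'early_stop': True,               # passed to Model via exp['early_stop']
}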
def test_cluster_sizes_dec(self):
    self.G_params['type_z'] = ds.CONT
    G = ds.create_graph(self.G_params, seed=SEED)