Example #1
 def test_cont_nodes(self):
     self.G_params['type_z'] = ds.RAND
     G = ds.create_graph(self.G_params, seed=SEED)
     cluster = gc.MultiResGraphClustering(G, self.nodes_enc, k=4)
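     # Chaining the upsampling matrices U over the unique cluster ids should
     # reproduce the original label vector cluster.labels[0].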
     out = np.unique(cluster.labels[0])
     for U in cluster.Us:
         out = U.dot(out)
     self.assertTrue(np.array_equal(out, cluster.labels[0]))
Example #2
 def setUp(self):
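     # SBM graph with 256 nodes split into 4 communities; p and q are the
     # intra- and inter-community edge probabilities.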
     self.G_params = {}
     self.G_params['type'] = ds.SBM
     self.G_params['N'] = 256
     self.G_params['k'] = 4
     self.G_params['p'] = 0.15
     self.G_params['q'] = 0.01 / 4
     self.G_params['type_z'] = ds.CONT
     self.G = ds.create_graph(self.G_params, seed=SEED)
Example #3
 def setUp(self):
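     # Denser SBM (p=0.3, q=0.05) with a random node-to-community assignment
     # (type_z = ds.RAND).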
     np.random.seed(SEED)
     self.G_params = {}
     self.G_params['type'] = ds.SBM
     self.G_params['N'] = 256
     self.G_params['k'] = 4
     self.G_params['p'] = 0.3
     self.G_params['q'] = 0.05
     self.G_params['type_z'] = ds.RAND
     self.Gx = ds.create_graph(self.G_params, seed=SEED)
Example #4
 def setUp(self):
     np.random.seed(SEED)
     G_params = {}
     G_params['type'] = ds.SBM
     G_params['N'] = 256
     G_params['k'] = 4
     G_params['p'] = 0.15
     G_params['q'] = 0.01 / 4
     G_params['type_z'] = ds.RAND
     G = ds.create_graph(G_params, seed=SEED)
     nodes_dec = [4, 16, 32, 64, 256]
     self.N = nodes_dec[-1]
     self.k = nodes_dec[0]
     self.cluster = gc.MultiResGraphClustering(G, nodes_dec, k=4)
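     # Build the decoder as a stack of MeanUps upsampling layers, one per
     # clustering level (add_layer is presumably a test-class helper that
     # appends each layer to self.model).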
     self.model = Sequential()
     for i, U in enumerate(self.cluster.Us):
         self.add_layer(MeanUps(U, self.cluster.As[i], gamma=1))
Example #5
 def setUp(self):
     G_params = {}
     G_params['type'] = ds.SBM
     G_params['N'] = 256
     G_params['k'] = 4
     G_params['p'] = 0.15
     G_params['q'] = 0.01 / 4
     G_params['type_z'] = ds.RAND
     G = ds.create_graph(G_params, seed=SEED)
     nodes = [4, 16, 32, 256]
     self.K = 3
     cluster = gc.MultiResGraphClustering(G, nodes, k=4)
     self.ups = []
     self.As = []
     self.Us = []
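     # One GFUps upsampling module per clustering level, keeping the matching
     # A and U matrices as tensors.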
     for i in range(len(cluster.Us)):
         self.ups.append(GFUps(cluster.Us[i], cluster.As[i], self.K))
         self.As.append(Tensor(cluster.As[i]))
         self.Us.append(Tensor(cluster.Us[i]))
Example #6
def run(id, Gs, Signals, Net, p_miss):
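    # Build the graph and its multi-resolution clusters, create a noisy and
    # partially observed graph signal, and fit one decoder per experiment.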
    G = ds.create_graph(Gs, SEED)
    clts = compute_clusters(G, Gs['k'], Net['type_A'])
    signal = ds.GraphSignal.create(Signals['type'],
                                   G,
                                   Signals['non_lin'],
                                   Signals['L'],
                                   Signals['deltas'],
                                   to_0_1=Signals['to_0_1'])
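    # Noisy observation plus an inpainting mask hiding a fraction p_miss of
    # the nodes; mask_err is the squared error of the masked noisy input.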
    x_n = ds.GraphSignal.add_noise(signal.x, Signals['noise'])
    inp_mask = ds.GraphSignal.generate_inpaint_mask(signal.x, p_miss)
    mask_err = np.sum(np.square(signal.x - x_n * inp_mask))

    err = np.zeros(N_EXPS)
    node_err = np.zeros(N_EXPS)
    params = np.zeros(N_EXPS)
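    # Fit one GraphDeepDecoder per experiment configuration and record its
    # node error, global error and parameter count.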
    for i in range(N_EXPS):
        dec = GraphDeepDecoder(EXPS[i]['fts'],
                               clts[i].sizes,
                               clts[i].Us,
                               As=clts[i].As,
                               act_fn=Net['af'],
                               ups=EXPS[i]['ups'],
                               gamma=EXPS[i]['gamma'],
                               batch_norm=Net['bn'],
                               last_act_fn=Net['laf'])
        model = Inpaint(dec,
                        inp_mask,
                        learning_rate=Net['lr'],
                        epochs=EXPS[i]['epochs'])
        model.fit(x_n)
        node_err[i], err[i] = model.test(signal.x)
        params[i] = model.count_params()
        print('Graph {}-{} ({}):\tEpochs: {}\tNode Err: {:.8f}\tErr: {:.6f}'.
              format(id, i + 1, params[i], EXPS[i]['epochs'], node_err[i],
                     err[i]))
    return node_err, err, mask_err, params
Example #7
if __name__ == '__main__':
    # Graph parameters
    G_params = {}
    G_params['type'] = ds.SBM  # SBM or ER
    G_params['N'] = N = 256
    G_params['k'] = 4
    G_params['p'] = 0.15
    G_params['q'] = 0.01 / 4
    G_params['type_z'] = ds.RAND

    # Set seeds
    np.random.seed(SEED)
    manual_seed(SEED)

    G = ds.create_graph(G_params, SEED)
    sizes, Us, As = compute_clusters(G_params['k'])

    print("CPUs used:", N_CPUS)
    start_time = time.time()
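    # error[i, j, s]: error for noise level i, signal realization j, scenario s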
    error = np.zeros((len(N_P), n_signals, N_SCENARIOS))
    for i, n_p in enumerate(N_P):
        print('Noise:', n_p)
        results = []
        with Pool(processes=N_CPUS) as pool:
            for j in range(n_signals):
                signal = ds.DifussedSparseGS(G, L, G_params['k'])
                signal.signal_to_0_1_interval()
                signal.to_unit_norm()

                results.append(
Example #8
    # Graph parameters
    G_params = {}
    G_params['type'] = ds.SBM  # SBM or ER
    G_params['N'] = N = 256
    G_params['k'] = k = 4
    G_params['p'] = 0.15
    G_params['q'] = 0.01/k
    G_params['type_z'] = ds.RAND

    # Set seeds
    np.random.seed(SEED)
    manual_seed(SEED)

    start_time = time.time()
    G = ds.create_graph(G_params, seed=SEED)
    G.compute_fourier_basis()
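    # Shortest-path distances between all node pairs, used below when
    # creating the input signals.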
    D = dijkstra(G.W)
    sizes, Us, As = compute_clusters(G, G_params['k'])
    data['g_params'] = G_params

    print("CPUs used:", N_CPUS)
    error = np.zeros((len(N_P), N_SIGNALS, N_EXPS))
    for i, n_p in enumerate(N_P):
        print('Noise:', n_p)
        results = []
        with Pool(processes=N_CPUS) as pool:
            for j in range(N_SIGNALS):
                signals = [create_signal(s_in, G, L, k, D).x for s_in in INPUTS]
                results.append(pool.apply_async(test_input,
                               args=[j, signals, sizes, Us, As, n_p,