Example #1
def create_model(Gx, Gy, exp, lrn):
    if exp['type'] == 'Linear':
        model = LinearModel(exp['N'])
    elif exp['type'] == 'Enc_Dec':
        clust_x = gc.MultiResGraphClustering(Gx, exp['n_enc'],
                                             k=exp['n_enc'][-1],
                                             up_method=exp['downs'])
        clust_y = gc.MultiResGraphClustering(Gy, exp['n_dec'],
                                             k=exp['n_enc'][-1],
                                             up_method=exp['ups'])
        net = GraphEncoderDecoder(exp['f_enc'], clust_x.sizes, clust_x.Ds,
                                  exp['f_dec'], clust_y.sizes, clust_y.Us,
                                  exp['f_conv'], As_dec=clust_y.As,
                                  As_enc=clust_x.As, act_fn=lrn['af'],
                                  last_act_fn=lrn['laf'], ups=exp['ups'],
                                  downs=exp['downs'])
    elif exp['type'] == 'AutoConv':
        net = ConvAutoencoder(exp['f_enc'], exp['kernel_enc'],
                              exp['f_dec'], exp['kernel_dec'])
    elif exp['type'] == 'AutoFC':
        net = FCAutoencoder(exp['n_enc'], exp['n_dec'], bias=exp['bias'])
    else:
        raise RuntimeError('Unknown experiment type')
    if exp['type'] != 'Linear':
        model = Model(net, learning_rate=lrn['lr'], decay_rate=lrn['dr'],
                      batch_size=lrn['batch'], epochs=lrn['epochs'],
                      eval_freq=EVAL_F, max_non_dec=lrn['non_dec'],
                      verbose=VERBOSE)
    return model
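A minimal call sketch for create_model() follows; only the dictionary keys mirror what the function reads above, while the concrete values (and the 64-node size) are illustrative assumptions.

exp = {'type': 'Linear', 'N': 64}
lrn = {'lr': 0.01, 'dr': 0.9, 'batch': 100, 'epochs': 50,
       'non_dec': 10, 'af': None, 'laf': None}
# Gx and Gy are only inspected by the 'Enc_Dec' branch, so placeholders
# suffice for the 'Linear' baseline sketched here
model = create_model(None, None, exp, lrn)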
Example #2
def test_model(id, signals, nn_params, model_params):
    Gx, Gy = data_sets.perturbated_graphs(signals['g_params'], signals['eps1'], signals['eps2'],
                                          pct=signals['pct'], perm=signals['perm'])

    # Define the data model
    data = data_sets.LinearDS2GSLinksPert(Gx, Gy,
                                          signals['N_samples'],
                                          signals['L_filter'], signals['g_params']['k'],    # k is n_delts
                                          median=signals['median'])
    data.to_unit_norm()
    data.add_noise(signals['noise'], test_only=signals['test_only'])
    data.to_tensor()

    Gx.compute_laplacian('normalized')
    Gy.compute_laplacian('normalized')

    archit = GIGOArch(Gx.L.todense(), Gy.L.todense(),
                      nn_params['Fi'], nn_params['Fo'], nn_params['Ki'], nn_params['Ko'], nn_params['C'],
                      nn_params['nonlin'], nn_params['last_act_fn'], nn_params['batch_norm'],
                      nn_params['arch_info'])

    model_params['arch'] = archit

    model = Model(**model_params)
    t_init = time.time()
    epochs, _, _ = model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)
    t_conv = time.time() - t_init
    mean_err, med_err, mse = model.test(data.test_X, data.test_Y)

    print("DONE {}: MSE={} - Mean Err={} - Median Err={} - Params={} - t_conv={} - epochs={}".format(
        id, mse, mean_err, med_err, model.count_params(), round(t_conv, 4), epochs
    ))

    return mse, mean_err, med_err, model.count_params(), t_conv, epochs
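Both graphs are given the 'normalized' Laplacian before being passed to GIGOArch; in the usual convention this is L = I - D^{-1/2} A D^{-1/2}. A small NumPy sketch of that formula, assuming a dense symmetric adjacency matrix with no isolated nodes (the graph class used in the excerpt is assumed to follow the same convention):

import numpy as np

def normalized_laplacian(A):
    # A: dense, symmetric (possibly weighted) adjacency matrix, no isolated nodes
    d = np.asarray(A).sum(axis=1)
    D_inv_sqrt = np.diag(1.0 / np.sqrt(d))
    return np.eye(A.shape[0]) - D_inv_sqrt @ A @ D_inv_sqrt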
Example #3
def run(id, Gs, signals, lrn, deltas):
    Gx = ds.create_graph(Gs['params'], seed=SEED)
    Gy = ds.create_graph(Gs['params_y'], seed=SEED)
    data = ds.LinearDS2GS(Gx, Gy, signals['samples'], signals['L'],
                          deltas, median=signals['median'],
                          same_coeffs=signals['same_coeffs'])
    data.to_unit_norm()
    data.add_noise(signals['noise'], test_only=signals['test_only'])
    data.to_tensor()

    epochs = 0
    mean_err = np.zeros(N_EXPS)
    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        if exp['type'] == 'Linear':
            model = LinearModel(exp['N'])
        elif exp['type'] == 'Enc_Dec':
            clust_x = gc.MultiResGraphClustering(Gx, exp['n_enc'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['downs'])
            clust_y = gc.MultiResGraphClustering(Gy, exp['n_dec'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['ups'])
            net = GraphEncoderDecoder(exp['f_enc'], clust_x.sizes, clust_x.Ds,
                                      exp['f_dec'], clust_y.sizes, clust_y.Us,
                                      exp['f_conv'], As_dec=clust_y.As,
                                      As_enc=clust_x.As, act_fn=lrn['af'],
                                      last_act_fn=lrn['laf'], ups=exp['ups'],
                                      downs=exp['downs'])
        elif exp['type'] == 'AutoConv':
            net = ConvAutoencoder(exp['f_enc'], exp['kernel_enc'],
                                  exp['f_dec'], exp['kernel_dec'])
        elif exp['type'] == 'AutoFC':
            net = FCAutoencoder(exp['n_enc'], exp['n_dec'], bias=exp['bias'])
        else:
            raise RuntimeError('Unknown experiment type')
        if exp['type'] != 'Linear':
            model = Model(net, learning_rate=lrn['lr'], decay_rate=lrn['dr'],
                          batch_size=lrn['batch'], epochs=lrn['epochs'],
                          eval_freq=EVAL_F, max_non_dec=lrn['non_dec'],
                          verbose=VERBOSE)
        epochs, _, _ = model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)
        mean_err[i], med_err[i], mse[i] = model.test(data.test_X, data.test_Y)
        print('G: {}, {}-{} ({}): epochs {} - mse {} - MedianErr: {}'
              .format(id, i, exp['type'], model.count_params(), epochs,
                      mse[i], med_err[i]))
    return mean_err, med_err, mse
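run() returns one mean-error, median-error and MSE value per experiment; a sketch of averaging those arrays over several graph realizations, assuming a simple sequential driver (Gs, signals, lrn, deltas and N_GRAPHS are placeholders here; the original scripts may parallelize this step):

import numpy as np

N_GRAPHS = 10  # illustrative number of graph realizations
results = [run(g, Gs, signals, lrn, deltas) for g in range(N_GRAPHS)]
mean_err = np.stack([r[0] for r in results])   # shape (N_GRAPHS, N_EXPS)
med_err = np.stack([r[1] for r in results])
mse = np.stack([r[2] for r in results])
print('Mean MSE per experiment:', mse.mean(axis=0))
print('Median error per experiment:', np.median(med_err, axis=0))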
Example #4
def test_model(id, signals, nn_params, model_params):
    Gx, Gy = data_sets.perturbated_graphs(signals['g_params'],
                                          signals['eps1'],
                                          signals['eps2'],
                                          pct=signals['pct'],
                                          perm=signals['perm'])

    # Define the data model
    data = data_sets.LinearDS2GSLinksPert(
        Gx,
        Gy,
        signals['N_samples'],
        signals['L_filter'],
        signals['g_params']['k'],  # k is n_delts
        median=signals['median'])
    data.to_unit_norm()
    data.add_noise(signals['noise'], test_only=signals['test_only'])
    data.to_tensor()

    if nn_params['arch_type'] == "basic":
        Gx.compute_laplacian('normalized')
        archit = BasicArch(Gx.L.todense(), nn_params['F'], nn_params['K'],
                           nn_params['M'], nn_params['nonlin'], ARCH_INFO)
    elif nn_params['arch_type'] == "mlp":
        archit = MLP(nn_params['F'], nn_params['nonlin'], ARCH_INFO)
    elif nn_params['arch_type'] == "conv":
        archit = ConvNN(N, nn_params['F'], nn_params['K'], nn_params['nonlin'],
                        nn_params['M'], ARCH_INFO)
    elif nn_params['arch_type'] == "linear":
        archit = MLP(nn_params['F'], nn_params['nonlin'], ARCH_INFO)
    else:
        raise RuntimeError("arch_type has to be either basic, mlp or conv")

    model_params['arch'] = archit

    model = Model(**model_params)
    t_init = time.time()
    epochs, _, _ = model.fit(data.train_X, data.train_Y, data.val_X,
                             data.val_Y)
    t_conv = time.time() - t_init
    mean_err, med_err, mse = model.test(data.test_X, data.test_Y)

    print(
        "DONE {}: MSE={} - Mean Err={} - Median Err={} - Params={} - t_conv={} - epochs={}"
        .format(id, mse, mean_err, med_err, model.count_params(),
                round(t_conv, 4), epochs),
        flush=True)
    return mse, mean_err, med_err, model.count_params(), t_conv, epochs
Example #5
def estimate_signals(i, G_params, eps_c, eps_d, n_samples, L, nodes_enc,
                     nodes_dec, ups, feat_enc, feat_dec, feat_only_conv):
    # Create graphs
    Gx, Gy = data_sets.perturbated_graphs(G_params, eps_c, eps_d, seed=SEED)

    # Create graph signals
    data = data_sets.LinearDS2GS(Gx,
                                 Gy,
                                 n_samples,
                                 L,
                                 3 * G_params['k'],
                                 median=True)
    data.to_unit_norm()
    print('Median Diff between Y and X:',
          np.median(np.linalg.norm((data.train_X - data.train_Y)**2, 1)))

    X = data.train_X
    Beta = np.linalg.pinv(X.T.dot(X)).dot(X.T).dot(data.train_Y)
    test_Y = data.test_Y
    est_Y_test = data.test_X.dot(Beta)
    test_err = np.sum(
        (est_Y_test - test_Y)**2, axis=1) / np.linalg.norm(data.test_Y)**2
    print('Linear model: mean err: {} - median: {}'.format(
        np.mean(test_err), np.median(test_err)))

    data.to_tensor()

    # Obtain clusters
    cluster_x = gc.MultiResGraphClustering(Gx, nodes_enc, k=4, up_method=None)
    cluster_y = gc.MultiResGraphClustering(Gy, nodes_dec, k=4, up_method=ups)

    # Standard autoencoder: every layer keeps the original size Gx.N
    net = architecture.GraphEncoderDecoder(feat_enc, [Gx.N] * 7,
                                           cluster_x.Ds,
                                           feat_dec, [Gx.N] * 7,
                                           cluster_y.Us,
                                           feat_only_conv,
                                           As_dec=cluster_y.As,
                                           last_act_fn=nn.Tanh(),
                                           act_fn=nn.Tanh())
    model = Model(net,
                  decay_rate=.9,
                  epochs=25,
                  batch_size=100,
                  learning_rate=0.05,
                  verbose=True,
                  eval_freq=1,
                  max_non_dec=5)
    print('Model parameters: ', model.count_params())
    model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)
    mean_err, median_err, mse = model.test(data.test_X, data.test_Y)
    print(
        'Autoencoder: Graph {}: N: {} Mean MSE: {} - Mean Err: {} - Median Err: {}'
        .format(i, Gx.N, mse, mean_err, median_err))

    # Graph Autoenc
    net = architecture.GraphEncoderDecoder(feat_enc,
                                           cluster_x.sizes,
                                           cluster_x.Ds,
                                           feat_dec,
                                           cluster_y.sizes,
                                           cluster_y.Us,
                                           feat_only_conv,
                                           As_dec=cluster_y.As,
                                           last_act_fn=nn.Tanh(),
                                           act_fn=nn.Tanh())
    model = Model(net,
                  decay_rate=.9,
                  epochs=25,
                  batch_size=100,
                  learning_rate=0.05,
                  verbose=True,
                  eval_freq=1,
                  max_non_dec=5)
    print('Model parameters: ', model.count_params())
    model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)
    mean_err, median_err, mse = model.test(data.test_X, data.test_Y)
    print(
        'GRAPH ENC-DEC Graph {}: N: {} Mean MSE: {} - Mean Err: {} - Median Err: {}'
        .format(i, Gx.N, mse, mean_err, median_err))
    return mean_err, mse, model.count_params()
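The Beta computed for the linear baseline above is the ordinary least-squares estimator (X^T X)^{-1} X^T Y written via the pseudo-inverse; an equivalent and numerically more stable formulation, assuming train_X and train_Y are still 2-D NumPy arrays (i.e. before data.to_tensor() is called):

Beta, *_ = np.linalg.lstsq(data.train_X, data.train_Y, rcond=None)
est_Y_test = data.test_X.dot(Beta)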
Example #6
def estimate_signals(id, G_params, n_samples, L, nodes_enc, nodes_dec, ups,
                     feat_enc, feat_dec, feat_only_conv):
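    # NOTE: creat, dest, pct, deltas, median, same_coeffs, p_n, lr, dr, bs and
    # epochs are not defined in this excerpt; they come from the enclosing script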
    # Create graphs
    Gx, Gy = data_sets.perturbated_graphs(G_params,
                                          creat,
                                          dest,
                                          pct=pct,
                                          seed=SEED)
    diff_links = np.sum(Gx.A != Gy.A) / 2 / Gx.Ne * 100
    print('Links different(%):', diff_links)

    # Create graph signals
    data = data_sets.NonLinearDS2GS(Gx,
                                    Gy,
                                    n_samples,
                                    L,
                                    deltas,
                                    median=median,
                                    same_coeffs=same_coeffs)
    data.to_unit_norm()
    data.add_noise(p_n, test_only=True)
    mean_dist = np.median(np.linalg.norm(data.train_X - data.train_Y, axis=1))
    print('Distance signals:', mean_dist)
    data.to_tensor()

    N = G_params['N']
    k = G_params['k']

    model = LinearModel(N)
    model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)
    mean_err, med_err, mse = model.test(data.test_X, data.test_Y)
    print(
        'LINEAR Graph {}: N: {} Mean MSE: {} - Mean Err: {} - Median Err: {}'.
        format(id, Gx.N, mse, mean_err, med_err))

    # Obtain clusters
    cluster_x = gc.MultiResGraphClustering(Gx, nodes_enc, k=k, up_method=ups)
    cluster_y = gc.MultiResGraphClustering(Gy, nodes_dec, k=k, up_method=ups)

    # Graph Autoenc
    net = architecture.GraphEncoderDecoder(feat_enc,
                                           cluster_x.sizes,
                                           cluster_x.Ds,
                                           feat_dec,
                                           cluster_y.sizes,
                                           cluster_y.Us,
                                           feat_only_conv,
                                           As_dec=cluster_y.As,
                                           As_enc=cluster_x.As,
                                           ups=ups,
                                           last_act_fn=nn.Tanh(),
                                           downs=ups,
                                           act_fn=nn.Tanh())
    model = Model(net,
                  decay_rate=dr,
                  epochs=epochs,
                  batch_size=bs,
                  learning_rate=lr,
                  verbose=False,
                  eval_freq=1,
                  max_non_dec=10)
    print('Model parameters: ', model.count_params())
    iters, _, _ = model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)

    mean_err, med_err, mse = model.test(data.test_X, data.test_Y)
    print('G: {}, ({}): epochs {} - mse {} - MedianErr: {}'.format(
        id, model.count_params(), iters, mse, med_err))
    return mean_err, mse, med_err, diff_links, mean_dist, iters
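diff_links above counts the adjacency entries in which Gx and Gy disagree, halves the count because each undirected edge appears twice in A, and normalizes by the edge count; a toy NumPy check, assuming A is a symmetric 0/1 adjacency matrix and Ne its number of edges:

import numpy as np

Ax = np.array([[0, 1, 0],
               [1, 0, 1],
               [0, 1, 0]])
Ay = np.array([[0, 1, 1],
               [1, 0, 1],
               [1, 1, 0]])
Ne_x = np.triu(Ax).sum()                        # 2 edges in Gx
diff_links = np.sum(Ax != Ay) / 2 / Ne_x * 100
print(diff_links)                               # 50.0: one differing link out of 2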
Example #7
def run(id, Gs, signals, lrn, deltas):
    Gx = ds.create_graph(Gs['params'], seed=SEED)
    Gy = ds.create_graph(Gs['params_y'], seed=SEED)
    data = ds.LinearDS2GS(Gx, Gy, signals['samples'], signals['L'],
                          deltas, median=signals['median'],
                          same_coeffs=signals['same_coeffs'])
    data.to_unit_norm()
    data.add_noise(signals['noise'], test_only=signals['test_only'])
    data.to_tensor()

    epochs = 0
    mean_err = np.zeros(N_EXPS)
    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        if exp['type'] == 'Linear':
            model = LinearModel(exp['N'])
        elif exp['type'] == 'Enc_Dec':
            clust_x = gc.MultiResGraphClustering(Gx, exp['n_enc'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['downs'])
            clust_y = gc.MultiResGraphClustering(Gy, exp['n_dec'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['ups'])
            net = GraphEncoderDecoder(exp['f_enc'], clust_x.sizes, clust_x.Ds,
                                      exp['f_dec'], clust_y.sizes, clust_y.Us,
                                      exp['f_conv'], As_dec=clust_y.As,
                                      As_enc=clust_x.As, act_fn=lrn['af'],
                                      last_act_fn=lrn['laf'], ups=exp['ups'],
                                      downs=exp['downs'])
        elif exp['type'] == 'AutoConv':
            net = ConvAutoencoder(exp['f_enc'], exp['kernel_enc'],
                                  exp['f_dec'], exp['kernel_dec'])
        elif exp['type'] == 'AutoFC':
            net = FCAutoencoder(exp['n_enc'], exp['n_dec'], bias=exp['bias'])
        else:
            raise RuntimeError('Unknown experiment type')
        if exp['type'] != 'Linear':
            model = Model(net, learning_rate=lrn['lr'], decay_rate=lrn['dr'],
                          batch_size=lrn['batch'], epochs=lrn['epochs'],
                          eval_freq=EVAL_F, max_non_dec=lrn['non_dec'],
                          verbose=VERBOSE)
        epochs, _, _ = model.fit(data.train_X, data.train_Y, data.val_X, data.val_Y)
        mean_err[i], med_err[i], mse[i] = model.test(data.test_X, data.test_Y)
        print('G: {}, {}-{} ({}): epochs {} - mse {} - MedianErr: {}'
              .format(id, i, exp['type'], model.count_params(), epochs,
                      mse[i], med_err[i]))
    return mean_err, med_err, mse


if __name__ == '__main__':
    # Set seeds
    np.random.seed(SEED)
    manual_seed(SEED)

    # Graphs parameters
Example #8
def run(id, Gs, signals, lrn, p_n):
    Gx, Gy = ds.perturbated_graphs(Gs['params'],
                                   Gs['pct_val'][0],
                                   Gs['pct_val'][1],
                                   pct=Gs['pct'],
                                   seed=SEED)
    data = ds.LinearDS2GS(Gx,
                          Gy,
                          signals['samples'],
                          signals['L'],
                          signals['deltas'],
                          median=signals['median'],
                          same_coeffs=signals['same_coeffs'])
    # data = ds.NonLinearDS2GS(Gx, Gy, signals['samples'], signals['L'],
    #                          signals['deltas'], median=signals['median'],
    #                          same_coeffs=signals['same_coeffs'])
    data.to_unit_norm()
    data.add_noise(p_n, test_only=signals['test_only'])
    median_dist = np.median(np.linalg.norm(data.train_X - data.train_Y,
                                           axis=1))
    print('Signal {}: distance {}'.format(id, median_dist))
    data.to_tensor()

    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    epochs = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        clust_x = gc.MultiResGraphClustering(Gx,
                                             exp['n_enc'],
                                             k=exp['n_enc'][-1],
                                             up_method=exp['downs'])
        clust_y = gc.MultiResGraphClustering(Gy,
                                             exp['n_dec'],
                                             k=exp['n_enc'][-1],
                                             up_method=exp['ups'])
        net = GraphEncoderDecoder(exp['f_enc'],
                                  clust_x.sizes,
                                  clust_x.Ds,
                                  exp['f_dec'],
                                  clust_y.sizes,
                                  clust_y.Us,
                                  exp['f_conv'],
                                  As_dec=clust_y.As,
                                  As_enc=clust_x.As,
                                  act_fn=lrn['af'],
                                  last_act_fn=lrn['laf'],
                                  ups=exp['ups'],
                                  downs=exp['downs'])
        model = Model(net,
                      learning_rate=lrn['lr'],
                      decay_rate=lrn['dr'],
                      batch_size=lrn['batch'],
                      epochs=lrn['epochs'],
                      eval_freq=EVAL_F,
                      max_non_dec=lrn['non_dec'],
                      verbose=VERBOSE)
        epochs[i], _, _ = model.fit(data.train_X, data.train_Y, data.val_X,
                                    data.val_Y)
        _, med_err[i], mse[i] = model.test(data.test_X, data.test_Y)
        print('G: {}, {}-{} ({}): epochs {} - mse {} - MedianErr: {}'.format(
            id, i, exp['type'], model.count_params(), epochs[i], mse[i],
            med_err[i]))
    return med_err, mse, epochs
Example #9
def run(id, Gs, Signals, lrn, pert):
    if Gs['params']['type'] == ds.SBM:
        Gx, Gy = ds.nodes_perturbated_graphs(Gs['params'],
                                             pert,
                                             seed=SEED,
                                             perm=True)
    elif Gs['params']['type'] == ds.BA:
        Gx = ds.create_graph(Gs['params'], SEED)
        G_params_y = Gs['params'].copy()
        G_params_y['N'] = Gs['params']['N'] - pert
        Gy = ds.create_graph(G_params_y, 2 * SEED)
    else:
        raise RuntimeError("Choose a valid graph type")
    data = ds.LinearDS2GSNodesPert(Gx,
                                   Gy,
                                   Signals['samples'],
                                   Signals['L'],
                                   Signals['deltas'],
                                   median=Signals['median'],
                                   same_coeffs=Signals['same_coeffs'],
                                   neg_coeffs=Signals['neg_coeffs'])
    data.to_unit_norm()
    data.add_noise(Signals['noise'], test_only=Signals['test_only'])
    data.to_tensor()

    epochs = 0
    params = np.zeros(N_EXPS)
    med_err = np.zeros(N_EXPS)
    mse = np.zeros(N_EXPS)
    for i, exp in enumerate(EXPS):
        if exp['type'] == 'Linear':
            model = LinearModel(exp['N'])
        elif exp['type'] == 'Enc_Dec':
            exp['n_dec'][-1] = Gy.N
            clust_x = gc.MultiResGraphClustering(Gx,
                                                 exp['n_enc'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['downs'])
            clust_y = gc.MultiResGraphClustering(Gy,
                                                 exp['n_dec'],
                                                 k=exp['n_enc'][-1],
                                                 up_method=exp['ups'])
            net = GraphEncoderDecoder(exp['f_enc'],
                                      clust_x.sizes,
                                      clust_x.Ds,
                                      exp['f_dec'],
                                      clust_y.sizes,
                                      clust_y.Us,
                                      exp['f_conv'],
                                      As_dec=clust_y.As,
                                      K_dec=exp['K_dec'],
                                      K_enc=exp['K_enc'],
                                      As_enc=clust_x.As,
                                      act_fn=lrn['af'],
                                      last_act_fn=lrn['laf'],
                                      ups=exp['ups'],
                                      downs=exp['downs'])
        elif exp['type'] == 'AutoConv':
            conv = exp['convs'][PERT.index(pert)]
            net = ConvAutoencoder(conv['f_enc'], conv['kernel_enc'],
                                  conv['f_dec'], conv['kernel_dec'])
        elif exp['type'] == 'AutoFC':
            exp['n_dec'][-1] = Gy.N
            net = FCAutoencoder(exp['n_enc'], exp['n_dec'], bias=exp['bias'])
        else:
            raise RuntimeError('Unknown experiment type')
        if exp['type'] != 'Linear':
            model = Model(net,
                          learning_rate=lrn['lr'],
                          decay_rate=lrn['dr'],
                          batch_size=lrn['batch'],
                          epochs=lrn['epochs'],
                          eval_freq=EVAL_F,
                          max_non_dec=lrn['non_dec'],
                          verbose=VERBOSE,
                          early_stop=exp['early_stop'])
        epochs, _, _ = model.fit(data.train_X, data.train_Y, data.val_X,
                                 data.val_Y)
        _, med_err[i], mse[i] = model.test(data.test_X, data.test_Y)
        params[i] = model.count_params()
        print('G: {}, {}-{} ({}): epochs {} - mse {} - MedianErr: {}'.format(
            id, i, exp['type'], params[i], epochs, mse[i], med_err[i]))
    return params, med_err, mse