Example 1
def experiment_mp_graph_sparsity(model, config, network, dataset, confidence, analysis):
    """Model Propagation / Graph sparsity x Cost."""
    # Sweep the graph sparsity parameter p from 0.0 to 1.0, training with "MP" at each step
    p = 0.0
    a = p  # lower bound of the sweep (used for the x-axis)
    results = []
    while p <= 1:
        print(f"SWITCH: {p}")
        sharedVars.STOP_CONDITION = analysis['SC']
        start_time = time.time()
        p2p = PeerNet(config)
        p2p.network(network, p).init()
        p2p.load_dataset(f"./datasets/{dataset[0]}", df=dataset[1], min_samples=dataset[2], sep=dataset[3])
        show_results = (p == 0.9)  # only display detailed results on the p = 0.9 run
        print(f"PEERS: {sum([len(node.peers) for node in p2p.nodes])}")
        p2p.train(
            model=model,
            pre=dataset[4],
            algorithm="MP",
            params={'confidence': confidence, 'debug': False, 'show_results': show_results, 'epochs': 200},
            analysis=analysis['type']
        )
        print(f"\nSWITCH {p} done in {time.time() - start_time} seconds")
        results.append(p2p.results)
        print(f"COST={p2p.results}")
        p2p.stop()
        p = round(p + 0.1, 1)
    b = p
    # Plotting: one x value per sweep step (np.arange(a, b, 0.1) can produce an extra
    # 12th point at 1.1 due to floating-point rounding, which would mismatch results)
    x = np.linspace(a, b - 0.1, len(results))
    y1 = results
    print(y1)

    save(f"OLD_results/mp_sparsity_{analysis['SC']}_{confidence}", (x, y1))
Example 2
def experiment_cl_iter(model, config, network, dataset, analysis):
    """Collaborative Learning / Iterations x Cost."""

    p2p = PeerNet(config)

    p2p.network(network).init()

    data = f"{DATASETS_FOLDER}/{dataset[0]}"
    p2p.load_dataset(data, df=dataset[1], min_samples=dataset[2], sep=dataset[3])

    start_time = time.time()
    iterations = analysis['iterations']
    sharedVars.STOP_CONDITION = iterations[1]
    p2p.train(
        model=model,
        pre=dataset[4],
        algorithm="CL",
        params={'debug': False, 'show_results': True, 'epochs': 100},
        analysis=analysis['type']
    )
    print(f"\nSWITCH {sharedVars.STOP_CONDITION} done in {time.time() - start_time} seconds")

    # Plotting
    x = range(iterations[0], iterations[1] + 1, iterations[2])
    y = p2p.results
    print("X:")
    print(x)
    print("Y:")
    print(y)
    save(f"OLD_results/cl_iterations_iter_{iterations[1]}", (x, y))
    labels = {'x': "Iterations", 'y': "Cost", 'title': "Iterations X Cost"}
    plots.iterations(x, y, labels)
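plots.iterations is called above but not defined in these examples. A plausible minimal sketch, assuming it is a thin matplotlib wrapper that draws the cost curve against the iteration count; the styling and exact behavior are guesses at the intent, not the project's API:

import matplotlib.pyplot as plt


def iterations(x, y, labels):
    """Plot cost over iterations; `labels` carries the 'x', 'y' and 'title' strings."""
    plt.figure()
    plt.plot(x, y, '-o')
    plt.xlabel(labels['x'])
    plt.ylabel(labels['y'])
    plt.title(labels['title'])
    plt.show()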
Example 3
def byz_metrics():
    """Plot stored MP loss curves: without confidence, with confidence, and with contribution factor."""
    # Data unbalancedness: hard-coded results from earlier runs, kept for reference;
    # the curves actually plotted are re-loaded from disk further down
    x = np.arange(0, 1.04, 0.04)
    a = [20.97522270704118, 21.217673384474576, 20.876454631019296, 21.07826777996672, 21.349780447038157,
         21.393942478768825, 21.532803030065228, 22.657205686154647, 22.29665183020926, 23.027300121823586,
         22.815068476884967, 25.73017628615337, 23.944403332203606, 24.04844875188948, 23.31627348386516,
         25.974550614373058, 23.780625897295366, 24.44231845263808, 25.896590814062524, 24.464179877845577,
         24.831684693585732, 25.468797224581778, 25.438998259407548, 26.469847090726958, 24.18340451113953,
         26.848631982188593]
    b = [20.834537506078362, 21.433614423111706, 22.0139343141721, 22.74375941314483, 22.905046101844015,
         23.129726106403552, 23.35440611096309, 23.918826666255757, 25.65756239676712, 26.420161782205664,
         27.000853838280477, 24.592526709428405, 26.60440273322162, 30.25508491601425, 29.988037864725424,
         26.338992220441718, 28.337370576934077, 29.428795411552308, 29.621630315260447, 30.64375491902934,
         27.363785140923586, 32.34280675793754, 27.237769001749943, 25.916013654476387, 27.107113341901684,
         30.72455463578075]
    c = [21.40099975594494, 22.381069892432663, 22.0139343141721, 21.537095279233423, 22.649897579106515,
         22.535125479758758, 22.144784384827553, 23.30744757975546, 24.50233010582157, 24.183163729342905,
         23.967503769894346, 24.251919436403554, 24.71006291947645, 24.510613321449696, 24.499526616436576,
         25.366167460744677, 24.8212616425998, 24.599566315818226, 23.148352572042786, 25.52913222563778,
         25.586813645708357, 25.824018236341036, 27.8224110516684, 26.550603292850663, 27.892855528971932,
         25.531583204434444]

    # Graph sparsity: hard-coded reference results (a1, b2, c3 are not used below)
    a1 = [15.686966777270044, 16.010875259847474, 19.316491140967777, 21.57121087522782, 22.47766401440054,
          22.842883932879676, 27.634236873516045, 28.35572477967493, 30.766254622484155, 31.78035720014011,
          31.52420093188252, 34.76508207143912, 35.49499611906782, 35.36667615833082, 38.331093734563865,
          40.10024479773742, 40.35297210086058, 39.86334952207632, 42.41329244243512, 43.13970940637516,
          42.10771534859562, 44.11206701013951, 42.27749177807368, 42.538003148177026, 42.94904806697376,
          42.4739993299398, ]
    b2 = [15.344067528659018, 14.703704230041932, 18.101967916102446, 23.790547395495615, 26.218572077454148,
          28.431676973387724, 32.455284586574635, 39.691267270606566, 38.15690411263445, 43.581695474951275,
          43.71638207876443, 48.148541429589145, 44.916286537338856, 48.4026884300349, 53.69972405076261,
          52.71665533661079, 59.408636208264234, 60.03378808584248, 64.64492768338604, 63.260399932357075,
          65.33017469288968, 65.77508494973269, 64.47297896379644, 63.371741518683294, 66.13253708221916,
          62.58157533473175, ]
    c3 = [15.707690213668181, 15.098622551213772, 18.579555214862864, 20.923859528130784, 22.50205320014141,
          24.40904152186545, 27.417010791385295, 30.218323560555007, 31.990325279434156, 35.02001299334454,
          33.555830402865034, 36.67699782981615, 37.96788150302173, 38.06627913014085, 41.08966669323188,
          42.55031007470284, 44.68463634463381, 45.15483678247006, 44.823045978069224, 46.43927294489728,
          45.638875187028745, 47.2930852687084, 46.58889430287625, 45.95703884879189, 44.427697092408366,
          45.5605535372266, ]

    f = [14.879170795699618, 15.144682579117047, 14.990330106270667, 15.182636406344445, 14.944507067911577,
         15.397161710368124, 16.00506915433506, 14.234978263497363, 15.963074843091158, 16.345626592844443,
         15.63737320615790]
    c = [14.290162621326573, 16.11264965334899, 16.642167400277998, 16.829890009970676, 17.122654607689057,
         20.51235593842737, 16.561278432687946, 21.58827265876344, 17.02554882177909, 22.090620759919176,
         19.31665368788403]
    n = [14.570599684609773, 14.86373844206828, 15.101173542879739, 14.153226244863541, 14.313678725978708,
         16.73495917528062, 15.219717354289013, 15.439753880550942, 15.101713512852335, 14.503741257420867,
         15.156022787187679]
    x = np.arange(0, 1.01, 0.1)
    save("data_unbalancedness_50_NCF", (x, n, c, f))

    x, a, b, c = load("data_unbalancedness_50_NCF")

    a = np.poly1d(np.polyfit(x, a, 5))(x)
    b = np.poly1d(np.polyfit(x, b, 5))(x)
    c = np.poly1d(np.polyfit(x, c, 5))(x)

    plt.figure()
    plt.plot(x, np.squeeze(a), '-.', label="MP without confidence")
    plt.plot(x, np.squeeze(b), '--', label="MP with confidence")
    plt.plot(x, np.squeeze(c), '-', label="MP with contribution factor")
    plt.ylabel("Total loss")
    plt.xlabel("Graph density")
    # plt.xlabel("Width $\epsilon$")
    plt.legend(loc='best', shadow=True)
    plt.show()
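The plotting step in byz_metrics smooths each stored curve with a degree-5 least-squares polynomial fit evaluated back on the same grid. A self-contained illustration of that smoothing idea; the noisy series below is synthetic and used for demonstration only:

import numpy as np
import matplotlib.pyplot as plt

# Fit a degree-5 polynomial to a noisy series and evaluate it on the same grid,
# mirroring the np.poly1d(np.polyfit(...)) smoothing used in byz_metrics.
x = np.arange(0, 1.01, 0.1)
rng = np.random.default_rng(0)
raw = 20 + 10 * x + rng.normal(scale=1.0, size=x.size)  # synthetic noisy curve
smooth = np.poly1d(np.polyfit(x, raw, 5))(x)

plt.figure()
plt.plot(x, raw, '.', label="raw points")
plt.plot(x, smooth, '-', label="degree-5 fit")
plt.legend(loc='best', shadow=True)
plt.show()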
Example 4
def experiment_mp_unbalancedness(model, config, network, dataset, confidence, analysis):
    """Model Propagation / Data unbalancedness x Cost"""

    p2p = PeerNet(config)

    # Setup the P2P network and the communication between neighbors
    p2p.network(network).init()

    # Train the model using one of the approaches: "MP", "CL" or "LL"
    sharedVars.STOP_CONDITION = analysis['SC']
    a = p2p.epsilon
    while p2p.epsilon <= 1:
        print(f"SWITCH: {p2p.epsilon}")
        start_time = time.time()
        p2p.load_dataset(f"./datasets/{dataset[0]}", df=dataset[1], min_samples=dataset[2], sep=dataset[3],
                         data_distribution="uniform")
        show_results = p2p.epsilon == 1
        p2p.train(
            model=model,
            pre=dataset[4],
            algorithm="MP",
            params={'confidence': confidence, 'debug': False, 'show_results': show_results},
            analysis=analysis['type']
        )
        print(f"\nSWITCH done in {time.time() - start_time} seconds")

    b = p2p.epsilon
    # Plotting
    x = np.arange(0, 1 + sharedVars.EPSILON_STEP, sharedVars.EPSILON_STEP)
    y = p2p.results
    save(f"OLD_results/mp_epsilon_{analysis['SC']}_{confidence}", (x, y))

    # Train the model using one of the approaches: "MP", "CL" or "LL"
    sharedVars.STOP_CONDITION = analysis['SC']

    # new
    p2p.epsilon = 0.0
    p2p.results = []
    a = p2p.epsilon
    while p2p.epsilon <= 1:
        print(f"SWITCH: {p2p.epsilon}")
        start_time = time.time()
        p2p.load_dataset(f"./datasets/{dataset[0]}", df=dataset[1], min_samples=dataset[2], sep=dataset[3],
                         data_distribution="uniform")
        show_results = p2p.epsilon == 1
        p2p.train(
            model=model,
            pre=dataset[4],
            algorithm="MP",
            params={'confidence': not confidence, 'debug': False, 'show_results': show_results},
            analysis=analysis['type']
        )
        print(f"\nSWITCH done in {time.time() - start_time} seconds")

    b = p2p.epsilon
    # Plotting
    x = np.arange(0, 1 + sharedVars.EPSILON_STEP, sharedVars.EPSILON_STEP)
    y = p2p.results
    save(f"OLD_results/mp_epsilon_{analysis['SC']}_{not confidence}", (x, y))
    # plots
    fileA = f"./results/mp_epsilon_{analysis['SC']}_{confidence}"
    fileB = f"./results/mp_epsilon_{analysis['SC']}_{not confidence}"
    info = {
        'xlabel': "Width ε",
        'ylabel': "Cost",
        'title': "MP with and without confidence w.r.t. data unbalancednesss."
    }
    plots.file_mp_iter(fileA, fileB, info)
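plots.file_mp_iter is referenced by several experiments but not defined in this listing. A hedged sketch of what it plausibly does, assuming it loads two saved (x, y) result files (see the save/load sketch after Example 1) and overlays them; every name and label below is an assumption about intent, not the project's API:

import matplotlib.pyplot as plt


def file_mp_iter(fileA, fileB, info):
    """Overlay two saved (x, y) series, e.g. MP with vs. without confidence."""
    xa, ya = load(fileA)
    xb, yb = load(fileB)
    plt.figure()
    plt.plot(xa, ya, '--', label="with confidence")
    plt.plot(xb, yb, '-', label="without confidence")
    plt.xlabel(info['xlabel'])
    plt.ylabel(info['ylabel'])
    plt.title(info['title'])
    plt.legend(loc='best', shadow=True)
    plt.show()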
Example 5
def experiment_mp_iter(model, config, network, dataset, confidence, analysis):
    """Model Propagation / Iterations x Cost"""

    # init PeerNet with a configuration option
    p2p = PeerNet(config)

    # Setup the P2P network and the communication between neighbors
    p2p.network(network).init()

    # Load and randomly distribute training samples between nodes
    p2p.load_dataset(f"./datasets/{dataset[0]}", df=dataset[1], min_samples=dataset[2], sep=dataset[3])

    start_time = time.time()
    iterations = analysis['iterations']
    sharedVars.STOP_CONDITION = iterations[1]

    p2p.train(
        model=model,
        pre=dataset[4],
        algorithm="MP",
        params={'confidence': confidence, 'debug': False, 'show_results': False},
        analysis=analysis['type']
    )
    print(f"\nSWITCH {sharedVars.STOP_CONDITION} done in {time.time() - start_time} seconds")
    # Plotting
    x = range(iterations[0], iterations[1] + 1, iterations[2])
    y = p2p.results
    # print(y)
    save(f"OLD_results/mp_iterations_{confidence}", (x, y))
    labels = {'x': "Iterations", 'y': "Cost", 'title': "Iterations X Cost"}
    # plots.iterations(x, y, labels)
    # Second run: repeat the same sweep with the confidence flag toggled
    start_time = time.time()
    iterations = analysis['iterations']
    sharedVars.STOP_CONDITION = iterations[1]

    p2p.train(
        model=model,
        pre=dataset[4],
        algorithm="MP",
        params={'confidence': not confidence, 'debug': False, 'show_results': False},
        analysis=analysis['type']
    )
    print(f"\nSWITCH {sharedVars.STOP_CONDITION} done in {time.time() - start_time} seconds")
    # Plotting
    x = range(iterations[0], iterations[1] + 1, iterations[2])
    y = p2p.results
    # print(y)
    save(f"OLD_results/mp_iterations_{not confidence}", (x, y))
    labels = {'x': "Iterations", 'y': "Cost", 'title': "Iterations X Cost"}
    # plots.iterations(x, y, labels)

    fileA = "./results/mp_iterations_True"
    fileB = "./results/mp_iterations_False"
    info = {
        'xlabel': "Iterations",
        # 'ylabel': "Test accuracy",
        'ylabel': "Test cost",
        'title': "MP with and without confidence w.r.t.the number of iterations."
    }
    plots.file_mp_iter(fileA, fileB, info)
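Judging only from how the arguments are unpacked in the functions above, the dataset tuple is read positionally as (filename, df flag, min_samples, sep, preprocessing) and the analysis dict carries the stop condition and the iteration sweep. A purely illustrative invocation; every concrete value below is an assumption, not a configuration taken from the project:

# Hypothetical call; the real model, config and network values depend on PeerNet's API.
config = {'port': 45000}                          # assumed PeerNet configuration
network = 10                                      # assumed network size / topology spec
dataset = ("mnist.csv", True, 100, ",", None)     # (filename, df, min_samples, sep, pre)
analysis = {'type': "iterations", 'SC': 100, 'iterations': (10, 100, 10)}

experiment_mp_iter(model=None, config=config, network=network,
                   dataset=dataset, confidence=True, analysis=analysis)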