Example #1
0
        # NOTE(review): this continues an argument list opened above this
        # chunk — presumably `algorithms.SOFM(...)`; confirm against the
        # full file. Radius-reduction schedule: the divisor grows by 1
        # every 20 epochs.
        reduce_radius_after=20,

        # std=2 means that neighbour neurons will have high learning
        # rates during the first iterations
        std=2,
        # Defines a rate at which parameter `std` will be reduced.
        # Reduction is monotonic and reduces after each epoch.
        # After 50 epochs std = 2 / 2 = 1 and after 100 epochs
        # std = 2 / 3 and so on.
        reduce_std_after=50,

        # Step (or learning rate)
        step=0.3,
        # Defines a rate at which parameter `step` will be reduced.
        # Reduction is monotonic and reduces after each epoch.
        # After 50 epochs step = 0.3 / 2 = 0.15 and after 100 epochs
        # step = 0.3 / 3 = 0.1 and so on.
        reduce_step_after=50,
    )
    sofm.train(data, epochs=20)

    # Colours for the two scatter layers (red, blue).
    red, blue = ('#E24A33', '#348ABD')

    # Raw input samples in blue, trained SOFM neuron positions in red.
    plt.scatter(*data.T, color=blue)
    plt.scatter(*sofm.weight, color=red)

    # Reshape the flat weight matrix into the 2-D grid layout so the
    # edges between neighbouring neurons can be drawn.
    # NOTE(review): assumes sofm.weight has shape
    # (2, GRID_HEIGHT * GRID_WIDTH) — confirm against the SOFM setup above.
    weights = sofm.weight.reshape((2, GRID_HEIGHT, GRID_WIDTH))
    plot_2d_grid(weights, color=red)

    plt.show()
Example #2
0
    # One subplot column per configuration, laid out side by side.
    n_columns = len(configurations)

    plt.figure(figsize=(12, 5))

    for index, conf in enumerate(configurations, start=1):
        # Each `conf` dict supplies 'grid_type', 'title' and
        # 'use_hexagon_grid' (the keys read below); every other SOFM
        # hyperparameter is shared, so only the grid topology differs
        # between subplots.
        sofm = algorithms.SOFM(
            n_inputs=2,
            features_grid=(GRID_HEIGHT, GRID_WIDTH),
            verbose=True,
            shuffle_data=True,
            grid_type=conf['grid_type'],
            # Neighbourhood radius, std and step all shrink on the same
            # 5-epoch schedule.
            learning_radius=8,
            reduce_radius_after=5,
            std=2,
            reduce_std_after=5,
            step=0.3,
            reduce_step_after=5,
        )
        sofm.train(data, epochs=40)

        # `index` is 1-based, matching plt.subplot's numbering.
        plt.subplot(1, n_columns, index)

        plt.title(conf['title'])
        # Input samples in faint blue, trained neuron positions in red.
        # NOTE(review): `red`, `blue` and `data` are defined earlier in
        # the file — confirm they are in scope here.
        plt.scatter(*data.T, color=blue, alpha=0.05)
        plt.scatter(*sofm.weight, color=red)

        # Reshape the flat weight matrix into the 2-D grid so edges
        # between neighbouring neurons can be drawn (hexagonal or
        # rectangular depending on the configuration).
        weights = sofm.weight.reshape((2, GRID_HEIGHT, GRID_WIDTH))
        plot_2d_grid(weights, color=red, hexagon=conf['use_hexagon_grid'])

    plt.show()