Example #1
            FullyConnected(size=10, activation=None, last_layer=True)
        ]

        model = Model(
            layers=layers,
            num_classes=10,
            optimizer=GDMomentumOptimizer(lr=1e-3, mu=0.9),
        )

        print("\n\n------------------------------------")

        print("Initialize: {}".format(initializer))

        print("\nRun training:\n------------------------------------")

        stats = model.train(data_set=data, method='dfa', num_passes=num_passes, batch_size=50)
        loss, accuracy = model.cost(*data.test_set())

        print("\nResult:\n------------------------------------")
        print('loss on test set: {}'.format(loss))
        print('accuracy on test set: {}'.format(accuracy))

        statistics.append(stats)

    plt.title('Loss')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    for stats in statistics:
        train_loss = scipy.ndimage.gaussian_filter1d(stats['train_loss'], sigma=10)
        plt.plot(np.arange(len(stats['train_loss'])), train_loss)
    plt.legend(labels, loc='upper right')
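The loop above smooths the recorded per-iteration training loss with a 1-D Gaussian filter (sigma=10) before plotting it. A minimal, self-contained illustration of that smoothing step on synthetic data (the synthetic curve is made up; only gaussian_filter1d and sigma=10 come from the snippet):

import numpy as np
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter1d

# Synthetic noisy "training loss" curve, just to illustrate the smoothing step.
raw_loss = np.exp(-np.linspace(0, 5, 500)) + 0.05 * np.random.randn(500)
smoothed = gaussian_filter1d(raw_loss, sigma=10)

plt.plot(raw_loss, alpha=0.3, label='raw')
plt.plot(smoothed, label='gaussian_filter1d, sigma=10')
plt.legend()
plt.show()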
Example #2
    ]

    # -------------------------------------------------------
    # Train with DFA
    # -------------------------------------------------------

    model = Model(
        layers=layers,
        num_classes=10,
        optimizer=GDMomentumOptimizer(lr=1e-3, mu=0.9),
    )

    print("\nRun training:\n------------------------------------")

    stats_shallow = model.train(data_set=data,
                                method='dfa',
                                num_passes=num_iteration,
                                batch_size=64)
    loss, accuracy = model.cost(*data.test_set())

    print("\nResult:\n------------------------------------")
    print('loss on test set: {}'.format(loss))
    print('accuracy on test set: {}'.format(accuracy))

    print("\nTrain statisistics:\n------------------------------------")

    print("time spend during forward pass: {}".format(
        stats_shallow['forward_time']))
    print("time spend during backward pass: {}".format(
        stats_shallow['backward_time']))
    print("time spend during update pass: {}".format(
        stats_shallow['update_time']))
Example #3
    ]

    # -------------------------------------------------------
    # Train with BP
    # -------------------------------------------------------

    model = Model(layers=layers,
                  num_classes=10,
                  optimizer=GDMomentumOptimizer(lr=1e-2, mu=0.9),
                  lr_decay=0.5,
                  lr_decay_interval=7)

    print("\nRun training:\n------------------------------------")

    stats_bp = model.train(data_set=data,
                           method='bp',
                           num_passes=num_iteration,
                           batch_size=64)
    loss, accuracy = model.cost(*data.test_set())

    print("\nResult:\n------------------------------------")
    print('loss on test set: {}'.format(loss))
    print('accuracy on test set: {}'.format(accuracy))

    print("\nTrain statisistics:\n------------------------------------")

    print("time spend during forward pass: {}".format(
        stats_bp['forward_time']))
    print("time spend during backward pass: {}".format(
        stats_bp['backward_time']))
    print("time spend during update pass: {}".format(stats_bp['update_time']))
    print("time spend in total: {}".format(stats_bp['total_time']))
Example #4
    for depth, num_passes in zip(depths, iterations):
        layers = [ConvToFullyConnected()] + \
                 [FullyConnected(size=240, activation=activation.leaky_relu,
                                 weight_initializer=RandomNormal(sigma=np.sqrt(2.0/240))) for _ in range(depth)] + \
                 [FullyConnected(size=10, activation=None, last_layer=True)]
        """ BP """

        model = Model(layers=layers,
                      num_classes=10,
                      optimizer=GDMomentumOptimizer(lr=1e-2, mu=0.9))

        print("\nRun training:\n------------------------------------")

        stats_bp = model.train(data_set=data,
                               method='bp',
                               num_passes=num_passes,
                               batch_size=64)
        loss, accuracy = model.cost(*data.test_set())

        print("\nResult:\n------------------------------------")
        print('loss on test set: {}'.format(loss))
        print('accuracy on test set: {}'.format(accuracy))

        print("\nTrain statisistics:\n------------------------------------")

        print("time spend during forward pass: {}".format(
            stats_bp['forward_time']))
        print("time spend during backward pass: {}".format(
            stats_bp['backward_time']))
        print("time spend during update pass: {}".format(
            stats_bp['update_time']))
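The RandomNormal scale np.sqrt(2.0 / 240) is the He initialization standard deviation sqrt(2 / fan_in) for a 240-unit ReLU-family hidden layer. The depths and iterations lists that drive the loop are not part of the snippet; one plausible, purely illustrative sweep definition:

import numpy as np

# Hypothetical sweep: deeper stacks get more training passes.
depths = [2, 4, 8, 16]
iterations = [10, 10, 20, 20]

# He initialization scale used for every 240-unit hidden layer above.
print(np.sqrt(2.0 / 240))  # ~0.0913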
Example #5
def train():
    with tf.device('/GPU:0'):
        model = Model("data.txt", max_size)
        model.load()
        model.train(50)
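Pinning execution to '/GPU:0' fails on a machine without a visible GPU. A defensive variant of the same function could fall back to the CPU; this is a sketch that keeps the snippet's Model, "data.txt" and max_size as given and only adds the standard TensorFlow 2.x device check:

import tensorflow as tf

def train():
    # Use the first GPU if one is visible, otherwise fall back to the CPU.
    device = '/GPU:0' if tf.config.list_physical_devices('GPU') else '/CPU:0'
    with tf.device(device):
        model = Model("data.txt", max_size)
        model.load()
        model.train(50)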
Example #6
            layers.append(
                FullyConnected(size=10, activation=None, last_layer=True))

            model = Model(
                layers=layers,
                num_classes=10,
                optimizer=GDMomentumOptimizer(lr=1e-3, mu=0.9),
                regularization=0.001,
                # lr_decay=0.5,
                # lr_decay_interval=100
            )

            print("\nRun training:\n------------------------------------")

            stats = model.train(data_set=data,
                                method=train_method,
                                num_passes=3,
                                batch_size=50)
            loss, accuracy = model.cost(*data.test_set())

            print("\nResult:\n------------------------------------")
            print('loss on test set: {}'.format(loss))
            print('accuracy on test set: {}'.format(accuracy))

            statistics.append(stats)
            labels.append('{}, {}'.format(train_method, initializer))

    plt.title('Loss function')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    legends = []
    for stats, label in zip(statistics, labels):