Code Example #1
File: test_schedule.py  Project: rupertsmall/neon
import numpy as np

from neon.optimizers import ExpSchedule


def test_exp_schedule(backend):
    """
    Test exponential learning rate schedule
    """
    lr_init = 0.1
    decay = 0.01
    sch = ExpSchedule(decay)
    for epoch in range(10):
        lr = sch.get_learning_rate(learning_rate=lr_init, epoch=epoch)
        # ExpSchedule scales the base rate by 1 / (1 + decay * epoch)
        assert np.allclose(lr, lr_init / (1. + decay * epoch))
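The assertion above is the whole contract of ExpSchedule: the base rate is divided by (1 + decay * epoch). A minimal, neon-free sketch of that rule for a quick sanity check; the helper name exp_schedule_lr is illustrative only, not part of neon:

def exp_schedule_lr(lr_init, decay, epoch):
    # Decay rule asserted by the test: lr_init / (1 + decay * epoch)
    return lr_init / (1.0 + decay * epoch)

# Epoch 0 keeps the base rate; by epoch 9 it has shrunk to lr_init / 1.09.
assert exp_schedule_lr(0.1, 0.01, 0) == 0.1
assert abs(exp_schedule_lr(0.1, 0.01, 9) - 0.1 / 1.09) < 1e-12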
Code Example #2
    def benchmark(self):
        for d in self.devices:
            b = d if (self.backends is None) or (
                "mkl" not in self.backends) else "mkl"
            print("Use {} as backend.".format(b))

            # Common suffix
            suffix = "neon_{}_{}_{}by{}_{}".format(b, self.dataset,
                                                   self.resize_size[0],
                                                   self.resize_size[1],
                                                   self.preprocessing)

            # Set up backend
            # backend: 'cpu' for single cpu, 'mkl' for cpu using mkl library, and 'gpu' for gpu
            be = gen_backend(backend=b,
                             batch_size=self.batch_size,
                             rng_seed=542,
                             datatype=np.float32)

            # Prepare training/validation/testing sets
            neon_train_set = ArrayIterator(X=np.asarray(
                [t.flatten().astype('float32') / 255 for t in self.x_train]),
                                           y=np.asarray(self.y_train),
                                           make_onehot=True,
                                           nclass=self.class_num,
                                           lshape=(3, self.resize_size[0],
                                                   self.resize_size[1]))
            neon_valid_set = ArrayIterator(X=np.asarray(
                [t.flatten().astype('float32') / 255 for t in self.x_valid]),
                                           y=np.asarray(self.y_valid),
                                           make_onehot=True,
                                           nclass=self.class_num,
                                           lshape=(3, self.resize_size[0],
                                                   self.resize_size[1]))
            neon_test_set = ArrayIterator(X=np.asarray([
                t.flatten().astype('float32') / 255 for t in self.testImages
            ]),
                                          y=np.asarray(self.testLabels),
                                          make_onehot=True,
                                          nclass=self.class_num,
                                          lshape=(3, self.resize_size[0],
                                                  self.resize_size[1]))

            # Initialize model object
            self.neon_model = SelfModel(layers=self.constructCNN())

            # Costs
            neon_cost = GeneralizedCost(costfunc=CrossEntropyMulti())

            # Model summary
            self.neon_model.initialize(neon_train_set, neon_cost)
            print(self.neon_model)

            # Learning rules
            neon_optimizer = SGD(0.01,
                                 momentum_coef=0.9,
                                 schedule=ExpSchedule(0.2))
            # neon_optimizer = RMSProp(learning_rate=0.0001, decay_rate=0.95)

            # # Benchmark for 20 minibatches
            # d[b] = self.neon_model.benchmark(neon_train_set, cost=neon_cost, optimizer=neon_optimizer)

            # Reset model
            # self.neon_model = None
            # self.neon_model = Model(layers=layers)
            # self.neon_model.initialize(neon_train_set, neon_cost)

            # Callbacks: validate on validation set
            callbacks = Callbacks(
                self.neon_model,
                eval_set=neon_valid_set,
                metric=Misclassification(3),
                output_file="./saved_data/{}/{}/callback_data_{}.h5".format(
                    self.network_type, d, suffix))
            callbacks.add_callback(
                SelfCallback(eval_set=neon_valid_set,
                             test_set=neon_test_set,
                             epoch_freq=1))

            # Fit
            start = time.time()
            self.neon_model.fit(neon_train_set,
                                optimizer=neon_optimizer,
                                num_epochs=self.epoch_num,
                                cost=neon_cost,
                                callbacks=callbacks)
            print("Neon training finishes in {:.2f} seconds.".format(
                time.time() - start))

            # Result
            # results = self.neon_model.get_outputs(neon_valid_set)

            # Print error on validation set
            start = time.time()
            neon_error_mis = self.neon_model.eval(
                neon_valid_set, metric=Misclassification()) * 100
            print(
                'Misclassification error = {:.1f}%. Finished in {:.2f} seconds.'
                .format(neon_error_mis[0],
                        time.time() - start))

            # start = time.time()
            # neon_error_top3 = self.neon_model.eval(neon_valid_set, metric=TopKMisclassification(3))*100
            # print('Top 3 Misclassification error = {:.1f}%. Finished in {:.2f} seconds.'.format(neon_error_top3[2], time.time() - start))

            # start = time.time()
            # neon_error_top5 = self.neon_model.eval(neon_valid_set, metric=TopKMisclassification(5))*100
            # print('Top 5 Misclassification error = {:.1f}%. Finished in {:.2f} seconds.'.format(neon_error_top5[2], time.time() - start))

            self.neon_model.save_params("./saved_models/{}/{}/{}.prm".format(
                self.network_type, d, suffix))

            # Print error on test set
            start = time.time()
            neon_error_mis_t = self.neon_model.eval(
                neon_test_set, metric=Misclassification()) * 100
            print(
                'Misclassification error = {:.1f}% on test set. Finished in {:.2f} seconds.'
                .format(neon_error_mis_t[0],
                        time.time() - start))

            # start = time.time()
            # neon_error_top3_t = self.neon_model.eval(neon_test_set, metric=TopKMisclassification(3))*100
            # print('Top 3 Misclassification error = {:.1f}% on test set. Finished in {:.2f} seconds.'.format(neon_error_top3_t[2], time.time() - start))

            # start = time.time()
            # neon_error_top5_t = self.neon_model.eval(neon_test_set, metric=TopKMisclassification(5))*100
            # print('Top 5 Misclassification error = {:.1f}% on test set. Finished in {:.2f} seconds.'.format(neon_error_top5_t[2], time.time() - start))

            cleanup_backend()
            self.neon_model = None
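For reference, the ExpSchedule(0.2) attached to SGD above shrinks the 0.01 base rate to 0.01 / (1 + 0.2 * epoch) at each epoch boundary; a quick arithmetic sketch of the first few values (no neon backend required):

base_lr, decay = 0.01, 0.2
for epoch in range(5):
    # Same decay rule that Example #1 asserts for ExpSchedule
    print(epoch, base_lr / (1.0 + decay * epoch))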
Code Example #3
File: emoptimizers.py  Project: erjel/emdrp
    def get_learning_rate(self, learning_rate, epoch):
        return ExpSchedule.get_learning_rate(self, learning_rate,
                                             round_to(epoch, self.epoch_freq))
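Out of context, this override only makes sense as a method of an ExpSchedule subclass that holds the epoch at multiples of epoch_freq before decaying. Below is a minimal sketch of such a wrapper under that assumption; the class name SteppedExpSchedule and the round_to implementation (floor to the nearest multiple) are illustrative guesses, not taken from emdrp:

from neon.optimizers import ExpSchedule


def round_to(value, step):
    # Assumed helper: floor `value` down to the nearest multiple of `step`
    return (value // step) * step


class SteppedExpSchedule(ExpSchedule):
    """Exponential schedule that only steps every `epoch_freq` epochs (sketch)."""

    def __init__(self, decay, epoch_freq):
        super(SteppedExpSchedule, self).__init__(decay)
        self.epoch_freq = epoch_freq

    def get_learning_rate(self, learning_rate, epoch):
        # Quantize the epoch, then delegate to the stock ExpSchedule decay
        return ExpSchedule.get_learning_rate(self, learning_rate,
                                             round_to(epoch, self.epoch_freq))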