Example no. 1
    def test_initialize(self):
        input_1 = 3  # an int is not a valid parameter value
        with self.assertRaises(TypeError):
            Parameter(input_1)

        input_2 = None  # None is not a valid parameter value
        with self.assertRaises(TypeError):
            Parameter(input_2)
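
Both cases assume that Parameter validates its argument at construction time. A minimal sketch of such a wrapper, accepting only numpy.ndarray values, is shown below; everything beyond the type check is an assumption.

import numpy as np


class Parameter:
    """Trainable tensor wrapper (sketch: only the type check is shown)."""

    def __init__(self, val):
        if not isinstance(val, np.ndarray):
            raise TypeError('val must be a numpy.ndarray')
        self.val = val    # current value
        self.grad = None  # gradient, set manually or by backprop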
Example no. 2
    def test_step(self):
        # test for value
        param = OrderedDict()
        param['test_1'] = Parameter(np.ones(4))
        param['test_1'].grad = np.ones(4)

        optimizer = SGD(param)
        optimizer.step()

        self.assertTrue(
            (param['test_1'].val == np.ones(4) *
             (1. - 0.0001)).all())  # checking with hand calculation
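
The expected value 1 - 0.0001 follows from the plain update val <- val - alpha * grad with grad = 1 and a default learning rate of 1e-4. A minimal sketch consistent with that hand calculation (the default value and attribute names are inferred from the test, not confirmed):

class SGD:
    """Vanilla stochastic gradient descent (sketch)."""

    def __init__(self, parameters, alpha=0.0001):
        self.parameters = parameters  # OrderedDict of Parameter objects
        self.alpha = alpha            # learning rate

    def step(self):
        for p in self.parameters.values():
            p.val = p.val - self.alpha * p.grad  # val <- val - alpha * grad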
Example no. 3
    def test_step(self):
        # test for raising error
        param = OrderedDict()
        param['test_1'] = Parameter(np.ones(4))
        optimizer = Optimizer(param)

        with self.assertRaises(
                NotImplementedError
        ):  # the base Optimizer does not implement the step method
            optimizer.step()
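
The test relies on the base Optimizer declaring step but leaving it unimplemented, so that concrete optimizers must override it. A sketch of that contract:

class Optimizer:
    """Base class for optimizers (sketch)."""

    def __init__(self, parameters):
        self.parameters = parameters

    def step(self):
        # Concrete subclasses (SGD, MomentumSGD, ...) must override this.
        raise NotImplementedError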
Example no. 4
    def test_initialize(self):
        # test for raising error
        # invalid alpha
        param = OrderedDict()
        param['test_1'] = Parameter(np.ones(4))

        with self.assertRaises(TypeError):
            SGD(param, alpha=[0.1])  # wrong type of alpha

        with self.assertRaises(ValueError):
            SGD(param, alpha=-0.1)  # wrong value of alpha
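
The two assertions distinguish a wrong type (a list instead of a float) from a wrong value (a negative learning rate). A sketch of the constructor checks that would make them pass, extending the SGD sketch above; the exact error messages are assumptions:

class SGD:
    """SGD constructor with argument validation (sketch; step as above)."""

    def __init__(self, parameters, alpha=0.0001):
        if not isinstance(alpha, float):
            raise TypeError('alpha must be a float')        # rejects alpha=[0.1]
        if alpha < 0:
            raise ValueError('alpha must be non-negative')  # rejects alpha=-0.1
        self.parameters = parameters
        self.alpha = alpha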
Example no. 5
    def test_step(self):
        # test for value
        param = OrderedDict()
        param['test_1'] = Parameter(np.ones(4))
        param['test_1'].grad = np.ones(4)

        optimizer = MomentumSGD(param)
        optimizer.step()  # first time

        self.assertTrue(
            (param['test_1'].val == np.ones(4) *
             (1. - 0.0001)).all())  # checking with hand calculation

        pre_grad = -np.ones(4) * 0.0001  # velocity after the first step
        grad = -np.ones(4) * 0.0001 + pre_grad * 0.9  # velocity after the second step

        val = param['test_1'].val + grad

        optimizer.step()  # second time

        self.assertTrue((param['test_1'].val == val
                         ).all())  # checking with hand calculation
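
The two checks correspond to the classical momentum update v <- momentum * v - alpha * grad followed by val <- val + v, with alpha = 0.0001 and momentum = 0.9 as assumed defaults. A sketch consistent with both hand-calculated values:

import numpy as np


class MomentumSGD:
    """SGD with classical momentum (sketch)."""

    def __init__(self, parameters, alpha=0.0001, momentum=0.9):
        self.parameters = parameters
        self.alpha = alpha
        self.momentum = momentum
        # one velocity buffer per parameter, initialised to zero
        self.velocities = {k: np.zeros_like(p.val)
                           for k, p in parameters.items()}

    def step(self):
        for key, p in self.parameters.items():
            v = self.momentum * self.velocities[key] - self.alpha * p.grad
            self.velocities[key] = v
            p.val = p.val + v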
Example no. 6
    def test_register_parameters(self):

        input_1 = 0.5  # float

        with self.assertRaises(TypeError):
            self.module.register_parameters(input_1)

        input_2 = np.array([0.1])  # numpy.ndarray

        with self.assertRaises(TypeError):
            self.module.register_parameters(input_2)

        input_3 = Parameter(np.zeros(5))
        self.module.register_parameters(input_3)
        self.assertIs(input_3, self.module.parameters['param_0'])
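
The test expects register_parameters to reject anything that is not a Parameter and to store accepted parameters by reference under an auto-generated key ('param_0' for the first one). A sketch of such a method on a minimal Module container, reusing the Parameter sketch from Example no. 1; the key naming scheme is an assumption based on the single key checked here:

from collections import OrderedDict


class Module:
    """Minimal container for trainable parameters (sketch)."""

    def __init__(self):
        self.parameters = OrderedDict()

    def register_parameters(self, parameter):
        if not isinstance(parameter, Parameter):
            raise TypeError('only Parameter instances can be registered')
        # stored by reference, hence the identity check in the test
        key = 'param_{}'.format(len(self.parameters))
        self.parameters[key] = parameter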
Example no. 7
    def test_initialize_numerical_gradient(self):
        # test for type(x)
        input_1_param = 3.
        input_1_fn = lambda weight: None

        with self.assertRaises(TypeError):
            numerical_gradient(input_1_param, input_1_fn)

        # test for type(f)
        input_2_param = OrderedDict()
        input_2_param['test_1'] = Parameter(np.ones(4))
        input_2_fn = 4.

        with self.assertRaises(TypeError):
            numerical_gradient(input_2_param, input_2_fn)
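
Both failure cases are about argument validation: the first argument must be an OrderedDict of Parameter objects and the second a callable. A sketch of the checks that would make these assertions pass; the finite-difference computation itself is omitted:

from collections import OrderedDict


def numerical_gradient(params, f):
    """Finite-difference gradients for the Parameters in params (sketch)."""
    if not isinstance(params, OrderedDict):
        raise TypeError('params must be an OrderedDict of Parameter objects')
    if not callable(f):
        raise TypeError('f must be callable')
    # ... perturb each entry and apply central differences,
    #     (f(x + eps) - f(x - eps)) / (2 * eps), to fill in the gradients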