Example no. 1
    def fit(self, x, t):
        """Fit the model to inputs x and targets t using the configured solver."""
        # initialise the flattened weight vector: one weight per (input feature, output dimension) pair
        self.w = np.zeros([x.shape[1] * t.shape[1]])

        # dispatch to the selected optimisation routine
        if self.solver == 'gradient':
            solver.gradient_descent(self, x, t, self.alpha, self.e, self.verbose)
        elif self.solver == 'newton':
            solver.newton(self, x, t, self.e, self.verbose)
Example no. 2
def find_oscillator_eigenvalue(e0):
    """Finding the eigenvalue for harmonic oscillator potential by iterative gradient descent method.

    Author
    ------
    Paweł Czyż, Date: 01/05/2018

    Parameters
    ----------
    e0 : float
        positive initial guess for the eigenvalue

    Returns
    -------
    float
        the eigenvalue of the harmonic-oscillator system near the initial guess

    Example
    -------
    >>> print(find_oscillator_eigenvalue(1.2))
    0.98
    >>> print(find_oscillator_eigenvalue(3.4))
    3.0
    """
    # quantum number implied by the initial guess (the eigenvalues here are E_n = 2n + 1)
    n = int(0.5 * (e0 - 1))

    def loss(et):
        # numerically solve for the wavefunction at trial energy et
        x, y = solve_numerical_method(0.05, 5, n, et)
        # penalise a non-vanishing solution at the end of the integration range
        return (y[-1] / 100)**2

    # minimise the loss by gradient descent, starting from the initial guess e0
    e_found, _ = gradient_descent(loss, e0, step=2e-5, dx=0.05, n_steps=200)
    return e_found
Example no. 3
    def test_basic(self):
        """Test if a minimum of a square function is found."""
        xm, vm = gradient_descent(self.loss, 0.2)

        with self.subTest("Point"):
            self.assertLess(abs(xm), 0.05)
        with self.subTest("Value"):
            self.assertEqual(vm, self.loss(xm))
Example no. 4
    def fit(self, y, X):
        """
        Finds weights to fit the data to the model

        :param y: answers
        :param X: data
        """

        # dimensions
        n, d = X.shape

        # initial weight vector
        self.w = np.zeros(d)

        # fit weights
        self.w, f = solver.gradient_descent(self.function_object, self.w, self.max_evaluations,
                                            self.verbose, y, X)
Example no. 5
    def test_right_minimum(self):
        """Test that the minimum near x = 10 is found."""
        x, _ = gradient_descent(self.loss, 8, step=0.001, n_steps=40)
        self.assertLess(abs(x - 10), 0.05)
Example no. 6
    def test_left_minimum(self):
        """Test that the minimum near x = 0 is found."""
        x, _ = gradient_descent(self.loss, 2, step=0.001, n_steps=50)
        self.assertLess(abs(x), 0.05)
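
test_left_minimum and test_right_minimum evidently share a setUp fixture, not shown above, whose loss has two local minima: one near x = 0 and one near x = 10. A hedged sketch of a fixture with that shape (the particular polynomial is an assumption for illustration, not the actual test code):

    def setUp(self):
        # hypothetical fixture: a double-well loss with minima at x = 0 and x = 10
        self.loss = lambda x: (x ** 2) * ((x - 10) ** 2)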
Example no. 7
    def test_far_away(self):
        """Start from value lying very far from the global maximum."""
        xm, _ = gradient_descent(self.loss, 200, n_steps=40)

        with self.subTest("Point"):
            self.assertLess(abs(xm), 0.05)