def test_convert_orthonormal_polynomials_to_monomials_1d(self):
        """
        Example: orthonormal (probabilists') Hermite polynomials
        deg  monomial coeffs
        0    [1,0,0]
        1    [0,1,0]             1/1*((x-0)*1-1*0)=x
        2    [-1/c,0,1/c]        1/c*((x-0)*x-1*1)=(x**2-1)/c,   c=sqrt(2)
        3    [0,-3/e,0,1/e]      1/d*((x-0)*(x**2-1)/c-c*x)=
                                 1/(c*d)*(x**3-x-c**2*x)=(x**3-3*x)/e,
                                 d=sqrt(3), e=c*d=sqrt(6)
        4    [3/f,0,-6/f,0,1/f]  1/2*((x-0)*(x**3-3*x)/e-d*(x**2-1)/c)=
                                 (x**4-6*x**2+3)/f,              f=2*e=sqrt(24)
        """
        rho = 0.
        degree = 10
        probability_measure = True
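        # recurrence coefficients of the Hermite polynomials orthonormal
        # with respect to the standard Gaussian (probabilists') weight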
        ab = hermite_recurrence(degree + 1,
                                rho,
                                probability=probability_measure)

        basis_mono_coefs = convert_orthonormal_polynomials_to_monomials_1d(
            ab, 4)

        true_basis_mono_coefs = np.zeros((5, 5))
        true_basis_mono_coefs[0, 0] = 1
        true_basis_mono_coefs[1, 1] = 1
        true_basis_mono_coefs[2, [0, 2]] = -1 / np.sqrt(2), 1 / np.sqrt(2)
        true_basis_mono_coefs[3, [1, 3]] = -3 / np.sqrt(6), 1 / np.sqrt(6)
        true_basis_mono_coefs[4, [0, 2, 4]] = (
            np.array([3, -6, 1]) / np.sqrt(24))

        assert np.allclose(basis_mono_coefs, true_basis_mono_coefs)
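
        # The orthonormal probabilists' Hermite polynomials are
        # He_n(x)/sqrt(n!), so the rows of true_basis_mono_coefs can be
        # cross-checked against numpy's (unnormalized) HermiteE basis;
        # a minimal sketch:
        from math import factorial
        for nn in range(5):
            # monomial coefficients of the degree-nn HermiteE polynomial
            herme_mono_coefs = np.polynomial.hermite_e.herme2poly(
                [0] * nn + [1])
            assert np.allclose(herme_mono_coefs / np.sqrt(factorial(nn)),
                               true_basis_mono_coefs[nn, :nn + 1])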

        coefs = np.ones(degree + 1)
        basis_mono_coefs = convert_orthonormal_polynomials_to_monomials_1d(
            ab, degree)
        # weight each basis function's monomial coefficients by its
        # expansion coefficient and sum over the basis
        mono_coefs = np.sum(basis_mono_coefs * coefs[:, np.newaxis], axis=0)

        x = np.linspace(-3, 3, 5)
        p_ortho = evaluate_orthonormal_polynomial_1d(x, degree, ab)
        ortho_vals = p_ortho.dot(coefs)

        from pyapprox.monomial import evaluate_monomial
        mono_vals = evaluate_monomial(
            np.arange(degree + 1)[np.newaxis, :], mono_coefs,
            x[np.newaxis, :])[:, 0]
        assert np.allclose(ortho_vals, mono_vals)

    def test_least_squares_regression(self):
        """
        Use non-linear least squares to estimate the coefficients of the
        function train approximation of a rank-2 trivariate function.
        """
        alpha = 0
        beta = 0
        degree = 5
        num_vars = 3
        rank = 2
        num_samples = 100
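
        # Jacobi polynomials with alpha=beta=0 are the Legendre polynomials,
        # orthonormal with respect to the uniform probability measure on
        # [-1, 1] from which the training samples are drawn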
        recursion_coeffs = jacobi_recurrence(degree + 1,
                                             alpha=alpha,
                                             beta=beta,
                                             probability=True)

        ranks = np.ones((num_vars + 1), dtype=int)
        ranks[1:-1] = rank
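        # function-train ranks: the boundary cores have rank 1 and the
        # interior cores have rank 2, i.e. ranks = [1, 2, 2, 1]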

        def function(samples):
            return np.cos(samples.sum(axis=0))[:, np.newaxis]

        samples = np.random.uniform(-1, 1, (num_vars, num_samples))
        values = function(samples)
        assert values.shape[0] == num_samples

        linear_ft_data = ft_linear_least_squares_regression(samples,
                                                            values,
                                                            degree,
                                                            perturb=None)
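        # the linear fit's parameters are used below as the starting point
        # for the non-linear optimization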

        initial_guess = linear_ft_data[1].copy()

        # test jacobian
        # residual_func = partial(
        #     least_squares_residual, samples, values, linear_ft_data,
        #     recursion_coeffs)
        #
        # jacobian = least_squares_jacobian(
        #     samples, values, linear_ft_data, recursion_coeffs,
        #     initial_guess)
        #
        # finite differences are expensive, so only check the jacobian at a
        # subset of points
        # for ii in range(2):
        #     func = lambda x: residual_func(x)[ii]
        #     assert np.allclose(
        #         scipy.optimize.approx_fprime(initial_guess, func, 1e-7),
        #         jacobian[ii, :])
        lstsq_ft_params = ft_non_linear_least_squares_regression(
            samples, values, linear_ft_data, recursion_coeffs, initial_guess)
        lstsq_ft_data = copy.deepcopy(linear_ft_data)
        lstsq_ft_data[1] = lstsq_ft_params

        num_valid_samples = 100
        validation_samples = np.random.uniform(-1., 1.,
                                               (num_vars, num_valid_samples))
        validation_values = function(validation_samples)

        ft_validation_values = evaluate_function_train(validation_samples,
                                                       lstsq_ft_data,
                                                       recursion_coeffs)
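        # root mean squared error (RMSE) over the validation samples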
        ft_error = np.linalg.norm(
            validation_values - ft_validation_values) / np.sqrt(
                num_valid_samples)
        assert ft_error < 1e-3, ft_error

        # compare against tensor-product linear least squares
        from pyapprox.monomial import monomial_basis_matrix, evaluate_monomial
        from pyapprox.indexing import tensor_product_indices
        indices = tensor_product_indices([degree] * num_vars)
        basis_matrix = monomial_basis_matrix(indices, samples)
        coef = np.linalg.lstsq(basis_matrix, values, rcond=None)[0]
        monomial_validation_values = evaluate_monomial(indices, coef,
                                                       validation_samples)
        monomial_error = np.linalg.norm(
            validation_values - monomial_validation_values) / np.sqrt(
                num_valid_samples)
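        # the function train exploits the low-rank structure of the target
        # function and has far fewer free parameters than the
        # (degree+1)**num_vars tensor-product basis, so it should achieve
        # a smaller validation error from the same training data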
        assert ft_error < monomial_error