def get_discrete_univariate_leja_quadrature_rule(variable, growth_rule):
    var_type, __, shapes = get_distribution_info(variable)
    if var_type == 'binom':
        from scipy.stats import binom as binomial_rv
        # shapes returned by get_distribution_info uses scipy's parameter
        # names for the binomial distribution
        num_trials = shapes['n']
        prob_success = shapes['p']

        def generate_candidate_samples(num_samples):
            assert num_samples == num_trials + 1
            return np.arange(0, num_trials + 1)[np.newaxis, :]

        recursion_coeffs = krawtchouk_recurrence(num_trials,
                                                 num_trials,
                                                 probability=True)
        quad_rule = partial(candidate_based_christoffel_leja_rule_1d,
                            recursion_coeffs,
                            generate_candidate_samples,
                            num_trials + 1,
                            growth_rule=growth_rule,
                            initial_points=np.atleast_2d([
                                binomial_rv.ppf(0.5, num_trials, prob_success)
                            ]))
    elif var_type == 'float_rv_discrete' or var_type == 'discrete_chebyshev':
        from pyapprox.numerically_generate_orthonormal_polynomials_1d import \
            modified_chebyshev_orthonormal
        from pyapprox.orthonormal_polynomials_1d import \
            discrete_chebyshev_recurrence
        nmasses = shapes['xk'].shape[0]
        if var_type == 'discrete_chebyshev':
            xk = shapes['xk']  #do not map discrete_chebyshev
            assert np.allclose(shapes['xk'], np.arange(nmasses))
            assert np.allclose(shapes['pk'], np.ones(nmasses) / nmasses)
            num_coefs = nmasses
            recursion_coeffs = discrete_chebyshev_recurrence(
                num_coefs, nmasses)
        else:
            #shapes['xk'] will be in [0,1] but canonical domain is [-1,1]
            xk = shapes['xk'] * 2 - 1
            num_coefs = nmasses
            recursion_coeffs = modified_chebyshev_orthonormal(
                num_coefs, [xk, shapes['pk']])

        def generate_candidate_samples(num_samples):
            assert num_samples == nmasses
            return xk[np.newaxis, :]

        # do not specify init_samples in the partial, otherwise a sparse grid
        # cannot update samples_1d so that the next level reuses the same
        # samples_1d
        # TODO: add a test that samples_1d[ii] is a subset of samples_1d[ii+1]
        #init_samples = np.atleast_2d(np.sort(xk)[nmasses//2])
        quad_rule = partial(candidate_based_christoffel_leja_rule_1d,
                            recursion_coeffs,
                            generate_candidate_samples,
                            nmasses,
                            growth_rule=growth_rule)
    else:
        raise Exception('var_type %s not implemented' % var_type)
    return quad_rule
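
# A minimal usage sketch (an illustrative assumption, not part of the
# original listing): `variable` is a frozen scipy.stats binomial variable
# and `growth_rule` is any callable mapping a level to a number of points.
# The returned `quad_rule` is the partial built above, so the remaining
# positional argument is the level of the rule.
from scipy import stats

variable = stats.binom(10, 0.5)  # num_trials=10, prob_success=0.5


def growth_rule(level):
    # hypothetical linear growth: number of points at each level
    return level + 1


quad_rule = get_discrete_univariate_leja_quadrature_rule(variable, growth_rule)
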
Example #2
 def get_recursion_coefficients(self, opts, num_coefs):
     poly_type = opts.get('poly_type', None)
     var_type = None
     if poly_type is None:
         var_type = opts['rv_type']
     if poly_type == 'legendre' or var_type == 'uniform':
         recursion_coeffs = jacobi_recurrence(num_coefs,
                                              alpha=0,
                                              beta=0,
                                              probability=True)
     elif poly_type == 'jacobi' or var_type == 'beta':
         if poly_type is not None:
             alpha_poly, beta_poly = opts['alpha_poly'], opts['beta_poly']
         else:
             alpha_poly, beta_poly = opts['shapes']['b'] - 1, opts[
                 'shapes']['a'] - 1
         recursion_coeffs = jacobi_recurrence(num_coefs,
                                              alpha=alpha_poly,
                                              beta=beta_poly,
                                              probability=True)
     elif poly_type == 'hermite' or var_type == 'norm':
         recursion_coeffs = hermite_recurrence(num_coefs,
                                               rho=0.,
                                               probability=True)
     elif poly_type == 'krawtchouk' or var_type == 'binom':
         if poly_type is None:
             opts = opts['shapes']
         n, p = opts['n'], opts['p']
         num_coefs = min(num_coefs, n)
         recursion_coeffs = krawtchouk_recurrence(num_coefs, n, p)
     elif poly_type == 'hahn' or var_type == 'hypergeom':
         if poly_type is not None:
             apoly, bpoly = opts['alpha_poly'], opts['beta_poly']
             N = opts['N']
         else:
             M, n, N = [opts['shapes'][key] for key in ['M', 'n', 'N']]
             apoly, bpoly = -(n + 1), -M - 1 + n
         num_coefs = min(num_coefs, N)
         recursion_coeffs = hahn_recurrence(num_coefs, N, apoly, bpoly)
     elif poly_type == 'discrete_chebyshev' or var_type == 'discrete_chebyshev':
         if poly_type is not None:
             N = opts['N']
         else:
             N = opts['shapes']['xk'].shape[0]
             assert np.allclose(opts['shapes']['xk'], np.arange(N))
             assert np.allclose(opts['shapes']['pk'], np.ones(N) / N)
         num_coefs = min(num_coefs, N)
         recursion_coeffs = discrete_chebyshev_recurrence(num_coefs, N)
     elif poly_type == 'discrete_numeric' or var_type == 'float_rv_discrete':
         if poly_type is None:
             opts = opts['shapes']
         xk, pk = opts['xk'], opts['pk']
         #shapes['xk'] will be in [0,1] but canonical domain is [-1,1]
         xk = xk * 2 - 1
         assert xk.min() >= -1 and xk.max() <= 1
         if num_coefs > xk.shape[0]:
             msg = 'Number of coefs requested is larger than number of '
             msg += 'probability masses'
             raise Exception(msg)
         recursion_coeffs = modified_chebyshev_orthonormal(
             num_coefs, [xk, pk])
         p = evaluate_orthonormal_polynomial_1d(np.asarray(xk, dtype=float),
                                                num_coefs - 1,
                                                recursion_coeffs)
         error = np.absolute((p.T * pk).dot(p) - np.eye(num_coefs)).max()
         if error > self.numerically_generated_poly_accuracy_tolerance:
             msg = 'basis created is ill-conditioned. '
             msg += f'Max error: {error}. Max terms: {xk.shape[0]}, '
             msg += f'Terms requested: {num_coefs}'
             raise Exception(msg)
     elif poly_type == 'monomial':
         recursion_coeffs = None
     else:
         if poly_type is not None:
             raise Exception('poly_type (%s) not supported' % poly_type)
         else:
             raise Exception('var_type (%s) not supported' % var_type)
     return recursion_coeffs
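
# Hypothetical `opts` dictionaries accepted by the method above (illustrative
# sketches only; the enclosing class is not shown, so just the dictionaries
# are constructed here):
norm_opts = {'rv_type': 'norm'}                    # Askey scheme -> Hermite
jacobi_opts = {'poly_type': 'jacobi',              # poly_type overrides rv_type
               'alpha_poly': 1.0, 'beta_poly': 1.0}
binom_opts = {'rv_type': 'binom',                  # -> Krawtchouk recurrence
              'shapes': {'n': 10, 'p': 0.5}}
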
Example #3
def get_recursion_coefficients(
        opts,
        num_coefs,
        numerically_generated_poly_accuracy_tolerance=1e-12):
    """
    Parameters
    ----------
    num_coefs : integer
        The number of recursion coefficients desired

    numerically_generated_poly_accuracy_tolerance : float
        Tolerance used to construct any numerically generated polynomial
        basis functions.

    opts : dictionary
        Dictionary with the following attributes

    rv_type : string
        The type of variable associated with the polynomial. If poly_type
        is not provided then the recursion coefficients are selected
        using the Askey scheme, e.g. uniform -> legendre, norm -> hermite.
        rv_type is assumed to be the name of a scipy.stats distribution,
        e.g. for a Gaussian variable rv_type = norm(0, 1).dist.name,
        i.e. 'norm'.

    poly_type : string
        The type of polynomial which overrides rv_type. Supported types are
        ['legendre', 'hermite', 'jacobi', 'krawtchouk', 'hahn',
        'discrete_chebyshev', 'discrete_numeric', 'continuous_numeric',
        'function_indpnt_vars', 'product_indpnt_vars', 'monomial']
        Note 'monomial' does not produce an orthogonal basis

    The remaining options are specific to rv_type and poly_type. See
     - :func:`pyapprox.univariate_quadrature.get_jacobi_recursion_coefficients`
     - :func:`pyapprox.univariate_quadrature.get_function_independent_vars_recursion_coefficients`
     - :func:`pyapprox.univariate_quadrature.get_product_independent_vars_recursion_coefficients`
    
        Note Legendre is just a special instance of a Jacobi polynomial with
        alpha_poly, beta_poly = 0, 0 and alpha_stat, beta_stat = 1, 1

    Returns
    -------
    recursion_coeffs : np.ndarray (num_coefs, 2)
    """

    # variables that require numerically generated polynomials with
    # predictor corrector method
    from scipy import stats
    from scipy.stats import _continuous_distns

    poly_type = opts.get('poly_type', None)
    var_type = None
    if poly_type is None:
        var_type = opts['rv_type']
    if poly_type == 'legendre' or var_type == 'uniform':
        recursion_coeffs = jacobi_recurrence(
            num_coefs, alpha=0, beta=0, probability=True)
    elif poly_type == 'jacobi' or var_type == 'beta':
        recursion_coeffs = get_jacobi_recursion_coefficients(
            poly_type, opts, num_coefs)
    elif poly_type == 'hermite' or var_type == 'norm':
        recursion_coeffs = hermite_recurrence(
            num_coefs, rho=0., probability=True)
    elif poly_type == 'krawtchouk' or var_type == 'binom':
        # although bounded, the Krawtchouk polynomials are not defined on
        # the canonical domain [-1, 1]; instead the user and canonical
        # domains are the same
        if poly_type is None:
            opts = opts['shapes']
        n, p = opts['n'], opts['p']
        num_coefs = min(num_coefs, n)
        recursion_coeffs = krawtchouk_recurrence(
            num_coefs, n, p)
    elif poly_type == 'hahn' or var_type == 'hypergeom':
        # although bounded, the Hahn polynomials are not defined on the
        # canonical domain [-1, 1]; instead the user and canonical domains
        # are the same
        if poly_type is not None:
            apoly, bpoly = opts['alpha_poly'], opts['beta_poly']
            N = opts['N']
        else:
            M, n, N = [opts['shapes'][key] for key in ['M', 'n', 'N']]
            apoly, bpoly = -(n+1), -M-1+n
        num_coefs = min(num_coefs, N)
        recursion_coeffs = hahn_recurrence(num_coefs, N, apoly, bpoly)
        # support of the hypergeometric distribution (currently unused here)
        xk = np.arange(max(0, N-M+n), min(n, N)+1, dtype=float)
    elif poly_type == 'discrete_chebyshev' or var_type == 'discrete_chebyshev':
        # although bounded, the discrete Chebyshev polynomials are not
        # defined on the canonical domain [-1, 1]; instead the user and
        # canonical domains are the same
        if poly_type is not None:
            N = opts['N']
        else:
            N = opts['shapes']['xk'].shape[0]
            assert np.allclose(opts['shapes']['xk'], np.arange(N))
            assert np.allclose(opts['shapes']['pk'], np.ones(N)/N)
        num_coefs = min(num_coefs, N)
        recursion_coeffs = discrete_chebyshev_recurrence(num_coefs, N)
    elif poly_type == 'discrete_numeric' or var_type == 'float_rv_discrete':
        if poly_type is None:
            opts = opts['shapes']
        xk, pk = opts['xk'], opts['pk']
        # shapes['xk'] will be in [0, 1] but canonical domain is [-1, 1]
        xk = xk*2-1
        assert xk.min() >= -1 and xk.max() <= 1
        if num_coefs > xk.shape[0]:
            msg = 'Number of coefs requested is larger than number of '
            msg += 'probability masses'
            raise Exception(msg)
        #recursion_coeffs = modified_chebyshev_orthonormal(num_coefs, [xk, pk])
        recursion_coeffs = lanczos(xk, pk, num_coefs)
        p = evaluate_orthonormal_polynomial_1d(
            np.asarray(xk, dtype=float), num_coefs-1, recursion_coeffs)
        error = np.absolute((p.T*pk).dot(p)-np.eye(num_coefs)).max()
        if error > numerically_generated_poly_accuracy_tolerance:
            msg = 'basis created is ill-conditioned. '
            msg += f'Max error: {error}. Max terms: {xk.shape[0]}, '
            msg += f'Terms requested: {num_coefs}'
            raise Exception(msg)
    elif (poly_type == 'continuous_numeric' or
          var_type == 'continuous_rv_sample'):
        if poly_type is None:
            opts = opts['shapes']
        xk, pk = opts['xk'], opts['pk']
        if num_coefs > xk.shape[0]:
            msg = 'Number of coefs requested is larger than number of '
            msg += 'samples'
            raise Exception(msg)
        #print(num_coefs)
        #recursion_coeffs = modified_chebyshev_orthonormal(num_coefs, [xk, pk])
        #recursion_coeffs = lanczos(xk, pk, num_coefs)
        recursion_coeffs = predictor_corrector(
            num_coefs, (xk, pk), xk.min(), xk.max(),
            interval_size=xk.max()-xk.min())
        p = evaluate_orthonormal_polynomial_1d(
            np.asarray(xk, dtype=float), num_coefs-1, recursion_coeffs)
        error = np.absolute((p.T*pk).dot(p)-np.eye(num_coefs)).max()
        if error > numerically_generated_poly_accuracy_tolerance:
            msg = 'basis created is ill-conditioned. '
            msg += f'Max error: {error}. Max terms: {xk.shape[0]}, '
            msg += f'Terms requested: {num_coefs}'
            raise Exception(msg)
    elif poly_type == 'monomial':
        recursion_coeffs = None
    elif var_type in _continuous_distns._distn_names:
        quad_options = {
            'nquad_samples': 10,
            'atol': numerically_generated_poly_accuracy_tolerance,
            'rtol': numerically_generated_poly_accuracy_tolerance,
            'max_steps': 10000, 'verbose': 0}
        rv = getattr(stats, var_type)(**opts['shapes'])
        recursion_coeffs = predictor_corrector_known_scipy_pdf(
            num_coefs, rv, quad_options)
    elif poly_type == 'function_indpnt_vars':
        recursion_coeffs = get_function_independent_vars_recursion_coefficients(
            opts, num_coefs)
    elif poly_type == 'product_indpnt_vars':
        recursion_coeffs = get_product_independent_vars_recursion_coefficients(
            opts, num_coefs)
    else:
        if poly_type is not None:
            raise Exception('poly_type (%s) not supported' % poly_type)
        else:
            raise Exception('var_type (%s) not supported' % var_type)
    return recursion_coeffs
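
# A minimal usage sketch for the standalone function above (an illustrative
# assumption, not part of the original listing): recursion coefficients can
# be selected via the Askey scheme from `rv_type`, or by specifying an
# explicit `poly_type`.
uniform_coeffs = get_recursion_coefficients({'rv_type': 'uniform'}, 5)
beta_coeffs = get_recursion_coefficients(
    {'rv_type': 'beta', 'shapes': {'a': 2.0, 'b': 3.0}}, 5)
legendre_coeffs = get_recursion_coefficients({'poly_type': 'legendre'}, 5)
# each call is expected to return an array of shape (5, 2) holding the
# three-term recurrence coefficients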