Example #1
def speech_fourier_3be(ker_sigma,
                       regu,
                       upper_freqs,
                       domain=np.array([[0, 1]])):
    # Fourier output basis
    output_basis_params = {
        "lower_freq": 0,
        "upper_freq": upper_freqs,
        "domain": domain
    }
    output_bases = configs_generation.subconfigs_combinations(
        "fourier", output_basis_params, exclude_list=["domain"])
    # Sum of Gaussian kernels
    ker_sigmas = ker_sigma * np.ones(13)
    gauss_kers = [
        kernels.GaussianScalarKernel(sig, normalize=False, normalize_dist=True)
        for sig in ker_sigmas
    ]
    multi_ker = kernels.SumOfScalarKernel(gauss_kers, normalize=False)
    # Generate full configs
    params = {
        "kernel": multi_ker,
        "basis_out": output_bases,
        "regu": regu,
        "center_output": True
    }
    configs = configs_generation.configs_combinations(params)
    # Instantiate one regressor per generated configuration
    regs = [triple_basis.BiBasisEstimator(**config) for config in configs]
    return configs, regs
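
A minimal usage sketch (argument values are illustrative, not from the source), assuming numpy and the project modules kernels, configs_generation and triple_basis are already imported, and assuming configs_combinations expands list-valued parameters such as regu and upper_freqs into a hyperparameter grid:

configs, regs = speech_fourier_3be(ker_sigma=1.0,
                                   regu=[1e-6, 1e-4],
                                   upper_freqs=[10, 20])
# one BiBasisEstimator per generated configuration
assert len(regs) == len(configs)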
Example #2
def speech_fpca_3be(ker_sigma,
                    regu,
                    n_fpca,
                    n_evals_fpca,
                    domain=np.array([[0, 1]])):
    # FPCA output basis
    output_basis_params = {
        "n_basis": n_fpca,
        "input_dim": 1,
        "domain": domain,
        "n_evals": n_evals_fpca
    }
    output_bases = configs_generation.subconfigs_combinations(
        "functional_pca", output_basis_params, exclude_list=["domain"])
    # Sum of Gaussian kernels
    ker_sigmas = ker_sigma * np.ones(13)
    gauss_kers = [
        kernels.GaussianScalarKernel(sig, normalize=False, normalize_dist=True)
        for sig in ker_sigmas
    ]
    multi_ker = kernels.SumOfScalarKernel(gauss_kers, normalize=False)
    # Generate full configs
    params = {
        "kernel": multi_ker,
        "basis_out": output_bases,
        "regu": regu,
        "center_output": True
    }
    configs = configs_generation.configs_combinations(params)
    # Instantiate one regressor per generated configuration
    regs = [triple_basis.BiBasisEstimator(**config) for config in configs]
    return configs, regs
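
The FPCA variant follows the same pattern; a hedged sketch with placeholder values, where n_evals_fpca is the number of evaluation points used to estimate the FPCA basis (as the config keys suggest):

configs, regs = speech_fpca_3be(ker_sigma=1.0, regu=1e-5,
                                n_fpca=[20, 30], n_evals_fpca=300)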
Example #3
def kernel_generator_ke_speech(kx_sigma):
    # Iterable of bandwidths: build one sum-of-Gaussians kernel per bandwidth
    if isinstance(kx_sigma, Iterable):
        multi_sigs = [sig * np.ones(13) for sig in kx_sigma]
        bases_kers = [[
            kernels.GaussianScalarKernel(sig,
                                         normalize=False,
                                         normalize_dist=True)
            for sig in multi_sig
        ] for multi_sig in multi_sigs]
        kxs = [
            kernels.SumOfScalarKernel(base_ker, normalize=False)
            for base_ker in bases_kers
        ]
    # Single bandwidth: build one sum-of-Gaussians kernel
    else:
        multi_sig = kx_sigma * np.ones(13)
        base_ker = [
            kernels.GaussianScalarKernel(sig,
                                         normalize=False,
                                         normalize_dist=True)
            for sig in multi_sig
        ]
        kxs = kernels.SumOfScalarKernel(base_ker, normalize=False)
    return kxs
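
A short sketch of both branches with made-up bandwidths: a single float yields one SumOfScalarKernel, an iterable yields a list with one kernel per bandwidth:

kx_single = kernel_generator_ke_speech(1.0)
kx_grid = kernel_generator_ke_speech([0.5, 1.0, 2.0])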
Example #4
def kernels_generator_gauss_fkrr_speech(kin_sigma, kout_sigma):
    # Input kernel: sum of 13 Gaussian kernels sharing the same bandwidth
    kin_sigmas = kin_sigma * np.ones(13)
    gauss_kers = [
        kernels.GaussianScalarKernel(sig, normalize=False, normalize_dist=True)
        for sig in kin_sigmas
    ]
    kernels_in = kernels.SumOfScalarKernel(gauss_kers, normalize=False)
    # Output kernel(s): one Gaussian kernel per bandwidth if an iterable is given
    if isinstance(kout_sigma, Iterable):
        kernels_out = [
            kernels.GaussianScalarKernel(sig, normalize=False)
            for sig in kout_sigma
        ]
    else:
        kernels_out = kernels.GaussianScalarKernel(kout_sigma, normalize=False)
    return kernels_in, kernels_out
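
Analogous sketch for the kernel pair used by FKRR (bandwidth values are placeholders); kernels_out is a list only when kout_sigma is iterable:

kin, kout = kernels_generator_gauss_fkrr_speech(kin_sigma=1.0,
                                                kout_sigma=[0.05, 0.1])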
Example #5
def speech_rffs_kpl(kernel_sigma,
                    regu,
                    n_rffs,
                    rffs_sigma,
                    seed_rffs,
                    center_output,
                    domain=np.array([[0, 1]])):
    # Random Fourier features output basis
    output_basis_params = {
        "n_basis": n_rffs,
        "bandwidth": rffs_sigma,
        "input_dim": 1,
        "domain": domain,
        "seed": seed_rffs
    }
    output_bases = configs_generation.subconfigs_combinations(
        "random_fourier", output_basis_params, exclude_list=["domain"])
    # Sum of Gaussian kernels
    kernel_sigmas = kernel_sigma * np.ones(13)
    gauss_kers = [
        kernels.GaussianScalarKernel(sig, normalize=False, normalize_dist=True)
        for sig in kernel_sigmas
    ]
    multi_ker = kernels.SumOfScalarKernel(gauss_kers, normalize=False)
    # Identity output matrix (no weighting of the output coefficients)
    output_matrix_params = {}
    output_matrices = configs_generation.subconfigs_combinations(
        "eye", output_matrix_params)
    # Generate full configs
    params = {
        "kernel": multi_ker,
        "B": output_matrices,
        "basis_out": output_bases,
        "regu": regu,
        "center_output": center_output
    }
    configs = configs_generation.configs_combinations(params)
    # Instantiate one regressor per generated configuration
    regs = [kproj_learning.SeperableKPL(**config) for config in configs]
    return configs, regs
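
A hedged call of the RFF-based KPL variant; all values below are placeholders, and seed_rffs presumably fixes the random draw of the Fourier features:

configs, regs = speech_rffs_kpl(kernel_sigma=1.0, regu=1e-5,
                                n_rffs=[50, 100], rffs_sigma=20,
                                seed_rffs=453, center_output=True)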
Example #6
def speech_fpca_penpow_kpl(kernel_sigma,
                           regu,
                           n_fpca,
                           n_evals_fpca,
                           decrease_base,
                           domain=np.array([[0, 1]])):
    # FPCA output basis
    output_basis_params = {
        "n_basis": n_fpca,
        "input_dim": 1,
        "domain": domain,
        "n_evals": n_evals_fpca
    }
    output_bases = configs_generation.subconfigs_combinations(
        "functional_pca", output_basis_params, exclude_list=["domain"])
    # Sum of Gaussian kernels
    kernel_sigmas = kernel_sigma * np.ones(13)
    gauss_kers = [
        kernels.GaussianScalarKernel(sig, normalize=False, normalize_dist=True)
        for sig in kernel_sigmas
    ]
    multi_ker = kernels.SumOfScalarKernel(gauss_kers, normalize=False)
    # Power-decay penalization of the output coefficients
    output_matrix_params = {"decrease_base": decrease_base}
    output_matrices = configs_generation.subconfigs_combinations(
        "pow", output_matrix_params)
    # Generate full configs
    params = {
        "kernel": multi_ker,
        "B": output_matrices,
        "basis_out": output_bases,
        "regu": regu,
        "center_output": True
    }
    configs = configs_generation.configs_combinations(params)
    # Instantiate one regressor per generated configuration
    regs = [kproj_learning.SeperableKPL(**config) for config in configs]
    return configs, regs
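
Likewise for the power-penalized FPCA variant; the decrease_base values are illustrative, and each one should yield a separate "pow" output-matrix subconfig:

configs, regs = speech_fpca_penpow_kpl(kernel_sigma=1.0, regu=1e-5,
                                       n_fpca=30, n_evals_fpca=300,
                                       decrease_base=[1.2, 1.5])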