Example #1
0
def look_for_nu_max_osc_region(data: np.ndarray, kwargs: Dict) -> ufloat:
    """
    Locates and fits the oscillation region around nu_max on the
    background-divided power spectrum of *data*.
    :param data: Lightcurve dataset
    :param kwargs: Run configuration
    :return: ufloat built from fit parameter popt[2] with a combined
             uncertainty of perr[2] + popt[3] + perr[3]
    """
    bg_result = BackgroundResults(kwargs, runID="FullBackground")
    bg_model = bg_result.createBackgroundModel()

    psd = compute_periodogram(data, kwargs)

    # Sum of the first three model components is treated as the background.
    noise_background = np.sum(bg_model[:3], axis=0)

    flattened = np.divide(psd[1], noise_background)
    # Low-pass cutoff scales with nu_max (empirical power law, x10).
    cutoff = 0.267 * pow(bg_result.nuMax.nominal_value, 0.760) * 10
    flattened = _butter_lowpass_filtfilt(flattened, nyqFreq(data), cutoff)

    # Keep only frequencies within +/- 1 sigma of nu_max.
    lower = (bg_result.nuMax - 1 * bg_result.sigma).nominal_value
    upper = (bg_result.nuMax + 1 * bg_result.sigma).nominal_value
    region = np.logical_and(psd[0] > lower, psd[0] < upper)

    x_region = psd[0][region]
    y_region = flattened[region]

    popt, perr = perform_fit(x_region, y_region, kwargs)

    plot_nu_max_fit(np.array((x_region, y_region)), popt,
                    bg_result.nuMax.nominal_value, kwargs)
    return ufloat(popt[2], (perr[2] + popt[3] + perr[3]))
Example #2
0
def plot_peridogramm_from_timeseries(data: np.ndarray,
                                     kwargs: dict,
                                     add_smoothing: bool = False,
                                     f_list: List[Tuple[float, str]] = None,
                                     bg_model: List[np.ndarray] = None,
                                     plot_name: str = None):
    """
    Converts a timeseries to frequency space and plots the periodogram.
    :param data: Timeseries
    :param kwargs: Run configuration
    :param add_smoothing: Show smoothing
    :param f_list: List of frequency markers
    :param bg_model: Background model components to overplot, if any
    :param plot_name: Name under which the plot is saved, if any
    """
    # Thin wrapper: compute the periodogram and delegate plotting.
    plot_f_space(compute_periodogram(data, kwargs), kwargs, add_smoothing,
                 f_list, bg_model, plot_name)
Example #3
0
def create_files(data: np.ndarray, nyq_f: float, priors: List[List[float]],
                 kwargs: Dict):
    """
    Creates all files for a DIAMONDS run
    :param data: Lightcurve dataset
    :param nyq_f: Nyquist frequency
    :param priors: List of priors
    :param kwargs: Run configuration
    """
    # Hoisted: full_result_path(kwargs) was recomputed for every file below.
    # Assumes full_result_path is a pure function of kwargs (it only builds
    # the result path) -- TODO confirm.
    result_path = full_result_path(kwargs)
    print_int(f"Path: {result_path}", kwargs)
    create_folder(result_path, kwargs)
    create_data(compute_periodogram(data, kwargs), kwargs)
    create_priors(np.array(priors), result_path)
    create_nsmc_configuring_parameters(result_path)
    create_xmeans_configuring_parameters(result_path)
    create_nyquist_frequency(nyq_f, result_path)
Example #4
0
def get_delta_nu(data: np.ndarray, result: BackgroundResults, kwargs):
    """
    Determines delta_nu from the autocorrelation of the oscillation region.
    :param data: Lightcurve dataset
    :param result: Background fit results, providing nu_max and sigma
    :param kwargs: Run configuration
    :return: delta_nu as a ufloat (fit value popt[2], uncertainty perr[2])
    """
    model = result.createBackgroundModel()
    f_data = compute_periodogram(data, kwargs)

    # Sum of the first four model components forms the background.
    background = np.sum(model[:4], axis=0)

    # Initial estimate, used for filtering and for the ACF search window.
    # (Renamed from delta_nu to avoid shadowing the final fitted value.)
    delta_nu_est = _estimateDeltaNu(result.nuMax.nominal_value)

    cleared_data = np.divide(f_data[1], background)
    cleared_data = _butter_lowpass_filtfilt(cleared_data, nyqFreq(data),
                                            delta_nu_est * 10)

    # Restrict to +/- 3 sigma around nu_max.
    mask = np.logical_and(
        f_data[0] > (result.nuMax - 3 * result.sigma).nominal_value,
        f_data[0] < (result.nuMax + 3 * result.sigma).nominal_value)

    f_x = f_data[0][mask]
    f_y = cleared_data[mask]

    plot_f_space(np.array((f_x, f_y)),
                 f_list=[(result.nuMax.nominal_value, "Nu Max")],
                 plot_name="Oscillation_region",
                 kwargs=kwargs)

    _, _, index_min, index_max = _findGaussBoundaries(
        np.array((f_x, f_y)), result.nuMax.nominal_value,
        result.sigma.nominal_value)

    corrs = autocorrelate(f_y)

    # Frequency lag axis of the ACF, vectorized (was a manual fill loop).
    stepFreq = f_x[2] - f_x[1]
    deltaF = np.arange(len(corrs)) * stepFreq

    # Fit only lags near the estimated delta_nu.
    mask = np.logical_and(deltaF > delta_nu_est / 1.4,
                          deltaF < 1.5 * delta_nu_est)
    plot_delta_nu_acf(np.array((deltaF[mask], corrs[mask])), delta_nu_est,
                      kwargs)
    popt, perr = perform_fit(deltaF[mask], corrs[mask], kwargs)
    plot_delta_nu_fit(np.array((deltaF[mask], corrs[mask])), popt, kwargs)

    return ufloat(popt[2], perr[2])
Example #5
0
def compute_fliper_exact(data: np.ndarray, kwargs: Dict) -> Union[float, None]:
    """
    Computes nu_max with the exact FLIPER machine-learning method.
    :param data: Lightcurve dataset (data[0] times in days, data[1] flux --
                 presumably; TODO confirm against loader)
    :param kwargs: Run configuration; must contain T_eff and magnitude
    :return: Predicted nu_max, or None if T_eff or magnitude is missing
    """
    # Both inputs are required; bail out early if either is absent.
    # (Idiom fix: membership test on the dict directly, not on .keys().)
    if internal_teff not in kwargs or internal_mag_value not in kwargs:
        return None
    T_eff = kwargs[internal_teff]
    mag = kwargs[internal_mag_value]

    # Median cadence in minutes. If well below 30 minutes (short cadence),
    # rebin by 30 to approximate long-cadence sampling.
    median_cadence_min = np.median(data[0][1:] - data[0][:-1]) * 24 * 60
    if median_cadence_min < 10:  # arbitrary, far below 30 minutes, but a bit above 1
        temp_data = np.array((rebin(data[0], 30), rebin(data[1], 30)))
    else:
        temp_data = data

    data_f = compute_periodogram(temp_data, kwargs)

    # Frequency cuts: everything below 277 uHz, with lower bounds at the
    # 20-day and 80-day periods (converted to uHz).
    mask = data_f[0] < 277
    mask_20 = np.logical_and(mask, data_f[0] > (10**6 / (20 * 24 * 3600)))
    mask_80 = np.logical_and(mask, data_f[0] > (10**6 / (80 * 24 * 3600)))
    data_f_new_20 = np.array((data_f[0][mask_20], data_f[1][mask_20]))
    data_f_new_80 = np.array((data_f[0][mask_80], data_f[1][mask_80]))
    # FLIPER expects frequencies in Hz, not uHz.
    data_f_new_20[0] /= 10**6
    data_f_new_80[0] /= 10**6

    Fliper_20_d = FLIPER(kwargs).Fp_20_days(data_f_new_20.T, mag)
    Fliper_80_d = FLIPER(kwargs).Fp_80_days(data_f_new_80.T, mag)
    Fp02 = Fliper_80_d.fp02[0]
    Fp07 = Fliper_20_d.fp07[0]
    Fp7 = Fliper_20_d.fp7[0]
    Fp20 = Fliper_20_d.fp20[0]
    Fp50 = Fliper_20_d.fp50[0]
    # The trained model triggers DeprecationWarnings on load; suppress them.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        numax = 10**(ML().PREDICTION(
            T_eff, mag, Fp02, Fp07, Fp7, Fp20, Fp50,
            f"{kwargs[internal_path]}/FLIPER/ML_numax_training_paper"))

    return float(numax[0])
Example #6
0
# Locate the LCA repository root from the current working directory.
pre_path = re.findall(r'.+\/LCA\/', os.getcwd())[0]
test_file_dir = f"{pre_path}tests/testFiles/"

file = f"{test_file_dir}YS_224319473.txt"

kwargs = {
    file_ascii_skiprows: 1,
    file_ascii_use_cols: (0, 10),
    plot_show: False,
    general_kic: "224319473",
    file_fits_hdulist_column: 0,
    general_background_result_path: "/Users/marco/Documents/Dev/Background/results/",
    general_background_data_path: "/Users/marco/Documents/Dev/Background/data/",
    general_binary_path: "/Users/marco/Documents/Dev/Background/build/",
}

data = load_file(file, kwargs)
data = refine_data(data, kwargs)
sigma_ampl = calculate_flicker_amplitude(data)
f_ampl = flicker_amplitude_to_frequency(sigma_ampl)
# compute_nu_max returns (nu_max, filter-frequency dict, fliper results);
# only the frequency itself is needed below.
nu_max, _, _ = compute_nu_max(data, f_ampl, kwargs)
# Bug fixes: priors() requires kwargs as third argument; the unused
# compute_periodogram(data) call (missing its kwargs argument) is removed.
# Priors are computed once and reused for create_files and save_results.
prior_list = priors(nu_max, data, kwargs)
create_files(data, nyqFreq(data), prior_list, kwargs)

proc = BackgroundProcess(kwargs)
proc.run()

save_results(prior_list, data, kwargs)
Example #7
0
def _select_fliper_frequency(f_fliper: Dict[str, float]) -> float:
    """Picks the FLIPER nu_max guess: the exact value if present, else rough."""
    if "Fliper exact" in f_fliper:
        return f_fliper["Fliper exact"]
    return f_fliper["Fliper rough"]


def compute_nu_max(
        data: np.ndarray, f_flicker: float,
        kwargs: Dict) -> Tuple[float, Dict[str, float], Union[float, None]]:
    """
    Performs the full procedure introduced by Kallinger (2016)
    :param data: Full dataset from the lightcurve
    :param f_flicker: flicker frequency from the flicker amplitude. In uHz
    :param kwargs: Run configuration
    :return: guess for nu_max in uHz, the filter frequencies used (empty if
             FLIPER succeeded immediately), and the FLIPER results
    """
    f_fliper = compute_fliper_nu_max(compute_periodogram(data, kwargs), kwargs)
    if f_fliper != {}:
        # FLIPER already gives a guess: skip the iterative filtering entirely.
        return _select_fliper_frequency(f_fliper), {}, f_fliper

    f_list = []

    f_list.append((f_flicker, rf"F_flicker_{'%.2f' % f_flicker}$\mu Hz$"))
    plot_peridogramm_from_timeseries(data, kwargs, True, f_list)
    print_int(f"Flicker frequency {'%.2f' % f_flicker}", kwargs)

    tau = single_step_procedure(data, (f_to_t(f_flicker) / 60) + 5, kwargs)
    f = f_from_tau(tau)
    print_int(f"1. frequency {'%.2f' % f}", kwargs)

    f_list.append((f, rf"F_filter_0_{'%.2f' % f}$\mu Hz$"))
    plot_peridogramm_from_timeseries(data, kwargs, True, f_list)

    # for frequencies below 70 the first guess seems good enough
    n = 1

    # repeat process n-times
    for i in range(0, n):
        try:
            # Next guess: larger of the last two frequencies minus a third of
            # their separation.
            f_guess = np.amax([
                f_list[-2][0], f_list[-1][0]
            ]) - (1 / 3) * np.abs(f_list[-1][0] - f_list[-2][0])
            tau = single_step_procedure(data, (f_to_t(f_guess) / 60), kwargs)
        except ValueError:
            break
        f = f_from_tau(tau)

        print_int(f"{i + 2}. frequency {'%.2f' % f}", kwargs)

        f_list.append((f, rf"F_filter_{i + 1}_{'%.2f' % f}$\mu Hz$"))
        plot_peridogramm_from_timeseries(data, kwargs, True, f_list)

    print_int(f"Nu_max: {'%.2f' % f}", kwargs)

    f_list_ret = {name: val for val, name in f_list}

    # NOTE(review): this re-check uses the same inputs as the call at the top,
    # so it looks redundant unless compute_fliper_nu_max is stateful --
    # kept for behavioral parity; confirm before removing.
    f_fliper = compute_fliper_nu_max(compute_periodogram(data, kwargs), kwargs)
    if f_fliper != {}:
        f = _select_fliper_frequency(f_fliper)

    return f, f_list_ret, f_fliper
Example #8
0
def priors(nu_max: float, data: np.ndarray, kwargs: Dict):
    """
    Returns a List of priors for DIAMONDS.
    :param nu_max: nu_max determined by pipe
    :param data: Lightcurve dataset
    :param kwargs: Run configuration
    :return: (List of [lower, upper] prior pairs in DIAMONDS order, dict of
             the initial-guess parameters)
    """
    f_data = compute_periodogram(data, kwargs)

    # Hoisted guess values: each of these was recomputed many times below.
    # Assumes the guess helpers are pure functions of their arguments --
    # TODO confirm.
    w = noise(f_data)
    h_amp = harvey_amp(nu_max)
    b_1 = first_harvey(nu_max)
    b_2 = second_harvey(nu_max)
    b_3 = third_harvey(nu_max)
    sig = sigma(nu_max)
    h_osc = amp(nu_max, sig, f_data)

    bg_model = background_model(f_data, nyqFreq(data), w,
                                h_amp, b_1,
                                h_amp, b_2,
                                h_amp, b_3,
                                nu_max, h_osc,
                                sig)

    params = {
        'w': w,
        'sigma_1': h_amp,
        'b_1': b_1,
        'sigma_2': h_amp,
        'b_2': b_2,
        'sigma_3': h_amp,
        'b_3': b_3,
        'nu_max': nu_max,
        # NOTE(review): uses 2*sigma here while bg_model above uses sigma --
        # kept as in the original; confirm which is intended.
        'H_osc': amp(nu_max, 2 * sig, f_data),
        'sigma': sig
    }

    plot_f_space(f_data,
                 kwargs,
                 bg_model=bg_model,
                 plot_name="PSD_guess",
                 add_smoothing=True)

    lower_harvey_1 = min(f_data[0])
    minimum_percentage_harvey_1 = 0.7

    # First-Harvey upper bound, capped at 20 uHz.
    max_harvey_1 = min(minimum_percentage_harvey_1 * b_2, 20)

    # Harvey-amplitude upper bound, capped by the maximum power below
    # nu_max - sigma (keeps the prior inside the observed spectrum).
    max_harvey_amp = np.amax(f_data[1][f_data[0] < nu_max - sig])
    harvey_upper_prior = min(3 * h_amp, max_harvey_amp)

    # LC Data (red giants)
    if nyqFreq(data) * 24 * 60 < 15:
        return [
            [0.5 * w, 2 * w],
            [0.05 * h_amp, harvey_upper_prior],
            [lower_harvey_1, max_harvey_1],
            [0.05 * h_amp, harvey_upper_prior],
            [minimum_percentage_harvey_1 * b_2, 1.3 * b_2],
            [0.05 * h_amp, harvey_upper_prior],
            [0.7 * b_3, 1.3 * b_3],
            [0.1 * h_osc, 3.5 * h_osc],
            [0.6 * nu_max, 1.4 * nu_max],
            [0.7 * sig, 1.3 * sig]
        ], params
    # SC Data (dwarfs)
    else:
        return [
            [0.5 * w, 3 * w],
            [0.01 * h_amp, 1.5 * harvey_upper_prior],
            [lower_harvey_1, max_harvey_1],
            [0.01 * h_amp, harvey_upper_prior],
            [minimum_percentage_harvey_1 * b_2, 1.3 * b_2],
            [0.01 * h_amp, harvey_upper_prior],
            [0.7 * b_3, 1.4 * b_3],
            [0.1 * h_osc, 3.5 * h_osc],
            [0.8 * nu_max, 1.2 * nu_max],
            [0.7 * sig, 2 * sig]
        ], params