Example #1

import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d

# Variables such as lambdas, total_int, bc03_f, nu_I_nu, dust_model, and the
# helper generate_binned_alphas are defined earlier in the full script; only
# this excerpt is shown here.
# TODO: reproduce the results of the other dust model also

# load BOSS wavelengths, prep for interpolation
boss_lambdas = np.load('/Users/blakechellew/Documents/DustProject/alphas_and_stds/wavelength_boss.npy')  # angstroms
total_int_f = interp1d(lambdas, total_int, kind='cubic')

if dust_model == 'zd':
    bd12_factor = 0.52
elif dust_model == 'wd':
    bd12_factor = 0.49
plot1 = lambdas * bc03_f(lambdas)
plot2 = total_int / nu_I_nu
plot3 = bd12_factor * boss_lambdas * total_int_f(boss_lambdas) / nu_I_nu * bc03_f(boss_lambdas)

# bin the main plot
lambdas_boss_bin, plot3_bin, stds = generate_binned_alphas([plot3], [np.ones(plot3.shape)], boss_lambdas)
plot3_bin = plot3_bin[0]

plot1_mean = np.mean(plot1)
plot2_mean = np.mean(plot2)
plot3_mean = np.mean(plot3)
print("plot1", plot1)
print("plot2", plot2)
print("plot3", plot3)
print("plot 1 mean:", plot1_mean)
print("plot 2 mean:", plot2_mean)
print("plot 3 mean:", plot3_mean)
plt.plot(lambdas, plot1 * plot3_mean / plot1_mean, label='bc03 * wav')
plt.plot(lambdas, plot2 * plot3_mean / plot2_mean, label='just the integral')
plt.plot(lambdas_boss_bin, plot3_bin, drawstyle='steps-pre', label='predicted alphas')
plt.legend()
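
# The helper generate_binned_alphas used throughout these examples is part of the
# project and is not shown in this excerpt. Purely as an illustration, a minimal
# sketch with the same call signature and return order (binned wavelengths, list
# of binned spectra, list of binned stds) could look like the function below.
# The bin width, the inverse-variance weighting, and the (unused) `boss` flag are
# assumptions here; the real implementation may differ.
def generate_binned_alphas_sketch(alphas_list, stds_list, wavelength,
                                  boss=True, bin_width=50, bin_offset=0):
    """Average each 1-D spectrum in alphas_list over coarse wavelength bins."""
    # bin edges spanning the input wavelength grid (bin_width in angstroms, assumed)
    edges = np.arange(wavelength[0] + bin_offset, wavelength[-1], bin_width)
    which_bin = np.digitize(wavelength, edges)
    binned_wav = np.array([wavelength[which_bin == i].mean()
                           for i in range(1, len(edges))])
    binned_alphas, binned_stds = [], []
    for alphas, stds in zip(alphas_list, stds_list):
        weights = 1.0 / np.asarray(stds) ** 2  # inverse-variance weights (assumed)
        binned_alphas.append(np.array(
            [np.average(np.asarray(alphas)[which_bin == i], weights=weights[which_bin == i])
             for i in range(1, len(edges))]))
        binned_stds.append(np.array(
            [1.0 / np.sqrt(weights[which_bin == i].sum())
             for i in range(1, len(edges))]))
    return binned_wav, binned_alphas, binned_stds
# Usage mirroring the binning call above:
# wav_bin, spec_bins, std_bins = generate_binned_alphas_sketch(
#     [plot3], [np.ones(plot3.shape)], boss_lambdas)
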
# scaling factors
boss_fluxfactor = 1.38
sdss_fluxfactor = 1.38

# apply correction factor
correction_factors = [
    np.load('../alphas_and_stds/correction_factor_boss_iris_smooth.npy'),
    np.load('../alphas_and_stds/correction_factor_boss_iris_north_smooth.npy'),
    np.load('../alphas_and_stds/correction_factor_boss_iris_south_smooth.npy')
]
correction_factor_sdss = np.load(
    '../alphas_and_stds/correction_factor_sdss_iris_smooth.npy')
lambdas_bin, binned_corrections, _ = generate_binned_alphas(
    correction_factors,
    3 * [np.ones(len(correction_factors[0]))],
    wavelength,
    boss=True,
    bin_width=bin_width)
_, binned_sdss_correction, _ = generate_binned_alphas(
    [correction_factor_sdss], [np.ones(len(correction_factor_sdss))],
    wavelength_sdss,
    boss=False,
    bin_width=bin_width)
binned_sdss_correction = binned_sdss_correction[0]

alphas_norad = [
    a / boss_fluxfactor * corr
    for a, corr in zip(alphas_norad, correction_factors)
]
# bin
alphas_norad_bin_wav, alphas_norad_bin, _ = generate_binned_alphas(
    alphas_norad,
    [np.ones(alphas_norad[0].shape) for i in range(len(alphas_norad))],
    wavelength,
    bin_offset=0)

        if wd01_model:
            bd12_alphas = np.loadtxt('/Users/blakechellew/Documents/DustProject/alphas_and_stds/bd12_fig3_green_052621.csv',
                                     delimiter=",")
        else:
            bd12_alphas = np.loadtxt('/Users/blakechellew/Documents/DustProject/alphas_and_stds/bd12_fig3_blue_052621.csv',
                                     delimiter=",")

        # load plot from brandt code
        brandt_path = '/Users/blakechellew/Documents/DustProject/BrandtFiles/brandt_radiative/integrals/'
        if wd01_model:
            brandt_alphas = np.load(brandt_path + 'alphas_wd.npy')
        else:
            brandt_alphas = np.load(brandt_path + 'alphas_zd.npy')

        # bin some alphas
        lambdas_bin, alphas_bin, _ = generate_binned_alphas([alphas], [np.ones(alphas.shape)], wavelength, bin_offset=0)
        alphas_bin = alphas_bin[0]

        alphas_norad = [np.load('../alphas_and_stds/alphas_boss_iris_2d_012720.npy'),
                        np.load('../alphas_and_stds/alphas_north011720.npy'),
                        np.load('../alphas_and_stds/alphas_south011720.npy')]
        # apply correction factor
        correction_factors = [np.load('../alphas_and_stds/correction_factor_boss_iris_smooth.npy'),
                              np.load('../alphas_and_stds/correction_factor_boss_iris_north_smooth.npy'),
                              np.load('../alphas_and_stds/correction_factor_boss_iris_south_smooth.npy')]
        _, binned_corrections, _ = generate_binned_alphas(correction_factors, 3 * [np.ones(len(correction_factors[0]))], wavelength, boss=boss)
        alphas_norad = [a / boss_fluxfactor * corr for a, corr in zip(alphas_norad, correction_factors)]

        # bin
        alphas_norad_bin_wav, alphas_norad_bin, _ = generate_binned_alphas(alphas_norad, [np.ones(alphas_norad[0].shape) for i in range(len(alphas_norad))],
                                                                           wavelength, bin_offset=0)

bootstrap_alpha_stds_thresh_2d = [
    np.load(alpha_direc_boot + 'bootstrap_alpha_stds_boss_iris_2d_' + loadkey +
            '_10.npy'),
    np.load(alpha_direc_boot + 'bootstrap_alpha_stds_boss_iris_2d_' + loadkey +
            '_15.npy'),
    np.load(alpha_direc_boot + 'bootstrap_alpha_stds_boss_iris_2d_' + loadkey +
            '_20.npy'),
    np.load(alpha_direc_boot + 'bootstrap_alpha_stds_boss_iris_2d_' + loadkey +
            '_25.npy'),
    np.load(alpha_direc_boot + 'bootstrap_alpha_stds_boss_iris_2d_' + loadkey +
            '_30.npy')
]
print("loaded threshold alphas")

_, bootstrap_binned_alphas_thresh_1d, _ = generate_binned_alphas(
    bootstrap_alphas_thresh_1d,
    bootstrap_alpha_stds_thresh_1d,
    wavelength_boss,
    boss=True)
_, bootstrap_binned_alphas_thresh_2d, _ = generate_binned_alphas(
    bootstrap_alphas_thresh_2d,
    bootstrap_alpha_stds_thresh_2d,
    wavelength_boss,
    boss=True)
print("done binning")

# bootstrap percentiles
bootstrap_binned_lower_thresh_1d = [
    np.percentile(b, 16, axis=0) for b in bootstrap_binned_alphas_thresh_1d
]  # 68 percent confidence interval
bootstrap_binned_upper_thresh_1d = [
    np.percentile(b, 84, axis=0) for b in bootstrap_binned_alphas_thresh_1d
]
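
# Illustration only (not from the original script): the 16th/84th percentile arrays
# computed above bracket a 68% bootstrap confidence band. A self-contained toy
# example of the same idea, drawn with matplotlib's fill_between (all names and
# numbers below are invented for the demo):
toy_rng = np.random.default_rng(0)
toy_boot = toy_rng.normal(loc=1.0, scale=0.1, size=(500, 40))  # 500 bootstrap spectra, 40 bins
toy_lower = np.percentile(toy_boot, 16, axis=0)
toy_upper = np.percentile(toy_boot, 84, axis=0)
toy_grid = np.arange(40)
plt.fill_between(toy_grid, toy_lower, toy_upper, alpha=0.3, label='68% bootstrap band')
plt.plot(toy_grid, toy_boot.mean(axis=0), label='bootstrap mean')
plt.legend()
plt.show()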
Example #5

else:
    hdulist_direc = '/Users/blakechellew/Documents/DustProject/BrandtFiles/'
    hdulist_sdsswav = fits.open('/Users/blakechellew/Documents/DustProject/BrandtFiles/SDSS_allskyspec.fits')
    survey_lambdas = np.array(hdulist_sdsswav[1].data)
total_int_f = interp1d(lambdas, total_int, kind='cubic')

if dust_model == 'zd':
    bd12_factor = 0.52
elif dust_model == 'wd':
    bd12_factor = 0.49
plot1 = lambdas * bc03_f(lambdas)
plot2 = total_int / nu_I_nu
plot3 = bd12_factor * survey_lambdas * total_int_f(survey_lambdas) / nu_I_nu * bc03_f(survey_lambdas)

# bin the main plot
lambdas_bin, plot3_bin, stds = generate_binned_alphas([plot3], [np.ones(plot3.shape)], survey_lambdas)
plot3_bin = plot3_bin[0]

# load bd12 plot for comparison
if dust_model == 'wd':
    bd12_plot = np.loadtxt('/Users/blakechellew/Documents/DustProject/alphas_and_stds/bd12_fig3_green_052621.csv',
                           delimiter=",")
else:
    bd12_plot = np.loadtxt('/Users/blakechellew/Documents/DustProject/alphas_and_stds/bd12_fig3_blue_052621.csv',
                           delimiter=",")

plot1_mean = np.mean(plot1)
plot2_mean = np.mean(plot2)
plot3_mean = np.mean(plot3)
print("plot1", plot1)
print("plot2", plot2)
Example #6

idx_array_north = [
    378, 1271, 2498, 211, 212, 905, 938, 1265, 1509, 2141, 2495, 2500, 2502
]
idx_array_south = [
    22, 23, 389, 419, 440, 536, 667, 1065, 1081, 1106, 1684, 1690, 1739, 1786,
    2229, 2380, 2381, 2383, 2388, 74, 1078, 1104, 1110, 1393, 1696, 1990, 1997,
    2009, 2011, 2382, 2416
]

# override to check just one
# idx_array_north = [2141]
# idx_array_south = []

for bad_idx in idx_array_north:

    wav_binned, alphas_binned_1, _ = generate_binned_alphas(
        [north[bad_idx]], [np.ones(len(boss_wavelength))], boss_wavelength)
    wav_binned, alphas_binned_2, _ = generate_binned_alphas(
        [north[1000]], [np.ones(len(boss_wavelength))], boss_wavelength)
    alphas_binned_1 = alphas_binned_1[0]
    alphas_binned_2 = alphas_binned_2[0]
    plt.plot(wav_binned, alphas_binned_1, 'r', label='bad')   # flagged spectrum (bad_idx)
    plt.plot(wav_binned, alphas_binned_2, 'k', label='good')  # reference spectrum (index 1000)
    plt.legend()
    plt.ylim(0, 1)
    plt.title("North idx " + str(bad_idx))
    plt.show()

for bad_idx in idx_array_south:
    wav_binned, alphas_binned_3, _ = generate_binned_alphas(
        [south[bad_idx]], [np.ones(len(boss_wavelength))], boss_wavelength)
    wav_binned, alphas_binned_4, _ = generate_binned_alphas(
        [south[1000]], [np.ones(len(boss_wavelength))], boss_wavelength)
Example #7

                paths_bc03,
                showPlots=False)  # TEST: was using alphas_bootstrap[i]
    print(coeffs_array)

    # combine the coefficients:
    avg_coeffs = np.mean(coeffs_array, axis=0)
    coeff_stds = np.std(coeffs_array, axis=0)
    print("coeffs with stds:")
    print(avg_coeffs)
    print(coeff_stds)

    # bin the bootstrap stuff
    # I think the std input here can be ones, but maybe there is something better?
    print("best fit array:", best_fit_array.shape)
    lambdas_bin, best_fit_bin, _ = generate_binned_alphas(
        [best_fit_array], [np.ones(best_fit_array.shape)],
        best_fit_wavelengths,
        bin_offset=0)
    best_fit_bin = best_fit_bin[0]
    print("best fit bin:", best_fit_bin.shape)
    # bin the boss stuff
    lambdas_bin_boss, boss_bin, _ = generate_binned_alphas([alphas_boss],
                                                           [alpha_stds_boss],
                                                           wavelength,
                                                           bin_offset=0)
    boss_bin = boss_bin[0]
    # percentiles
    bootstrap_binned_lower = np.nanpercentile(
        best_fit_bin, 16, axis=0)  # 68 percent confidence interval
    bootstrap_binned_upper = np.nanpercentile(best_fit_bin, 84, axis=0)
    # TODO: need to run with the original alphas. for now take the mean.
    plt.plot(lambdas_bin,