Example #1
0
def job_afb_analysis(T):
    data = np.loadtxt('Data/radiative_corrections.tsv')
    corrections = data[:, 1]

    energies = np.loadtxt('Data/energies.txt')
    data = np.loadtxt('Data/afb.txt')
    negative = data[:, 0]
    positive = data[:, 1]

    results = []
    for i in range(SAMPLES):
        positive_boot = bootstrap.redraw_count(positive)
        negative_boot = bootstrap.redraw_count(negative)

        result = afb_kernel(positive_boot, negative_boot, corrections)
        if result is not None:
            results.append(result)

    afb_corr_dist, sin_sq_dist = zip(*results)

    afb_filt, sin_sq_filt = zip(*[
        (x[3], y)
        for x, y in zip(afb_corr_dist, sin_sq_dist)
        if not np.isnan(y)
    ])

    print('afb:', len(afb_corr_dist), len(afb_filt))

    # Percentage of bootstrap results discarded because the sin_sq extraction
    # returned NaN.
    T['sin_sq_bootstrap_acceptance'] = siunitx((1 - len(sin_sq_filt) / len(sin_sq_dist)) * 100)

    afb_val, afb_err = bootstrap.average_and_std_arrays(afb_corr_dist)
    sin_sq_val, sin_sq_err = bootstrap.average_and_std_arrays(sin_sq_filt)

    # Recompute the central values from the unresampled data; the bootstrap
    # distributions above provide the error estimates.
    afb_val, sin_sq_val = afb_kernel(positive, negative, corrections)

    sin_sq_up, sin_sq_down = bootstrap.percentile_arrays(sin_sq_filt, sin_sq_val)

    print('sin_sq:', sin_sq_val, sin_sq_err, sin_sq_up, sin_sq_down)

    np.savetxt('_build/xy/afb.tsv', np.column_stack([energies, afb_val, afb_err]))

    T['afb_table'] = list(zip(
        siunitx(energies),
        siunitx(afb_val, afb_err),
    ))

    T['sin_sq_afb'] = siunitx(sin_sq_val, sin_sq_err)

    T['sin_sq_afb_asym'] = '{:.3f}^{{+{:.3f}}}_{{-{:.3f}}}'.format(sin_sq_val, sin_sq_up, sin_sq_down)

    counts, bins = np.histogram(sin_sq_filt)
    counts = np.array(list(counts) + [counts[-1]])
    print(bins.shape, counts.shape)
    np.savetxt('_build/xy/sin_sq_hist.tsv', np.column_stack([bins, counts]))

    counts, bins = np.histogram([x[3] for x in afb_corr_dist])
    counts = np.array(list(counts) + [counts[-1]])
    print(bins.shape, counts.shape)
    np.savetxt('_build/xy/afb_hist.tsv', np.column_stack([bins, counts]))

    counts, bins = np.histogram(afb_filt, bins=bins)
    counts = np.array(list(counts) + [counts[-1]])
    print(bins.shape, counts.shape)
    np.savetxt('_build/xy/afb_filt_hist.tsv', np.column_stack([bins, counts]))
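
A minimal standalone sketch (toy data, not part of the example above) of the bins/counts padding used for the histogram exports: np.histogram returns one more bin edge than counts, so the last count is repeated to obtain two equal-length columns for a step-style plot.

import numpy as np

counts, bins = np.histogram([0.1, 0.2, 0.4, 0.9], bins=3)
assert len(bins) == len(counts) + 1
counts = np.append(counts, counts[-1])
print(np.column_stack([bins, counts]))  # equal-length columns for a step plot
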
Example #2
0
def get_indium_data(T, slope_val, width):
    files = glob.glob('Data/in-*.txt')

    temps_val = []
    temps_err = []

    all_counts = []

    all_tau_0_dist = []
    all_tau_bar_dist = []
    all_tau_f_dist = []
    all_tau_t_dist = []

    all_intens_0_dist = []
    all_intens_t_dist = []

    all_lifetime_y_dist = []
    all_lifetime_popt_dist = []

    all_sigma_c_dist = []

    # Process lifetime curves with bootstrap.
    for sample_id in range(BOOTSTRAP_SAMPLES):
        print('Bootstrap sample', sample_id, 'running …')

        results = []

        for file_ in sorted(files):
            print('Working on lifetime spectrum', file_)

            if sample_id == 0:
                temp_lower, temp_upper = get_temp(file_)
                temp_mean = (temp_lower + temp_upper) / 2
                temp_err = temp_upper - temp_mean
                temps_val.append(temp_mean)
                temps_err.append(temp_err)
                print('Mean temperature:', temp_mean)

            data = np.loadtxt(file_)
            channel = data[:, 0]
            time = slope_val * channel
            counts = data[:, 1]
            boot_counts = bootstrap.redraw_count(counts)

            if sample_id == 0:
                all_counts.append(counts)

            x = np.linspace(np.min(time), np.max(time), 2000)

            sel = (9 < time) & (time < 15)

            # Two-component lifetime model with `width` held fixed; the free
            # parameters are the time zero (mean), the amplitudes A_0 and A_t,
            # the lifetimes tau_0 and tau_t, and the background BG.
            fit_func = lambda t, mean, A_0, A_t, tau_0, tau_t, BG: \
                    models.lifetime_spectrum(t, mean, width, A_0, A_t, tau_0, tau_t, BG)
            p0 = [10.5, 210, 190, 0.07, 0.8, 0]
            popt, pconv = op.curve_fit(fit_func,
                                       time[sel],
                                       boot_counts[sel],
                                       p0=p0)
            mean, A_0, A_t, tau_0, tau_t, BG = popt

            # Derived quantities: relative intensities, the intensity-weighted
            # mean lifetime, and tau_f and sigma_c; sigma_c feeds the
            # Arrhenius fit further below.
            intens_0 = A_0 / (A_0 + A_t)
            intens_t = A_t / (A_0 + A_t)
            tau_bar = intens_0 * tau_0 + intens_t * tau_t
            y = fit_func(x, *popt)
            tau_f = 1 / (intens_0 / tau_0 - intens_t / tau_t)
            sigma_c = 1 / tau_0 - 1 / tau_f

            results.append([
                tau_0,
                tau_bar,
                tau_f,
                tau_t,
                intens_0,
                intens_t,
                y,
                popt,
                sigma_c,
            ])


        tau_0_list, tau_bar_list, tau_f_list, tau_t_list, intens_0_list, \
                intens_t_list, lifetime_y_list, lifetime_popt_list, sigma_c_list \
                = zip(*results)

        all_tau_0_dist.append(tau_0_list)
        all_tau_bar_dist.append(tau_bar_list)
        all_tau_f_dist.append(tau_f_list)
        all_tau_t_dist.append(tau_t_list)
        all_intens_0_dist.append(intens_0_list)
        all_intens_t_dist.append(intens_t_list)
        all_lifetime_y_dist.append(lifetime_y_list)
        all_lifetime_popt_dist.append(lifetime_popt_list)
        all_sigma_c_dist.append(sigma_c_list)

    T['temps_int'] = []

    # Generate plots with lifetime curves and fits.
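    # Note that `time` and `x` are reused from the last spectrum processed in
    # the loop above; this assumes all spectra share the same channel
    # calibration.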
    for temp, counts, lifetime_y_dist in zip(temps_val, all_counts,
                                             zip(*all_lifetime_y_dist)):
        print('Creating lifetime plot with temp', temp)
        y_val, y_err = bootstrap.average_and_std_arrays(lifetime_y_dist)

        np.savetxt(
            '_build/xy/lifetime-{}K-data.tsv'.format(int(temp)),
            bootstrap.pgfplots_error_band(time[0:4000], counts[0:4000],
                                          np.sqrt(counts[0:4000])))
        np.savetxt('_build/xy/lifetime-{}K-fit.tsv'.format(int(temp)),
                   np.column_stack([x, y_val]))
        np.savetxt('_build/xy/lifetime-{}K-band.tsv'.format(int(temp)),
                   bootstrap.pgfplots_error_band(x, y_val, y_err))

        T['temps_int'].append(int(temp))

        if False:
            pl.fill_between(x,
                            y_val - y_err,
                            y_val + y_err,
                            alpha=0.5,
                            color='red')
            pl.plot(time, counts, color='black')
            counts_smooth = scipy.ndimage.filters.gaussian_filter1d(counts, 8)
            pl.plot(time, counts_smooth, color='green')
            pl.plot(x, y_val, color='red')
            pl.xlabel('Time / ns')
            pl.ylabel('Counts')
            dandify_plot()
            pl.xlim((8, 20))
            pl.savefig('_build/mpl-lifetime-{:04d}K.pdf'.format(int(temp)))
            pl.savefig('_build/mpl-lifetime-{:04d}K.png'.format(int(temp)))
            pl.yscale('log')
            pl.savefig('_build/mpl-lifetime-{:04d}K-log.pdf'.format(int(temp)))
            pl.savefig('_build/mpl-lifetime-{:04d}K-log.png'.format(int(temp)))
            pl.clf()

    T['temps_int'].sort()

    # Plot the lifetimes.
    taus_0_val, taus_0_err = bootstrap.average_and_std_arrays(all_tau_0_dist)
    taus_t_val, taus_t_err = bootstrap.average_and_std_arrays(all_tau_t_dist)
    taus_f_val, taus_f_err = bootstrap.average_and_std_arrays(all_tau_f_dist)
    taus_bar_val, taus_bar_err = bootstrap.average_and_std_arrays(
        all_tau_bar_dist)
    pl.errorbar(temps_val,
                taus_0_val,
                xerr=temps_err,
                yerr=taus_0_err,
                label=r'$\tau_0$',
                linestyle='none',
                marker='+')
    pl.errorbar(temps_val,
                taus_bar_val,
                xerr=temps_err,
                yerr=taus_bar_err,
                label=r'$\bar\tau$',
                linestyle='none',
                marker='+')
    pl.errorbar(temps_val,
                taus_t_val,
                xerr=temps_err,
                yerr=taus_t_err,
                label=r'$\tau_\mathrm{t}$',
                linestyle='none',
                marker='+')
    pl.errorbar(temps_val,
                taus_f_val,
                xerr=temps_err,
                yerr=taus_f_err,
                label=r'$\tau_\mathrm{f}$',
                linestyle='none',
                marker='+')
    pl.xlabel('T / K')
    pl.ylabel(r'$\tau$ / ns')
    dandify_plot()
    pl.savefig('_build/mpl-tau_0-tau_t.pdf')
    pl.savefig('_build/mpl-tau_0-tau_t.png')
    pl.clf()
    np.savetxt('_build/xy/tau_0.tsv',
               np.column_stack([temps_val, taus_0_val, taus_0_err]))
    np.savetxt('_build/xy/tau_t.tsv',
               np.column_stack([temps_val, taus_t_val, taus_t_err]))
    np.savetxt('_build/xy/tau_f.tsv',
               np.column_stack([temps_val, taus_f_val, taus_f_err]))
    np.savetxt('_build/xy/tau_bar.tsv',
               np.column_stack([temps_val, taus_bar_val, taus_bar_err]))

    T['taus_table'] = list(
        zip(
            siunitx(temps_val, temps_err),
            siunitx(taus_0_val, taus_0_err),
            siunitx(taus_t_val, taus_t_err),
            siunitx(taus_f_val, taus_f_err),
            siunitx(taus_bar_val, taus_bar_err),
        ))

    # Plot relative intensities.
    all_intens_0_val, all_intens_0_err = bootstrap.average_and_std_arrays(
        all_intens_0_dist)
    all_intens_t_val, all_intens_t_err = bootstrap.average_and_std_arrays(
        all_intens_t_dist)
    pl.errorbar(temps_val,
                all_intens_0_val,
                xerr=temps_err,
                yerr=all_intens_0_err,
                label=r'$A_0$',
                linestyle='none',
                marker='+')
    pl.errorbar(temps_val,
                all_intens_t_val,
                xerr=temps_err,
                yerr=all_intens_t_err,
                label=r'$A_\mathrm{t}$',
                linestyle='none',
                marker='+')
    pl.xlabel('T / K')
    pl.ylabel(r'Relative Intensity')
    dandify_plot()
    pl.savefig('_build/mpl-intensities.pdf')
    pl.savefig('_build/mpl-intensities.png')
    pl.clf()

    np.savetxt(
        '_build/xy/intensities-0.tsv',
        np.column_stack([temps_val, all_intens_0_val, all_intens_0_err]))
    np.savetxt(
        '_build/xy/intensities-t.tsv',
        np.column_stack([temps_val, all_intens_t_val, all_intens_t_err]))

    T['intensities_table'] = list(
        zip(
            siunitx(temps_val, temps_err),
            siunitx(all_intens_0_val, all_intens_0_err),
            siunitx(all_intens_t_val, all_intens_t_err),
        ))

    inv_temps = 1 / np.array(temps_val)
    results = []
    x = np.linspace(np.min(inv_temps), np.max(inv_temps), 1000)
    kelvin_to_eV = 8.617333e-5  # Boltzmann constant k_B in eV/K.
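    # With the Arrhenius ansatz sigma_c(T) = A * exp(-H_t / (k_B * T)) and the
    # fit performed against x = 1/T, the fitted decay constant popt[1] equals
    # H_t / k_B, so multiplying by kelvin_to_eV converts it to the activation
    # enthalpy H_t in eV. (This assumes exp_decay(x, a, b) = a * exp(-b * x).)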
    for all_sigma_c in all_sigma_c_dist:
        p0 = [11, 240]
        print('inv_temps:', inv_temps)
        print('all_sigma_c:', all_sigma_c)
        for leave_out in range(len(all_sigma_c)):
            inv_temps_jack = np.delete(inv_temps, leave_out)
            all_sigma_c_jack = np.delete(all_sigma_c, leave_out)
            popt, pconv = op.curve_fit(exp_decay,
                                       inv_temps_jack,
                                       all_sigma_c_jack,
                                       p0=p0)
            y = exp_decay(x, *popt)
            results.append([
                popt,
                popt[1] * kelvin_to_eV,
                y,
            ])

    popt_dist, Ht_eV_dist, arr_y_dist = zip(*results)

    popt_val, popt_err = bootstrap.average_and_std_arrays(popt_dist)
    print('popt:', siunitx(popt_val, popt_err))
    Ht_eV_val, Ht_eV_err = bootstrap.average_and_std_arrays(Ht_eV_dist)
    arr_y_val, arr_y_err = bootstrap.average_and_std_arrays(arr_y_dist)
    sigma_c_val, sigma_c_err = bootstrap.average_and_std_arrays(
        all_sigma_c_dist)

    pl.fill_between(x,
                    arr_y_val - arr_y_err,
                    arr_y_val + arr_y_err,
                    alpha=0.5,
                    color='red')
    pl.plot(x, arr_y_val, color='red')
    pl.errorbar(inv_temps,
                sigma_c_val,
                yerr=sigma_c_err,
                marker='+',
                linestyle='none',
                color='black')
    pl.xlabel(r'$1 / T$')
    pl.ylabel(r'$\sigma C_t(T)$')
    pl.savefig('_build/mpl-arrhenius.pdf')
    pl.savefig('_build/mpl-arrhenius.png')
    pl.clf()

    np.savetxt('_build/xy/arrhenius-data.tsv',
               np.column_stack([inv_temps, sigma_c_val, sigma_c_err]))
    np.savetxt('_build/xy/arrhenius-fit.tsv', np.column_stack([x, arr_y_val]))
    np.savetxt('_build/xy/arrhenius-band.tsv',
               bootstrap.pgfplots_error_band(x, arr_y_val, arr_y_err))

    T['arrhenius_table'] = list(
        zip(
            siunitx(inv_temps),
            siunitx(sigma_c_val, sigma_c_err),
        ))

    print('Ht_eV:', siunitx(Ht_eV_val, Ht_eV_err))

    T['Ht_eV'] = siunitx(Ht_eV_val, Ht_eV_err)

    pl.errorbar(temps_val,
                taus_bar_val,
                xerr=temps_err,
                yerr=taus_bar_err,
                label=r'$\bar\tau$',
                linestyle='none',
                marker='+')
    dandify_plot()
    pl.xlabel('T / K')
    pl.ylabel(r'$\bar\tau$ / ns')
    pl.savefig('_build/mpl-s_curve.pdf')
    pl.savefig('_build/mpl-s_curve.png')
    pl.clf()
    np.savetxt('_build/xy/s_curve.tsv',
               np.column_stack([temps_val, taus_bar_val, taus_bar_err]))
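
A minimal standalone sketch of the Arrhenius step above, with toy data; the two-parameter exponential form of exp_decay is an assumption, since that helper is not shown in this excerpt.

import numpy as np
import scipy.optimize as op

def exp_decay(x, a, b):
    # Assumed form of the module's exp_decay helper: a * exp(-b * x).
    return a * np.exp(-b * x)

rng = np.random.default_rng(0)
inv_temps = np.linspace(1 / 400, 1 / 250, 8)   # 1/K
sigma_c = exp_decay(inv_temps, 11, 240) * (1 + 0.02 * rng.standard_normal(8))

popt, pconv = op.curve_fit(exp_decay, inv_temps, sigma_c, p0=[11, 240])
kelvin_to_eV = 8.617333e-5                     # Boltzmann constant in eV/K
print('H_t =', popt[1] * kelvin_to_eV, 'eV')   # activation enthalpy
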
Example #3
0
def bootstrap_driver(T):
    # Load all the input data from the files.
    lum_data = np.loadtxt('Data/luminosity.txt')
    lum_val = lum_data[:, 0]
    lum_err = lum_data[:, 3]
    radiative_hadrons = np.loadtxt('Data/radiative-hadrons.tsv')
    radiative_leptons = np.loadtxt('Data/radiative-leptons.tsv')
    raw_matrix = np.loadtxt('Data/matrix.txt').T
    mc_sizes = np.loadtxt('Data/monte-carlo-sizes.txt')
    filtered = np.loadtxt('Data/filtered.txt')

    # Some output into the template.
    T['luminosities_table'] = list(zip(siunitx(energies), siunitx(lum_val, lum_err)))
    T['radiative_cs_table'] = list(zip(
        siunitx(energies),
        siunitx(radiative_hadrons),
        siunitx(radiative_leptons),
    ))

    # Container for the results of each bootstrap run.
    results = []

    for r in range(SAMPLES):
        # Draw new numbers for the matrix.
        boot_matrix = bootstrap.redraw_count(raw_matrix)

        # Draw new luminosities.
        boot_lum_val = np.array([
            random.gauss(val, err)
            for val, err
            in zip(lum_val, lum_err)])

        # Draw new filtered readings.
        boot_readings = bootstrap.redraw_count(filtered)

        # Run the analysis on the resampled data and save the results.
        results.append(bootstrap_kernel(mc_sizes, boot_matrix, boot_readings,
                                        boot_lum_val, radiative_hadrons,
                                        radiative_leptons))

    # `results` is a list with one entry per bootstrap run. That layout is not
    # particularly helpful, since the interesting quantities sit on the second
    # index of the list, while the first index is the bootstrap run index.
    # Therefore we use the `zip(*x)` trick to exchange the two indices: the
    # result is a list of quantities, each of which is itself a list over the
    # bootstrap samples. Using Python tuple assignment, we can then split that
    # (now) outer list into the individual quantities. Each of the variables
    # created here is a list of R bootstrap samples.
    x_dist, masses_dist, widths_dist, cross_sections_dist, y_dist, corr_dist, \
            matrix_dist, inverted_dist, readings_dist, peaks_dist, brs_dist, \
            width_electron_dist, width_flavors_dist, missing_width_dist, \
            width_lepton_dist, neutrino_families_dist, popts_dist, \
            mean_mass_dist, mean_width_dist \
            = zip(*results)
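    # Toy illustration of the transposition: zip(*[(1, 'a'), (2, 'b')]) yields
    # ((1, 2), ('a', 'b')), i.e. one tuple per quantity instead of one per run.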

    # We only need one of the lists of the x-values as they are all the same.
    # So take the first and throw the others out.
    x = x_dist[0]

    # The masses and widths returned by `bootstrap_kernel` are each a list of
    # four elements (electrons, muons, tauons, hadrons). The variable
    # `masses_dist` contains R copies of this four-element list, one copy per
    # bootstrap sample. We now average along the bootstrap dimension, i.e. the
    # outermost dimension: for each of the four masses we take the average
    # over the R copies. This gives four masses and four mass errors.
    masses_val, masses_err = bootstrap.average_and_std_arrays(masses_dist)
    widths_val, widths_err = bootstrap.average_and_std_arrays(widths_dist)
    peaks_val, peaks_err = bootstrap.average_and_std_arrays(peaks_dist)
    brs_val, brs_err = bootstrap.average_and_std_arrays(brs_dist)
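    # Assuming `average_and_std_arrays` takes the mean and standard deviation
    # along the first (bootstrap) axis, this is equivalent to
    # np.mean(masses_dist, axis=0) and np.std(masses_dist, axis=0), giving one
    # value and one error per decay channel.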

    T['brs'] = siunitx(brs_val[0:3], brs_err[0:3])

    # Format masses and widths for the template.
    T['lorentz_fits_table'] = list(zip(
        display_names,
        siunitx(masses_val, masses_err),
        siunitx(widths_val, widths_err),
        siunitx(peaks_val, peaks_err),
    ))

    width_electron_val, width_electron_err = bootstrap.average_and_std_arrays(width_electron_dist)
    width_flavors_val, width_flavors_err = bootstrap.average_and_std_arrays(width_flavors_dist)

    T['width_electron_mev'] = siunitx(width_electron_val*1000, width_electron_err*1000)
    T['width_flavors_mev'] = siunitx(width_flavors_val*1000, width_flavors_err*1000)

    missing_width_val, missing_width_err = bootstrap.average_and_std_arrays(missing_width_dist)
    width_lepton_val, width_lepton_err = bootstrap.average_and_std_arrays(width_lepton_dist)
    neutrino_families_val, neutrino_families_err = bootstrap.average_and_std_arrays(neutrino_families_dist)

    T['missing_width_mev'] = siunitx(missing_width_val*1000, missing_width_err*1000)
    T['width_lepton_mev'] = siunitx(width_lepton_val*1000, width_lepton_err*1000)
    T['neutrino_families'] = siunitx(neutrino_families_val, neutrino_families_err)

    # Format original counts for the template.
    val, err = bootstrap.average_and_std_arrays(readings_dist)
    T['counts_table'] = []
    for i in range(7):
        T['counts_table'].append([siunitx(energies[i])] + siunitx(val[i, :], err[i, :], allowed_hang=10))

    # Format corrected counts for the template.
    val, err = bootstrap.average_and_std_arrays(corr_dist)
    T['corrected_counts_table'] = []
    for i in range(7):
        T['corrected_counts_table'].append([siunitx(energies[i])] + siunitx(val[i, :], err[i, :], allowed_hang=10))

    # Format matrix for the template.
    matrix_val, matrix_err = bootstrap.average_and_std_arrays(matrix_dist)
    T['matrix'] = []
    for i in range(4):
        T['matrix'].append([display_names[i]] + siunitx(matrix_val[i, :]*100, matrix_err[i, :]*100, allowed_hang=10))

    # Format inverted matrix for the template.
    inverted_val, inverted_err = bootstrap.average_and_std_arrays(inverted_dist)
    T['inverted'] = []
    for i in range(4):
        T['inverted'].append([display_names[i]] +
                             list(map(number_padding,
                             siunitx(inverted_val[i, :], inverted_err[i, :], allowed_hang=10))))

    # Format cross sections for the template.
    cs_val, cs_err = bootstrap.average_and_std_arrays(cross_sections_dist)
    T['cross_sections_table'] = []
    for i in range(7):
        T['cross_sections_table'].append([siunitx(energies[i])] + siunitx(cs_val[:, i], cs_err[:, i]))

    # Build error band for pgfplots.
    y_list_val, y_list_err = bootstrap.average_and_std_arrays(y_dist)
    for i, name in zip(itertools.count(), names):
        # Extract the y-values for the given decay type.
        y_val = y_list_val[i, :]
        y_err = y_list_err[i, :]

        # Store the data for pgfplots.
        np.savetxt('_build/xy/cross_section-{}s.tsv'.format(name),
                   np.column_stack([energies, cs_val[i, :], cs_err[i, :]]))
        np.savetxt('_build/xy/cross_section-{}s-band.tsv'.format(name),
                   bootstrap.pgfplots_error_band(x, y_val, y_err))
        np.savetxt('_build/xy/cross_section-{}s-fit.tsv'.format(name),
                   np.column_stack((x, y_val)))

    popts_val, popts_err = bootstrap.average_and_std_arrays(popts_dist)
    T['chi_sq'] = []
    T['chi_sq_red'] = []
    T['p'] = []
    for i in range(4):
        residuals = cs_val[i, :] - propagator(energies, *popts_val[i, :])
        chi_sq = np.sum((residuals / cs_err[i, :])**2)
        dof = len(residuals) - 1 - len(popts_val[i, :])
        p = 1 - scipy.stats.chi2.cdf(chi_sq, dof)

        print('chi_sq', chi_sq, chi_sq/dof, p)
        T['chi_sq'].append(siunitx(chi_sq))
        T['chi_sq_red'].append(siunitx(chi_sq/dof))
        T['p'].append(siunitx(p))

    T['confidence_table'] = list(zip(
        display_names,
        T['chi_sq'],
        T['chi_sq_red'],
        T['p'],
    ))

    mean_mass_val, mean_mass_err = bootstrap.average_and_std_arrays(mean_mass_dist)
    mean_width_val, mean_width_err = bootstrap.average_and_std_arrays(mean_width_dist)

    T['mean_mass'] = siunitx(mean_mass_val, mean_mass_err)
    T['mean_width'] = siunitx(mean_width_val, mean_width_err)
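
A minimal standalone sketch (toy numbers) of the goodness-of-fit figures computed near the end of this example: the chi-square of the weighted residuals, the reduced chi-square, and the p-value from the chi-square distribution.

import numpy as np
import scipy.stats

residuals = np.array([0.5, -1.2, 0.3, 0.9, -0.4])   # toy residuals
errors = np.array([1.0, 1.0, 0.5, 1.0, 0.8])        # toy uncertainties
n_params = 2                                        # number of fitted parameters

chi_sq = np.sum((residuals / errors)**2)
dof = len(residuals) - 1 - n_params                 # degrees of freedom as counted above
p = 1 - scipy.stats.chi2.cdf(chi_sq, dof)
print(chi_sq, chi_sq / dof, p)
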
Example #4
0
def get_acryl_data(T, slope_val, width):
    data = np.loadtxt('Data/longlong.txt')
    channel = data[:, 0]
    time = slope_val * channel
    counts = data[:, 1]

    x = np.linspace(np.min(time), np.max(time), 500)

    fit_func = lambda t, mean, A_0, A_t, tau_0, tau_t, BG: \
            np.log(models.lifetime_spectrum(t, mean, width, A_0, A_t, tau_0, tau_t, BG))
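    # The fit is carried out in log space: both the model (above) and the
    # counts (below) go through np.log, which evens out the weight of the
    # long, low-count tail in the least-squares fit.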

    results = []

    sel1 = (10.92 < time) & (time < 11.58)
    sel2 = (13.11 < time) & (time < 22)
    sels = [sel1, sel2]

    x1 = np.linspace(np.min(time[sel1]), np.max(time[sel1]), 10)
    x2 = np.linspace(np.min(time[sel2]), np.max(time[sel2]), 10)

    for sample_id in range(BOOTSTRAP_SAMPLES):
        print('Bootstrap sample', sample_id, 'running …')

        boot_counts = bootstrap.redraw_count(counts)

        lin_lifetimes = []
        lin_results = []
        for sel_lin, x_lin in zip(sels, [x1, x2]):
            popt_lin, pconv_lin = op.curve_fit(exp_decay,
                                               time[sel_lin],
                                               boot_counts[sel_lin],
                                               p0=[1e5, 0.3])
            y_lin = exp_decay(x_lin, *popt_lin)

            lin_results.append(y_lin)
            lin_results.append(popt_lin)
            lin_results.append(1 / popt_lin[1])
            lin_lifetimes.append(1 / popt_lin[1])

        sel = (10 < time) & (time < 50) & (boot_counts > 0)

        # Use the lifetimes from the exponential pre-fits as starting values
        # for tau_0 and tau_t in the full spectrum fit.
        p0 = [10.5, 13e3, 34e2] + lin_lifetimes + [2]
        popt, pconv = op.curve_fit(fit_func,
                                   time[sel],
                                   np.log(boot_counts[sel]),
                                   p0=p0)
        mean, A_0, A_t, tau_0, tau_t, BG = popt

        intens_0 = A_0 / (A_0 + A_t)
        intens_t = A_t / (A_0 + A_t)
        tau_bar = intens_0 * tau_0 + intens_t * tau_t
        y = np.exp(fit_func(x, *popt))
        tau_f = 1 / (intens_0 / tau_0 - intens_t / tau_t)
        sigma_c = 1 / tau_0 - 1 / tau_f

        results.append([
            tau_0,
            tau_bar,
            tau_f,
            tau_t,
            intens_0,
            intens_t,
            y,
            popt,
            sigma_c,
        ] + lin_results)

    tau_0_dist, tau_bar_dist, tau_f_dist, tau_t_dist, intens_0_dist, \
            intens_t_dist, lifetime_y_dist, lifetime_popt_dist, sigma_c_dist, \
            y_lin1_dist, popt_lin1_dist, tau_lin1_dist, \
            y_lin2_dist, popt_lin2_dist, tau_lin2_dist, \
            = zip(*results)

    tau_0_val, tau_0_err = bootstrap.average_and_std_arrays(tau_0_dist)
    tau_t_val, tau_t_err = bootstrap.average_and_std_arrays(tau_t_dist)
    tau_f_val, tau_f_err = bootstrap.average_and_std_arrays(tau_f_dist)
    tau_bar_val, tau_bar_err = bootstrap.average_and_std_arrays(tau_bar_dist)

    popt_val, popt_err = bootstrap.average_and_std_arrays(lifetime_popt_dist)
    y_val, y_err = bootstrap.average_and_std_arrays(lifetime_y_dist)

    popt_lin1_val, popt_lin1_err = bootstrap.average_and_std_arrays(
        popt_lin1_dist)
    y_lin1_val, y_lin1_err = bootstrap.average_and_std_arrays(y_lin1_dist)
    tau_lin1_val, tau_lin1_err = bootstrap.average_and_std_arrays(
        tau_lin1_dist)
    popt_lin2_val, popt_lin2_err = bootstrap.average_and_std_arrays(
        popt_lin2_dist)
    y_lin2_val, y_lin2_err = bootstrap.average_and_std_arrays(y_lin2_dist)
    tau_lin2_val, tau_lin2_err = bootstrap.average_and_std_arrays(
        tau_lin2_dist)

    print('tau_0', siunitx(tau_0_val, tau_0_err))
    print('tau_t', siunitx(tau_t_val, tau_t_err))
    print('tau_f', siunitx(tau_f_val, tau_f_err))
    print('tau_bar', siunitx(tau_bar_val, tau_bar_err))

    T['acryl_tau_0'] = siunitx(tau_0_val, tau_0_err)
    T['acryl_tau_t'] = siunitx(tau_t_val, tau_t_err)
    T['acryl_tau_f'] = siunitx(tau_f_val, tau_f_err)
    T['acryl_tau_bar'] = siunitx(tau_bar_val, tau_bar_err)

    print('popt', siunitx(popt_val, popt_err))
    print('popt_lin1', siunitx(popt_lin1_val, popt_lin1_err))
    print('popt_lin2', siunitx(popt_lin2_val, popt_lin2_err))
    print('tau_lin1', siunitx(tau_lin1_val, tau_lin1_err))
    print('tau_lin2', siunitx(tau_lin2_val, tau_lin2_err))

    T['acryl_tau_0_lin'] = siunitx(tau_lin1_val, tau_lin1_err)
    T['acryl_tau_t_lin'] = siunitx(tau_lin2_val, tau_lin2_err)

    print(x.shape)
    print(y_lin1_val.shape)

    pl.plot(time, counts, color='black', alpha=0.3)
    counts_smooth = scipy.ndimage.filters.gaussian_filter1d(counts, 8)
    pl.plot(time, counts_smooth, color='green')
    pl.fill_between(x, y_val - y_err, y_val + y_err, alpha=0.5, color='red')
    pl.plot(x, y_val, color='red')
    pl.xlabel('Time / ns')
    pl.ylabel('Counts')
    dandify_plot()
    #pl.xlim((8, 20))
    pl.ylim((0.1, np.max(counts) * 1.1))
    pl.savefig('_build/mpl-lifetime-acryl.pdf')
    pl.savefig('_build/mpl-lifetime-acryl.png')
    pl.yscale('log')
    pl.fill_between(x1,
                    y_lin1_val - y_lin1_err,
                    y_lin1_val + y_lin1_err,
                    alpha=0.5,
                    color='blue')
    pl.fill_between(x2,
                    y_lin2_val - y_lin2_err,
                    y_lin2_val + y_lin2_err,
                    alpha=0.5,
                    color='blue')
    pl.plot(x1, y_lin1_val, color='blue', alpha=0.5)
    pl.plot(x2, y_lin2_val, color='blue', alpha=0.5)
    dandify_plot()
    pl.savefig('_build/mpl-lifetime-acryl-log.pdf')
    pl.savefig('_build/mpl-lifetime-acryl-log.png')
    #pl.show()
    pl.clf()

    np.savetxt('_build/xy/acryl-lifetime-data.tsv',
               np.column_stack([time, counts]))

    np.savetxt('_build/xy/acryl-lifetime-smoothed.tsv',
               np.column_stack([time, counts_smooth]))

    np.savetxt('_build/xy/acryl-lifetime-fit.tsv', np.column_stack([x, y_val]))
    np.savetxt('_build/xy/acryl-lifetime-band.tsv',
               bootstrap.pgfplots_error_band(x, y_val, y_err))

    np.savetxt('_build/xy/acryl-lifetime-fit-lin1.tsv',
               np.column_stack([x1, y_lin1_val]))
    np.savetxt('_build/xy/acryl-lifetime-band-lin1.tsv',
               bootstrap.pgfplots_error_band(x1, y_lin1_val, y_lin1_err))

    np.savetxt('_build/xy/acryl-lifetime-fit-lin2.tsv',
               np.column_stack([x2, y_lin2_val]))
    np.savetxt('_build/xy/acryl-lifetime-band-lin2.tsv',
               bootstrap.pgfplots_error_band(x2, y_lin2_val, y_lin2_err))
Example #5
0
def get_indium_data(T, slope_val, width):
    files = glob.glob('Data/in-*.txt')

    temps_val = []
    temps_err = []

    all_counts = []

    all_tau_0_dist = []
    all_tau_bar_dist = []
    all_tau_f_dist = []
    all_tau_t_dist = []

    all_intens_0_dist = []
    all_intens_t_dist = []

    all_lifetime_y_dist = []
    all_lifetime_popt_dist = []

    all_sigma_c_dist = []

    # Process lifetime curves with bootstrap.
    for sample_id in range(BOOTSTRAP_SAMPLES):
        print('Bootstrap sample', sample_id, 'running …')

        results = []

        for file_ in sorted(files):
            print('Working on lifetime spectrum', file_)

            if sample_id == 0:
                temp_lower, temp_upper = get_temp(file_)
                temp_mean = (temp_lower + temp_upper)/2
                temp_err = temp_upper - temp_mean
                temps_val.append(temp_mean)
                temps_err.append(temp_err)
                print('Mean temperature:', temp_mean)

            data = np.loadtxt(file_)
            channel = data[:, 0]
            time = slope_val * channel
            counts = data[:, 1]
            boot_counts = bootstrap.redraw_count(counts)

            if sample_id == 0:
                all_counts.append(counts)

            x = np.linspace(np.min(time), np.max(time), 2000)

            sel = (9 < time) & (time < 15)

            fit_func = lambda t, mean, A_0, A_t, tau_0, tau_t, BG: \
                    models.lifetime_spectrum(t, mean, width, A_0, A_t, tau_0, tau_t, BG)
            p0 = [10.5, 210, 190, 0.07, 0.8, 0]
            popt, pconv = op.curve_fit(fit_func, time[sel], boot_counts[sel], p0=p0)
            mean, A_0, A_t, tau_0, tau_t, BG = popt

            intens_0 = A_0 / (A_0 + A_t)
            intens_t = A_t / (A_0 + A_t)
            tau_bar = intens_0 * tau_0 + intens_t * tau_t
            y = fit_func(x, *popt)
            tau_f = 1 / (intens_0 / tau_0 - intens_t / tau_t)
            sigma_c = 1 / tau_0 - 1 / tau_f

            results.append([
                tau_0,
                tau_bar,
                tau_f,
                tau_t,
                intens_0,
                intens_t,
                y,
                popt,
                sigma_c,
            ])


        tau_0_list, tau_bar_list, tau_f_list, tau_t_list, intens_0_list, \
                intens_t_list, lifetime_y_list, lifetime_popt_list, sigma_c_list \
                = zip(*results)

        all_tau_0_dist.append(tau_0_list)
        all_tau_bar_dist.append(tau_bar_list)
        all_tau_f_dist.append(tau_f_list)
        all_tau_t_dist.append(tau_t_list)
        all_intens_0_dist.append(intens_0_list)
        all_intens_t_dist.append(intens_t_list)
        all_lifetime_y_dist.append(lifetime_y_list)
        all_lifetime_popt_dist.append(lifetime_popt_list)
        all_sigma_c_dist.append(sigma_c_list)

    T['temps_int'] = []

    # Generate plots with lifetime curves and fits.
    for temp, counts, lifetime_y_dist in zip(temps_val, all_counts, zip(*all_lifetime_y_dist)):
        print('Creating lifetime plot with temp', temp)
        y_val, y_err = bootstrap.average_and_std_arrays(lifetime_y_dist)

        np.savetxt('_build/xy/lifetime-{}K-data.tsv'.format(int(temp)),
                   bootstrap.pgfplots_error_band(time[0:4000], counts[0:4000], np.sqrt(counts[0:4000])))
        np.savetxt('_build/xy/lifetime-{}K-fit.tsv'.format(int(temp)),
                   np.column_stack([x, y_val]))
        np.savetxt('_build/xy/lifetime-{}K-band.tsv'.format(int(temp)),
                   bootstrap.pgfplots_error_band(x, y_val, y_err))

        T['temps_int'].append(int(temp))

        if False:
            pl.fill_between(x, y_val - y_err, y_val + y_err, alpha=0.5, color='red')
            pl.plot(time, counts, color='black')
            counts_smooth = scipy.ndimage.filters.gaussian_filter1d(counts, 8)
            pl.plot(time, counts_smooth, color='green')
            pl.plot(x, y_val, color='red')
            pl.xlabel('Time / ns')
            pl.ylabel('Counts')
            dandify_plot()
            pl.xlim((8, 20))
            pl.savefig('_build/mpl-lifetime-{:04d}K.pdf'.format(int(temp)))
            pl.savefig('_build/mpl-lifetime-{:04d}K.png'.format(int(temp)))
            pl.yscale('log')
            pl.savefig('_build/mpl-lifetime-{:04d}K-log.pdf'.format(int(temp)))
            pl.savefig('_build/mpl-lifetime-{:04d}K-log.png'.format(int(temp)))
            pl.clf()

    T['temps_int'].sort()

    # Plot the lifetimes.
    taus_0_val, taus_0_err = bootstrap.average_and_std_arrays(all_tau_0_dist)
    taus_t_val, taus_t_err = bootstrap.average_and_std_arrays(all_tau_t_dist)
    taus_f_val, taus_f_err = bootstrap.average_and_std_arrays(all_tau_f_dist)
    taus_bar_val, taus_bar_err = bootstrap.average_and_std_arrays(all_tau_bar_dist)
    pl.errorbar(temps_val, taus_0_val, xerr=temps_err, yerr=taus_0_err,
                label=r'$\tau_0$', linestyle='none', marker='+')
    pl.errorbar(temps_val, taus_bar_val, xerr=temps_err, yerr=taus_bar_err,
                label=r'$\bar\tau$', linestyle='none', marker='+')
    pl.errorbar(temps_val, taus_t_val, xerr=temps_err, yerr=taus_t_err,
                label=r'$\tau_\mathrm{t}$', linestyle='none', marker='+')
    pl.errorbar(temps_val, taus_f_val, xerr=temps_err, yerr=taus_f_err,
                label=r'$\tau_\mathrm{f}$', linestyle='none', marker='+')
    pl.xlabel('T / K')
    pl.ylabel(r'$\tau$ / ns')
    dandify_plot()
    pl.savefig('_build/mpl-tau_0-tau_t.pdf')
    pl.savefig('_build/mpl-tau_0-tau_t.png')
    pl.clf()
    np.savetxt('_build/xy/tau_0.tsv',
               np.column_stack([temps_val, taus_0_val, taus_0_err]))
    np.savetxt('_build/xy/tau_t.tsv',
               np.column_stack([temps_val, taus_t_val, taus_t_err]))
    np.savetxt('_build/xy/tau_f.tsv',
               np.column_stack([temps_val, taus_f_val, taus_f_err]))
    np.savetxt('_build/xy/tau_bar.tsv',
               np.column_stack([temps_val, taus_bar_val, taus_bar_err]))

    T['taus_table'] = list(zip(
        siunitx(temps_val, temps_err),
        siunitx(taus_0_val, taus_0_err),
        siunitx(taus_t_val, taus_t_err),
        siunitx(taus_f_val, taus_f_err),
        siunitx(taus_bar_val, taus_bar_err),
    ))

    # Plot relative intensities.
    all_intens_0_val, all_intens_0_err = bootstrap.average_and_std_arrays(all_intens_0_dist)
    all_intens_t_val, all_intens_t_err = bootstrap.average_and_std_arrays(all_intens_t_dist)
    pl.errorbar(temps_val, all_intens_0_val, xerr=temps_err, yerr=all_intens_0_err,
                label=r'$A_0$', linestyle='none', marker='+')
    pl.errorbar(temps_val, all_intens_t_val, xerr=temps_err, yerr=all_intens_t_err,
                label=r'$A_\mathrm{t}$', linestyle='none', marker='+')
    pl.xlabel('T / K')
    pl.ylabel(r'Relative Intensity')
    dandify_plot()
    pl.savefig('_build/mpl-intensities.pdf')
    pl.savefig('_build/mpl-intensities.png')
    pl.clf()

    np.savetxt('_build/xy/intensities-0.tsv',
               np.column_stack([temps_val, all_intens_0_val, all_intens_0_err]))
    np.savetxt('_build/xy/intensities-t.tsv',
               np.column_stack([temps_val, all_intens_t_val, all_intens_t_err]))

    T['intensities_table'] = list(zip(
        siunitx(temps_val, temps_err),
        siunitx(all_intens_0_val, all_intens_0_err),
        siunitx(all_intens_t_val, all_intens_t_err),
    ))

    inv_temps = 1 / np.array(temps_val)
    results = []
    x = np.linspace(np.min(inv_temps), np.max(inv_temps), 1000)
    kelvin_to_eV = 8.617333e-5  # Boltzmann constant k_B in eV/K.
    for all_sigma_c in all_sigma_c_dist:
        p0 = [11, 240]
        print('inv_temps:', inv_temps)
        print('all_sigma_c:', all_sigma_c)
        for leave_out in range(len(all_sigma_c)):
            inv_temps_jack = np.delete(inv_temps, leave_out)
            all_sigma_c_jack = np.delete(all_sigma_c, leave_out)
            popt, pconv = op.curve_fit(exp_decay, inv_temps_jack, all_sigma_c_jack, p0=p0)
            y = exp_decay(x, *popt)
            results.append([
                popt,
                popt[1] * kelvin_to_eV,
                y,
            ])

    popt_dist, Ht_eV_dist, arr_y_dist = zip(*results)

    popt_val, popt_err = bootstrap.average_and_std_arrays(popt_dist)
    print('popt:', siunitx(popt_val, popt_err))
    Ht_eV_val, Ht_eV_err = bootstrap.average_and_std_arrays(Ht_eV_dist)
    arr_y_val, arr_y_err = bootstrap.average_and_std_arrays(arr_y_dist)
    sigma_c_val, sigma_c_err = bootstrap.average_and_std_arrays(all_sigma_c_dist)

    pl.fill_between(x, arr_y_val - arr_y_err, arr_y_val + arr_y_err, alpha=0.5, color='red')
    pl.plot(x, arr_y_val, color='red')
    pl.errorbar(inv_temps, sigma_c_val, yerr=sigma_c_err, marker='+', linestyle='none', color='black')
    pl.xlabel(r'$1 / T$')
    pl.ylabel(r'$\sigma C_t(T)$')
    pl.savefig('_build/mpl-arrhenius.pdf')
    pl.savefig('_build/mpl-arrhenius.png')
    pl.clf()

    np.savetxt('_build/xy/arrhenius-data.tsv',
               np.column_stack([inv_temps, sigma_c_val, sigma_c_err]))
    np.savetxt('_build/xy/arrhenius-fit.tsv',
               np.column_stack([x, arr_y_val]))
    np.savetxt('_build/xy/arrhenius-band.tsv',
               bootstrap.pgfplots_error_band(x, arr_y_val, arr_y_err))

    T['arrhenius_table'] = list(zip(
        siunitx(inv_temps),
        siunitx(sigma_c_val, sigma_c_err),
    ))

    print('Ht_eV:', siunitx(Ht_eV_val, Ht_eV_err))

    T['Ht_eV'] = siunitx(Ht_eV_val, Ht_eV_err)

    pl.errorbar(temps_val, taus_bar_val, xerr=temps_err, yerr=taus_bar_err,
                label=r'$\bar\tau$', linestyle='none', marker='+')
    dandify_plot()
    pl.xlabel('T / K')
    pl.ylabel(r'$\bar\tau$ / ns')
    pl.savefig('_build/mpl-s_curve.pdf')
    pl.savefig('_build/mpl-s_curve.png')
    pl.clf()
    np.savetxt('_build/xy/s_curve.tsv',
               np.column_stack([temps_val, taus_bar_val, taus_bar_err]))
Example #6
0
def get_acryl_data(T, slope_val, width):
    data = np.loadtxt('Data/longlong.txt')
    channel = data[:, 0]
    time = slope_val * channel
    counts = data[:, 1]

    x = np.linspace(np.min(time), np.max(time), 500)

    fit_func = lambda t, mean, A_0, A_t, tau_0, tau_t, BG: \
            np.log(models.lifetime_spectrum(t, mean, width, A_0, A_t, tau_0, tau_t, BG))

    results = []

    sel1 = (10.92 < time) & (time < 11.58)
    sel2 = (13.11 < time) & (time < 22)
    sels = [sel1, sel2]

    x1 = np.linspace(np.min(time[sel1]), np.max(time[sel1]), 10)
    x2 = np.linspace(np.min(time[sel2]), np.max(time[sel2]), 10)

    for sample_id in range(BOOTSTRAP_SAMPLES):
        print('Bootstrap sample', sample_id, 'running …')

        boot_counts = bootstrap.redraw_count(counts)

        lin_lifetimes = []
        lin_results = []
        for sel_lin, x_lin in zip(sels, [x1, x2]):
            popt_lin, pconv_lin = op.curve_fit(exp_decay, time[sel_lin], boot_counts[sel_lin], p0=[1e5, 0.3])
            y_lin = exp_decay(x_lin, *popt_lin)

            lin_results.append(y_lin)
            lin_results.append(popt_lin)
            lin_results.append(1/popt_lin[1])
            lin_lifetimes.append(1/popt_lin[1])

        sel = (10 < time) & (time < 50) & (boot_counts > 0)

        p0 = [10.5, 13e3, 34e2] + lin_lifetimes + [2]
        popt, pconv = op.curve_fit(fit_func, time[sel], np.log(boot_counts[sel]), p0=p0)
        mean, A_0, A_t, tau_0, tau_t, BG = popt

        intens_0 = A_0 / (A_0 + A_t)
        intens_t = A_t / (A_0 + A_t)
        tau_bar = intens_0 * tau_0 + intens_t * tau_t
        y = np.exp(fit_func(x, *popt))
        tau_f = 1 / (intens_0 / tau_0 - intens_t / tau_t)
        sigma_c = 1 / tau_0 - 1 / tau_f

        results.append([
            tau_0,
            tau_bar,
            tau_f,
            tau_t,
            intens_0,
            intens_t,
            y,
            popt,
            sigma_c,
        ] + lin_results)
        
    tau_0_dist, tau_bar_dist, tau_f_dist, tau_t_dist, intens_0_dist, \
            intens_t_dist, lifetime_y_dist, lifetime_popt_dist, sigma_c_dist, \
            y_lin1_dist, popt_lin1_dist, tau_lin1_dist, \
            y_lin2_dist, popt_lin2_dist, tau_lin2_dist, \
            = zip(*results)

    tau_0_val, tau_0_err = bootstrap.average_and_std_arrays(tau_0_dist)
    tau_t_val, tau_t_err = bootstrap.average_and_std_arrays(tau_t_dist)
    tau_f_val, tau_f_err = bootstrap.average_and_std_arrays(tau_f_dist)
    tau_bar_val, tau_bar_err = bootstrap.average_and_std_arrays(tau_bar_dist)

    popt_val, popt_err = bootstrap.average_and_std_arrays(lifetime_popt_dist)
    y_val, y_err = bootstrap.average_and_std_arrays(lifetime_y_dist)

    popt_lin1_val, popt_lin1_err = bootstrap.average_and_std_arrays(popt_lin1_dist)
    y_lin1_val, y_lin1_err = bootstrap.average_and_std_arrays(y_lin1_dist)
    tau_lin1_val, tau_lin1_err = bootstrap.average_and_std_arrays(tau_lin1_dist)
    popt_lin2_val, popt_lin2_err = bootstrap.average_and_std_arrays(popt_lin2_dist)
    y_lin2_val, y_lin2_err = bootstrap.average_and_std_arrays(y_lin2_dist)
    tau_lin2_val, tau_lin2_err = bootstrap.average_and_std_arrays(tau_lin2_dist)

    print('tau_0', siunitx(tau_0_val, tau_0_err))
    print('tau_t', siunitx(tau_t_val, tau_t_err))
    print('tau_f', siunitx(tau_f_val, tau_f_err))
    print('tau_bar', siunitx(tau_bar_val, tau_bar_err))

    T['acryl_tau_0'] = siunitx(tau_0_val, tau_0_err)
    T['acryl_tau_t'] = siunitx(tau_t_val, tau_t_err)
    T['acryl_tau_f'] = siunitx(tau_f_val, tau_f_err)
    T['acryl_tau_bar'] = siunitx(tau_bar_val, tau_bar_err)

    print('popt', siunitx(popt_val, popt_err))
    print('popt_lin1', siunitx(popt_lin1_val, popt_lin1_err))
    print('popt_lin2', siunitx(popt_lin2_val, popt_lin2_err))
    print('tau_lin1', siunitx(tau_lin1_val, tau_lin1_err))
    print('tau_lin2', siunitx(tau_lin2_val, tau_lin2_err))

    T['acryl_tau_0_lin'] = siunitx(tau_lin1_val, tau_lin1_err)
    T['acryl_tau_t_lin'] = siunitx(tau_lin2_val, tau_lin2_err)

    print(x.shape)
    print(y_lin1_val.shape)

    pl.plot(time, counts, color='black', alpha=0.3)
    counts_smooth = scipy.ndimage.filters.gaussian_filter1d(counts, 8)
    pl.plot(time, counts_smooth, color='green')
    pl.fill_between(x, y_val - y_err, y_val + y_err, alpha=0.5, color='red')
    pl.plot(x, y_val, color='red')
    pl.xlabel('Time / ns')
    pl.ylabel('Counts')
    dandify_plot()
    #pl.xlim((8, 20))
    pl.ylim((0.1, np.max(counts)*1.1))
    pl.savefig('_build/mpl-lifetime-acryl.pdf')
    pl.savefig('_build/mpl-lifetime-acryl.png')
    pl.yscale('log')
    pl.fill_between(x1, y_lin1_val - y_lin1_err, y_lin1_val + y_lin1_err, alpha=0.5, color='blue')
    pl.fill_between(x2, y_lin2_val - y_lin2_err, y_lin2_val + y_lin2_err, alpha=0.5, color='blue')
    pl.plot(x1, y_lin1_val, color='blue', alpha=0.5)
    pl.plot(x2, y_lin2_val, color='blue', alpha=0.5)
    dandify_plot()
    pl.savefig('_build/mpl-lifetime-acryl-log.pdf')
    pl.savefig('_build/mpl-lifetime-acryl-log.png')
    #pl.show()
    pl.clf()

    np.savetxt('_build/xy/acryl-lifetime-data.tsv',
               np.column_stack([time, counts]))

    np.savetxt('_build/xy/acryl-lifetime-smoothed.tsv',
               np.column_stack([time, counts_smooth]))

    np.savetxt('_build/xy/acryl-lifetime-fit.tsv',
               np.column_stack([x, y_val]))
    np.savetxt('_build/xy/acryl-lifetime-band.tsv',
               bootstrap.pgfplots_error_band(x, y_val, y_err))

    np.savetxt('_build/xy/acryl-lifetime-fit-lin1.tsv',
               np.column_stack([x1, y_lin1_val]))
    np.savetxt('_build/xy/acryl-lifetime-band-lin1.tsv',
               bootstrap.pgfplots_error_band(x1, y_lin1_val, y_lin1_err))

    np.savetxt('_build/xy/acryl-lifetime-fit-lin2.tsv',
               np.column_stack([x2, y_lin2_val]))
    np.savetxt('_build/xy/acryl-lifetime-band-lin2.tsv',
               bootstrap.pgfplots_error_band(x2, y_lin2_val, y_lin2_err))
Example #7
0
def job_afb_analysis(T):
    data = np.loadtxt('Data/radiative_corrections.tsv')
    corrections = data[:, 1]

    energies = np.loadtxt('Data/energies.txt')
    data = np.loadtxt('Data/afb.txt')
    negative = data[:, 0]
    positive = data[:, 1]

    results = []
    for i in range(SAMPLES):
        positive_boot = bootstrap.redraw_count(positive)
        negative_boot = bootstrap.redraw_count(negative)

        result = afb_kernel(positive_boot, negative_boot, corrections)
        if result is not None:
            results.append(result)

    afb_corr_dist, sin_sq_dist = zip(*results)

    afb_filt, sin_sq_filt = zip(*[(x[3], y)
                                  for x, y in zip(afb_corr_dist, sin_sq_dist)
                                  if not np.isnan(y)])

    print('afb:', len(afb_corr_dist), len(afb_filt))

    T['sin_sq_bootstrap_acceptance'] = siunitx(
        (1 - len(sin_sq_filt) / len(sin_sq_dist)) * 100)

    afb_val, afb_err = bootstrap.average_and_std_arrays(afb_corr_dist)
    sin_sq_val, sin_sq_err = bootstrap.average_and_std_arrays(sin_sq_filt)

    afb_val, sin_sq_val = afb_kernel(positive, negative, corrections)

    sin_sq_up, sin_sq_down = bootstrap.percentile_arrays(
        sin_sq_filt, sin_sq_val)

    print('sin_sq:', sin_sq_val, sin_sq_err, sin_sq_up, sin_sq_down)

    np.savetxt('_build/xy/afb.tsv',
               np.column_stack([energies, afb_val, afb_err]))

    T['afb_table'] = list(zip(
        siunitx(energies),
        siunitx(afb_val, afb_err),
    ))

    T['sin_sq_afb'] = siunitx(sin_sq_val, sin_sq_err)

    T['sin_sq_afb_asym'] = '{:.3f}^{{+{:.3f}}}_{{-{:.3f}}}'.format(
        sin_sq_val, sin_sq_up, sin_sq_down)

    counts, bins = np.histogram(sin_sq_filt)
    counts = np.array(list(counts) + [counts[-1]])
    print(bins.shape, counts.shape)
    np.savetxt('_build/xy/sin_sq_hist.tsv', np.column_stack([bins, counts]))

    counts, bins = np.histogram([x[3] for x in afb_corr_dist])
    counts = np.array(list(counts) + [counts[-1]])
    print(bins.shape, counts.shape)
    np.savetxt('_build/xy/afb_hist.tsv', np.column_stack([bins, counts]))

    counts, bins = np.histogram(afb_filt, bins=bins)
    counts = np.array(list(counts) + [counts[-1]])
    print(bins.shape, counts.shape)
    np.savetxt('_build/xy/afb_filt_hist.tsv', np.column_stack([bins, counts]))
Example #8
0
def bootstrap_driver(T):
    # Load all the input data from the files.
    lum_data = np.loadtxt('Data/luminosity.txt')
    lum_val = lum_data[:, 0]
    lum_err = lum_data[:, 3]
    radiative_hadrons = np.loadtxt('Data/radiative-hadrons.tsv')
    radiative_leptons = np.loadtxt('Data/radiative-leptons.tsv')
    raw_matrix = np.loadtxt('Data/matrix.txt').T
    mc_sizes = np.loadtxt('Data/monte-carlo-sizes.txt')
    filtered = np.loadtxt('Data/filtered.txt')

    # Some output into the template.
    T['luminosities_table'] = list(
        zip(siunitx(energies), siunitx(lum_val, lum_err)))
    T['radiative_cs_table'] = list(
        zip(
            siunitx(energies),
            siunitx(radiative_hadrons),
            siunitx(radiative_leptons),
        ))

    # Container for the results of each bootstrap run.
    results = []

    for r in range(SAMPLES):
        # Draw new numbers for the matrix.
        boot_matrix = bootstrap.redraw_count(raw_matrix)

        # Draw new luminosities.
        boot_lum_val = np.array(
            [random.gauss(val, err) for val, err in zip(lum_val, lum_err)])

        # Draw new filtered readings.
        boot_readings = bootstrap.redraw_count(filtered)

        # Run the analysis on the resampled data and save the results.
        results.append(
            bootstrap_kernel(mc_sizes, boot_matrix, boot_readings,
                             boot_lum_val, radiative_hadrons,
                             radiative_leptons))

    # `results` is a list with one entry per bootstrap run. That layout is not
    # particularly helpful, since the interesting quantities sit on the second
    # index of the list, while the first index is the bootstrap run index.
    # Therefore we use the `zip(*x)` trick to exchange the two indices: the
    # result is a list of quantities, each of which is itself a list over the
    # bootstrap samples. Using Python tuple assignment, we can then split that
    # (now) outer list into the individual quantities. Each of the variables
    # created here is a list of R bootstrap samples.
    x_dist, masses_dist, widths_dist, cross_sections_dist, y_dist, corr_dist, \
            matrix_dist, inverted_dist, readings_dist, peaks_dist, brs_dist, \
            width_electron_dist, width_flavors_dist, missing_width_dist, \
            width_lepton_dist, neutrino_families_dist, popts_dist, \
            mean_mass_dist, mean_width_dist \
            = zip(*results)

    # We only need one of the lists of the x-values as they are all the same.
    # So take the first and throw the others out.
    x = x_dist[0]

    # The masses and widths returned by `bootstrap_kernel` are each a list of
    # four elements (electrons, muons, tauons, hadrons). The variable
    # `masses_dist` contains R copies of this four-element list, one copy per
    # bootstrap sample. We now average along the bootstrap dimension, i.e. the
    # outermost dimension: for each of the four masses we take the average
    # over the R copies. This gives four masses and four mass errors.
    masses_val, masses_err = bootstrap.average_and_std_arrays(masses_dist)
    widths_val, widths_err = bootstrap.average_and_std_arrays(widths_dist)
    peaks_val, peaks_err = bootstrap.average_and_std_arrays(peaks_dist)
    brs_val, brs_err = bootstrap.average_and_std_arrays(brs_dist)

    T['brs'] = siunitx(brs_val[0:3], brs_err[0:3])

    # Format masses and widths for the template.
    T['lorentz_fits_table'] = list(
        zip(
            display_names,
            siunitx(masses_val, masses_err),
            siunitx(widths_val, widths_err),
            siunitx(peaks_val, peaks_err),
        ))

    width_electron_val, width_electron_err = bootstrap.average_and_std_arrays(
        width_electron_dist)
    width_flavors_val, width_flavors_err = bootstrap.average_and_std_arrays(
        width_flavors_dist)

    T['width_electron_mev'] = siunitx(width_electron_val * 1000,
                                      width_electron_err * 1000)
    T['width_flavors_mev'] = siunitx(width_flavors_val * 1000,
                                     width_flavors_err * 1000)

    missing_width_val, missing_width_err = bootstrap.average_and_std_arrays(
        missing_width_dist)
    width_lepton_val, width_lepton_err = bootstrap.average_and_std_arrays(
        width_lepton_dist)
    neutrino_families_val, neutrino_families_err = bootstrap.average_and_std_arrays(
        neutrino_families_dist)

    T['missing_width_mev'] = siunitx(missing_width_val * 1000,
                                     missing_width_err * 1000)
    T['width_lepton_mev'] = siunitx(width_lepton_val * 1000,
                                    width_lepton_err * 1000)
    T['neutrino_families'] = siunitx(neutrino_families_val,
                                     neutrino_families_err)

    # Format original counts for the template.
    val, err = bootstrap.average_and_std_arrays(readings_dist)
    T['counts_table'] = []
    for i in range(7):
        T['counts_table'].append(
            [siunitx(energies[i])] +
            siunitx(val[i, :], err[i, :], allowed_hang=10))

    # Format corrected counts for the template.
    val, err = bootstrap.average_and_std_arrays(corr_dist)
    T['corrected_counts_table'] = []
    for i in range(7):
        T['corrected_counts_table'].append(
            [siunitx(energies[i])] +
            siunitx(val[i, :], err[i, :], allowed_hang=10))

    # Format matrix for the template.
    matrix_val, matrix_err = bootstrap.average_and_std_arrays(matrix_dist)
    T['matrix'] = []
    for i in range(4):
        T['matrix'].append([display_names[i]] + siunitx(
            matrix_val[i, :] * 100, matrix_err[i, :] * 100, allowed_hang=10))

    # Format inverted matrix for the template.
    inverted_val, inverted_err = bootstrap.average_and_std_arrays(
        inverted_dist)
    T['inverted'] = []
    for i in range(4):
        T['inverted'].append([display_names[i]] + list(
            map(
                number_padding,
                siunitx(
                    inverted_val[i, :], inverted_err[i, :], allowed_hang=10))))

    # Format cross sections for the template.
    cs_val, cs_err = bootstrap.average_and_std_arrays(cross_sections_dist)
    T['cross_sections_table'] = []
    for i in range(7):
        T['cross_sections_table'].append([siunitx(energies[i])] +
                                         siunitx(cs_val[:, i], cs_err[:, i]))

    # Build error band for pgfplots.
    y_list_val, y_list_err = bootstrap.average_and_std_arrays(y_dist)
    for i, name in zip(itertools.count(), names):
        # Extract the y-values for the given decay type.
        y_val = y_list_val[i, :]
        y_err = y_list_err[i, :]

        # Store the data for pgfplots.
        np.savetxt('_build/xy/cross_section-{}s.tsv'.format(name),
                   np.column_stack([energies, cs_val[i, :], cs_err[i, :]]))
        np.savetxt('_build/xy/cross_section-{}s-band.tsv'.format(name),
                   bootstrap.pgfplots_error_band(x, y_val, y_err))
        np.savetxt('_build/xy/cross_section-{}s-fit.tsv'.format(name),
                   np.column_stack((x, y_val)))

    popts_val, popts_err = bootstrap.average_and_std_arrays(popts_dist)
    T['chi_sq'] = []
    T['chi_sq_red'] = []
    T['p'] = []
    for i in range(4):
        residuals = cs_val[i, :] - propagator(energies, *popts_val[i, :])
        chi_sq = np.sum((residuals / cs_err[i, :])**2)
        dof = len(residuals) - 1 - len(popts_val[i, :])
        p = 1 - scipy.stats.chi2.cdf(chi_sq, dof)

        print('chi_sq', chi_sq, chi_sq / dof, p)
        T['chi_sq'].append(siunitx(chi_sq))
        T['chi_sq_red'].append(siunitx(chi_sq / dof))
        T['p'].append(siunitx(p))

    T['confidence_table'] = list(
        zip(
            display_names,
            T['chi_sq'],
            T['chi_sq_red'],
            T['p'],
        ))

    mean_mass_val, mean_mass_err = bootstrap.average_and_std_arrays(
        mean_mass_dist)
    mean_width_val, mean_width_err = bootstrap.average_and_std_arrays(
        mean_width_dist)

    T['mean_mass'] = siunitx(mean_mass_val, mean_mass_err)
    T['mean_width'] = siunitx(mean_width_val, mean_width_err)