# HiSPARC Science Park plotting examples.  The original scripts are
# Python 2; the print statements below have been updated to Python 3
# calls.  A plausible preamble (the exact module paths may differ
# between sapphire and artist versions) is:
#
#     import itertools
#     from pylab import *
#     from scipy.interpolate import spline
#     from scipy.optimize import curve_fit
#     from scipy.stats import scoreatpercentile
#     from sapphire import clusters
#     from sapphire.analysis.direction_reconstruction import DirectionReconstruction
#     from sapphire.simulations.ldf import KascadeLdf
#     from artist import GraphArtist
#     import artist.utils
#
# `utils`, `data`, `TIMING_ERROR`, `std_t` and `my_std_t_for_R` are
# helpers defined elsewhere in the original scripts.


def plot_sciencepark_cluster():
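    """Plot the Science Park cluster layout.

    Detector positions of stations 501-506 are scattered; station centres
    are taken as the mean of their detector positions.  Lines connect the
    stations of the 501/503/506 subcluster.
    """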
    stations = range(501, 507)
    cluster = clusters.ScienceParkCluster(stations)

    figure()
    x_list, y_list = [], []
    x_stations, y_stations = [], []
    for station in cluster.stations:
        x_detectors, y_detectors = [], []
        for detector in station.detectors:
            x, y = detector.get_xy_coordinates()
            x_detectors.append(x)
            y_detectors.append(y)
            scatter(x, y, c='black', s=3)
        x_list.extend(x_detectors)
        y_list.extend(y_detectors)
        x_stations.append(mean(x_detectors))
        y_stations.append(mean(y_detectors))
    axis('equal')

    cluster = clusters.ScienceParkCluster([501, 503, 506])
    pos = []
    for station in cluster.stations:
        x, y, alpha = station.get_xyalpha_coordinates()
        pos.append((x, y))
    for (x0, y0), (x1, y1) in itertools.combinations(pos, 2):
        plot([x0, x1], [y0, y1], 'gray')

    utils.savedata([x_list, y_list])
    utils.saveplot()

    artist.utils.save_data([x_list, y_list], suffix='detectors',
                           dirname='plots')
    artist.utils.save_data([stations, x_stations, y_stations],
                           suffix='stations', dirname='plots')
def boxplot_arrival_times(group, N):
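    """Box-plot-style arrival time differences versus core distance.

    Selects events with at least N particles in detectors 1, 3 and 4 and,
    per core-distance bin, plots the median of |t2 - t1| with the 25-75
    percentile band.  The value -999 is a sentinel for detectors without
    a signal and is filtered out.
    """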
    table = group.E_1PeV.zenith_0

    sel = table.read_where('min_n134 >= N')
    t1 = sel[:]['t1']
    t3 = sel[:]['t3']
    t4 = sel[:]['t4']
    ts = concatenate([t1, t3, t4])
    print "Median arrival time delay over all detected events", median(ts)

    figure()

    bin_edges = linspace(0, 100, 11)
    x = []
    t25, t50, t75 = [], [], []
    for low, high in zip(bin_edges[:-1], bin_edges[1:]):
        query = '(min_n134 >= N) & (low <= r) & (r < high)'
        sel = table.read_where(query)
        t1 = sel[:]['t1']
        t2 = sel[:]['t2']
        ct1 = t1.compress((t1 > -999) & (t2 > -999))
        ct2 = t2.compress((t1 > -999) & (t2 > -999))
        ts = abs(ct2 - ct1)

        t25.append(scoreatpercentile(ts, 25))
        t50.append(scoreatpercentile(ts, 50))
        t75.append(scoreatpercentile(ts, 75))
        x.append((low + high) / 2)

    fill_between(x, t25, t75, color='0.75')
    plot(x, t50, 'o-', color='black')

    xlabel("Core distance [m]")
    ylabel("Arrival time delay [ns]")
    #title(r"$N_{MIP} \geq %d, \quad \theta = 0^\circ$" % N)

    xticks(arange(0, 100.5, 10))

    utils.savedata((x, t25, t50, t75), N)
    utils.saveplot(N)

    graph = GraphArtist()
    graph.shade_region(x, t25, t75)
    graph.plot(x, t50, linestyle=None)
    graph.set_xlabel(r"Core distance [\si{\meter}]")
    graph.set_ylabel(
        r"Arrival time difference $|t_2 - t_1|$ [\si{\nano\second}]")
    graph.set_xlimits(0, 100)
    graph.set_ylimits(min=0)
    artist.utils.save_graph(graph, suffix=N, dirname='plots')
def save_for_kascade_boxplot_core_distances_for_mips(group):
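    """Save core-distance quartiles (r <= 80 m) for N = 1..4 MIPs."""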
    table = group.E_1PeV.zenith_22_5

    r25_list = []
    r50_list = []
    r75_list = []
    x = []
    for N in range(1, 5):
        sel = table.read_where('(min_n134 == N) & (r <= 80)')
        r = sel[:]['r']
        r25_list.append(scoreatpercentile(r, 25))
        r50_list.append(scoreatpercentile(r, 50))
        r75_list.append(scoreatpercentile(r, 75))
        x.append(N)

    utils.savedata((x, r25_list, r50_list, r75_list))
def plot_uncertainty_core_distance(group):
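    """Plot angle reconstruction uncertainty versus core distance.

    For events with exactly N = 2 particles in rings of +/- DR m around
    each core distance R, the uncertainty is half the 17-83 percentile
    spread of the theta and phi errors (the central 66% interval, a
    robust 1-sigma estimate).
    """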
    table = group.E_1PeV.zenith_22_5

    N = 2
    DR = 10

    figure()
    x, y, y2 = [], [], []
    for R in range(0, 81, 20):
        x.append(R)
        events = table.read_where('(min_n134 == N) & (abs(r - R) <= DR)')
        print(len(events), end=' ')
        errors = events['reference_theta'] - events['reconstructed_theta']
        # Make sure -pi < errors < pi
        errors = (errors + pi) % (2 * pi) - pi
        errors2 = events['reference_phi'] - events['reconstructed_phi']
        # Make sure -pi < errors2 < pi
        errors2 = (errors2 + pi) % (2 * pi) - pi
        #y.append(std(errors))
        #y2.append(std(errors2))
        y.append(
            (scoreatpercentile(errors, 83) - scoreatpercentile(errors, 17)) /
            2)
        y2.append(
            (scoreatpercentile(errors2, 83) - scoreatpercentile(errors2, 17)) /
            2)

    print()
    print("R: theta_std, phi_std")
    for u, v, w in zip(x, y, y2):
        print(u, v, w)
    print()
    utils.savedata((x, y, y2))

    # Plots
    plot(x, rad2deg(y), '^-', label="Theta")
    plot(x, rad2deg(y2), 'v-', label="Phi")

    # Labels etc.
    xlabel("Core distance [m] $\pm %d$" % DR)
    ylabel("Angle reconstruction uncertainty [deg]")
    #title(r"$N_{MIP} = %d, \theta = 22.5^\circ$" % N)
    ylim(ymin=0)
    legend(numpoints=1, loc='best')
    utils.saveplot()
    print()
def plot_reconstruction_efficiency_vs_R_for_angles(N):
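    """Plot reconstruction efficiency versus core distance per zenith angle.

    Efficiency per bin is the number of reconstructed events
    (min_n134 >= N) divided by the number of simulated events with at
    least N particles in detectors 1, 3 and 4.  Uses the module-level
    `data` file.
    """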
    group = data.root.reconstructions.E_1PeV

    figure()

    bin_edges = linspace(0, 100, 10)
    x = (bin_edges[:-1] + bin_edges[1:]) / 2.

    all_data = []

    for angle in [0, 22.5, 35]:
        angle_str = str(angle).replace('.', '_')
        shower_group = '/simulations/E_1PeV/zenith_%s' % angle_str
        reconstructions = group._f_get_child('zenith_%s' % angle_str)

        efficiencies = []
        for low, high in zip(bin_edges[:-1], bin_edges[1:]):
            shower_results = []
            for shower in data.list_nodes(shower_group):
                sel_query = '(low <= r) & (r < high)'
                coinc_sel = shower.coincidences.read_where(sel_query)
                ids = coinc_sel['id']
                obs_sel = shower.observables.read_coordinates(ids)
                assert (obs_sel['id'] == ids).all()

                o = obs_sel
                sel = obs_sel.compress((o['n1'] >= N) & (o['n3'] >= N)
                                       & (o['n4'] >= N))
                shower_results.append(len(sel))
            ssel = reconstructions.read_where(
                '(min_n134 >= N) & (low <= r) & (r < high)')
            efficiencies.append(len(ssel) / sum(shower_results))

        all_data.append(efficiencies)
        plot(x, efficiencies, label=r'$\theta = %s^\circ$' % angle)

    xlabel("Core distance [m]")
    ylabel("Reconstruction efficiency")
    #title(r"$N_{MIP} \geq %d$" % N)
    legend()

    utils.saveplot(N)
    utils.savedata(array([x] + all_data).T, suffix=N)
def plot_N_vs_R(data):
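    """Plot the number of coincidences versus station-pair distance.

    Counts coincidences for every pair of Science Park stations (cached
    in the module-level c_x, c_y to avoid recomputation) and fits a
    one-parameter expectation based on the KASCADE lateral distribution
    function.
    """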
    stations = range(501, 507)
    station_ids = range(6)
    cluster = clusters.ScienceParkCluster(stations)

    c_index = data.root.coincidences.c_index
    observables = data.root.coincidences.observables

    figure()
    #clf()
    global c_x, c_y
    if 'c_x' in globals():
        scatter(c_x, c_y)
    else:
        stations_in_coincidence = []
        for coincidence_events in c_index:
            event_stations = [observables[u]['station_id']
                              for u in coincidence_events]
            stations_in_coincidence.append(event_stations)

        c_x = []
        c_y = []
        for station1, station2 in itertools.combinations(station_ids, 2):
            condition = [station1 in u and station2 in u for u in
                         stations_in_coincidence]
            N = sum(condition)
            R, phi = cluster.calc_r_and_phi_for_stations(station1, station2)
            scatter(R, N)
            c_x.append(R)
            c_y.append(N)
            print(R, N, station1, station2)

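    # Expected pair rate versus distance: for each r, average the trigger
    # probability 1 - exp(-0.5 * density) over an E^-2.7 flux, with the
    # particle density given by the KASCADE LDF for a shower size Ne
    # scaled linearly with primary energy (10^4.8 at 1 PeV).  The overall
    # normalization S is fitted below.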
    ldf = KascadeLdf()
    R = linspace(100, 500)
    E = linspace(1e14, 1e19, 100)
    F = E ** -2.7
    N = []
    for r in R:
        x = []
        for f, e in zip(F, E):
            Ne = e / 1e15 * 10 ** 4.8
            density = ldf.get_ldf_value_for_size(r, Ne)
            prob = 1 - exp(-.5 * density)
            x.append(f * prob)
        N.append(mean(x))
    N = array(N)
    def f(x, S):
        return S * interp(x, R, N)
    c_x = array(c_x)
    c_y = array(c_y)
    # The point at slightly less than 100 m (station pair 501/502?) is
    # anomalous; exclude it from the fit.
    sc_x = c_x.compress(c_x >= 100)
    sc_y = c_y.compress(c_x >= 100)
    popt, pcov = curve_fit(f, sc_x, sc_y, p0=(1e45,))
    plot(R, f(R, popt[0]))
    #ylim(0, 150000)
    ylim(0, 500000)
    xlim(0, 500)

    xlabel("Distance [m]")
    ylabel("Number of coincidences")

    utils.saveplot()
    utils.savedata([sc_x, sc_y], suffix='data')
    utils.savedata([R, f(R, popt[0])], suffix='fit')
def plot_uncertainty_zenith(group):
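    """Plot angle reconstruction uncertainty versus shower zenith angle.

    Data points are half the 17-83 percentile spread of the theta and
    phi errors per zenith angle; the curves are the analytic estimate
    from propagating TIMING_ERROR through the station geometry.
    """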
    group = group.E_1PeV
    rec = DirectionReconstruction

    N = 2

    graph = GraphArtist()

    # constants for uncertainty estimation
    # BEWARE: stations must be the same over all reconstruction tables used
    station = group.zenith_0.attrs.cluster.stations[0]
    r1, phi1 = station.calc_r_and_phi_for_detectors(1, 3)
    r2, phi2 = station.calc_r_and_phi_for_detectors(1, 4)

    figure()
    x, y, y2 = [], [], []
    for THETA in 0, 5, 10, 15, 22.5, 30, 35, 45:
        x.append(THETA)
        table = group._f_get_child('zenith_%s' % str(THETA).replace('.', '_'))
        events = table.read_where('min_n134 >= N')
        print(THETA, len(events), end=' ')
        errors = events['reference_theta'] - events['reconstructed_theta']
        # Make sure -pi < errors < pi
        errors = (errors + pi) % (2 * pi) - pi
        errors2 = events['reference_phi'] - events['reconstructed_phi']
        # Make sure -pi < errors2 < pi
        errors2 = (errors2 + pi) % (2 * pi) - pi
        #y.append(std(errors))
        #y2.append(std(errors2))
        y.append((scoreatpercentile(errors, 83) - scoreatpercentile(errors, 17)) / 2)
        y2.append((scoreatpercentile(errors2, 83) - scoreatpercentile(errors2, 17)) / 2)
    plot(x, rad2deg(y), '^', label="Theta")
    graph.plot(x, rad2deg(y), mark='o', linestyle=None)
    # Azimuthal angle undefined for zenith = 0
    plot(x[1:], rad2deg(y2[1:]), 'v', label="Phi")
    graph.plot(x[1:], rad2deg(y2[1:]), mark='*', linestyle=None)
    print()
    print("zenith: theta, theta_std, phi_std")
    for u, v, w in zip(x, y, y2):
        print(u, v, w)
    print()
    utils.savedata((x, y, y2))

    # Uncertainty estimate
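    # The rel_*_errorsq terms give the squared angle error per unit
    # timing variance; averaging over shower azimuth and multiplying by
    # TIMING_ERROR yields the expected angular uncertainty.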
    x = linspace(0, deg2rad(45), 50)
    phis = linspace(-pi, pi, 50)
    y, y2 = [], []
    for t in x:
        y.append(mean(rec.rel_phi_errorsq(t, phis, phi1, phi2, r1, r2)))
        y2.append(mean(rec.rel_theta1_errorsq(t, phis, phi1, phi2, r1, r2)))
    y = TIMING_ERROR * sqrt(array(y))
    y2 = TIMING_ERROR * sqrt(array(y2))
    plot(rad2deg(x), rad2deg(y), label="Estimate Phi")
    graph.plot(rad2deg(x), rad2deg(y), mark=None)
    plot(rad2deg(x), rad2deg(y2), label="Estimate Theta")
    graph.plot(rad2deg(x), rad2deg(y2), mark=None)

    # Labels etc.
    xlabel("Shower zenith angle [deg]")
    graph.set_xlabel(r"Shower zenith angle [\si{\degree}]")
    ylabel("Angle reconstruction uncertainty [deg]")
    graph.set_ylabel(r"Angle reconstruction uncertainty [\si{\degree}]")
    #title(r"$N_{MIP} \geq %d$" % N)
    ylim(0, 100)
    graph.set_ylimits(0, 60)
    legend(numpoints=1)
    utils.saveplot()
    artist.utils.save_graph(graph, dirname='plots')
    print()
def plot_uncertainty_mip(group):
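    """Plot angle reconstruction uncertainty versus minimum particle count.

    Compares the measured 17-83 percentile spreads against a Gaussian
    timing-error model and a Monte Carlo timing spread (my_std_t_for_R),
    both propagated through the station geometry.
    """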
    table = group.E_1PeV.zenith_22_5
    rec = DirectionReconstruction

    # constants for uncertainty estimation
    station = table.attrs.cluster.stations[0]
    r1, phi1 = station.calc_r_and_phi_for_detectors(1, 3)
    r2, phi2 = station.calc_r_and_phi_for_detectors(1, 4)

    R_list = get_median_core_distances_for_mips(group, range(1, 6))

    figure()
    x, y, y2 = [], [], []
    for N in range(1, 5):
        x.append(N)
        events = table.read_where('min_n134 >= %d' % N)
        #query = '(n1 == N) & (n3 == N) & (n4 == N)'
        #events = table.read_where(query)
        print(len(events), end=' ')
        errors = events['reference_theta'] - events['reconstructed_theta']
        # Make sure -pi < errors < pi
        errors = (errors + pi) % (2 * pi) - pi
        errors2 = events['reference_phi'] - events['reconstructed_phi']
        # Make sure -pi < errors2 < pi
        errors2 = (errors2 + pi) % (2 * pi) - pi
        #y.append(std(errors))
        #y2.append(std(errors2))
        y.append((scoreatpercentile(errors, 83) - scoreatpercentile(errors, 17)) / 2)
        y2.append((scoreatpercentile(errors2, 83) - scoreatpercentile(errors2, 17)) / 2)
        print "YYY", rad2deg(scoreatpercentile(errors2, 83) - scoreatpercentile(errors2, 17))

    plot(x, rad2deg(y), '^', label="Theta")
    plot(x, rad2deg(y2), 'v', label="Phi")
    # keep the data points; x, y and y2 are reused for the estimates below
    Sx = x
    Sy = y
    Sy2 = y2
    print()
    print("mip: min_n134, theta_std, phi_std")
    for u, v, w in zip(x, y, y2):
        print(u, v, w)
    print()
    utils.savedata((x, y, y2))

    # Uncertainty estimate
    x = [1, 2, 3, 4, 5]
    phis = linspace(-pi, pi, 50)
    phi_errsq = mean(rec.rel_phi_errorsq(pi / 8, phis, phi1, phi2, r1, r2))
    theta_errsq = mean(rec.rel_theta1_errorsq(pi / 8, phis, phi1, phi2, r1, r2))
    y = TIMING_ERROR * std_t(x) * sqrt(phi_errsq)
    y2 = TIMING_ERROR * std_t(x) * sqrt(theta_errsq)

    mc = my_std_t_for_R(data, x, R_list)
    for u, v in zip(mc, R_list):
        print(v, u, sqrt(u ** 2 + 1.2 ** 2), sqrt((.66 * u) ** 2 + 1.2 ** 2))
    mc = sqrt(mc ** 2 + 1.2 ** 2)
    y3 = mc * sqrt(phi_errsq)
    y4 = mc * sqrt(theta_errsq)

    nx = linspace(1, 4, 100)
    y = spline(x, y, nx)
    y2 = spline(x, y2, nx)
    y3 = spline(x, y3, nx)
    y4 = spline(x, y4, nx)

    plot(nx, rad2deg(y), label="Gauss Phi")
    plot(nx, rad2deg(y2), label="Gauss Theta")
    plot(nx, rad2deg(y3), label="Monte Carlo Phi")
    plot(nx, rad2deg(y4), label="Monte Carlo Theta")
    # Labels etc.
    xlabel("Minimum number of particles")
    ylabel("Angle reconstruction uncertainty [deg]")
    #title(r"$\theta = 22.5^\circ$")
    legend(numpoints=1)
    xlim(.5, 4.5)
    utils.saveplot()
    print()

    graph = GraphArtist()
    graph.plot(Sx, rad2deg(Sy), mark='o', linestyle='only marks')
    graph.plot(Sx, rad2deg(Sy2), mark='*', linestyle='only marks')
    graph.plot(nx, rad2deg(y), mark=None, linestyle='dashed,smooth')
    graph.plot(nx, rad2deg(y2), mark=None, linestyle='dashed,smooth')
    graph.set_xlabel("Minimum number of particles")
    graph.set_ylabel(r"Reconstruction uncertainty [\si{\degree}]")
    graph.set_xticks(range(1, 5))
    graph.set_ylimits(0, 32)
    artist.utils.save_graph(graph, dirname='plots')
    graph.plot(nx, rad2deg(y3), mark=None, linestyle='smooth')
    graph.plot(nx, rad2deg(y4), mark=None, linestyle='smooth')
    artist.utils.save_graph(graph, suffix='full', dirname='plots')