import unittest

from mock import patch, sentinel

from sapphire.simulations.ldf import KascadeLdf


class KascadeLdfTest(unittest.TestCase):
    def setUp(self):
        self.ldf = KascadeLdf()

    def test_cache_c_s_value(self):
        # change s value
        self.ldf._s += .1

        self.ldf._cache_c_s_value()
        expected = self.ldf._c(self.ldf._s)

        self.assertEqual(expected, self.ldf._c_s)

    def test_init_stores_Ne_and_s(self):
        Ne = sentinel.Ne
        s = sentinel.s

        with patch.object(KascadeLdf, '_cache_c_s_value') as mock_cache:
            ldf = KascadeLdf(Ne, s)

        self.assertIs(ldf._Ne, Ne)
        self.assertIs(ldf._s, s)

    def test_init_calls_cache_c_s_value(self):
        with patch.object(KascadeLdf, '_cache_c_s_value') as mock_cache:
            KascadeLdf()
            mock_cache.assert_called_once_with()
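
The behaviour these tests pin down can be exercised directly; a minimal sketch (assuming the `sapphire.simulations.ldf.KascadeLdf` API used throughout these examples, whose `calculate_ldf_value` signature varies between versions):

from sapphire.simulations.ldf import KascadeLdf

ldf = KascadeLdf(Ne=1e5, s=1.8)       # shower size and age, stored on init
print(ldf._c_s)                       # normalization c(s), cached by __init__
print(ldf.calculate_ldf_value(10.0))  # particle density [m^-2] at r = 10 m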
Example #6
import matplotlib.pyplot as plt
import numpy as np

from sapphire import clusters
from sapphire.simulations.ldf import KascadeLdf


class ToyMonteCarlo(object):
    def main(self):
        global weights, densities, weighted_densities
        plt.figure()

        cluster = clusters.SingleStation()
        self.station = cluster.stations[0]

        R = np.linspace(0, 100, 100)
        densities = []
        weights = []
        for E in np.linspace(1e13, 1e17, 10000):
            relative_flux = E ** -2.7  # E^-2.7 cosmic-ray flux spectrum
            # Shower size: Ne ~ 10^4.8 at E = 10^15 eV, scaling linearly with E
            Ne = 10 ** (np.log10(E) - 15 + 4.8)
            self.ldf = KascadeLdf(Ne)
            min_dens = self.calculate_minimum_density_for_station_at_R(R)

            weights.append(relative_flux)
            densities.append(min_dens)
        weights = np.array(weights)
        densities = np.array(densities).T

        weighted_densities = (np.sum(weights * densities, axis=1) /
                              np.sum(weights))
        plt.plot(R, weighted_densities)
        plt.yscale('log')
        plt.ylabel("Min. density [m$^{-2}$]")
        plt.xlabel("Core distance [m]")
        plt.axvline(5.77)
        plt.show()

    def calculate_minimum_density_for_station_at_R(self, R):
        densities = self.calculate_densities_for_station_at_R(R)
        return np.min(densities, axis=0)

    def calculate_densities_for_station_at_R(self, R):
        densities = []
        for detector in self.station.detectors:
            densities.append(self.calculate_densities_for_detector_at_R(
                                detector, R))
        return np.array(densities)

    def calculate_densities_for_detector_at_R(self, detector, R):
        # Place the shower core at (0, R) for each core distance R
        x = 0
        y = R
        x0, y0 = detector.get_xy_coordinates()

        r = np.sqrt((x - x0) ** 2 + (y - y0) ** 2)
        return self.ldf.calculate_ldf_value(r)
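
A minimal driver for the class above (an assumption; the original script would have run something equivalent):

if __name__ == '__main__':
    ToyMonteCarlo().main()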
import itertools

from numpy import array, exp, interp, linspace, mean
from pylab import figure, plot, scatter, xlabel, xlim, ylabel, ylim
from scipy.optimize import curve_fit

from sapphire import clusters
from sapphire.simulations.ldf import KascadeLdf

import utils  # local helper module from the original analysis scripts


def plot_N_vs_R(data):
    stations = range(501, 507)
    station_ids = range(6)
    cluster = clusters.ScienceParkCluster(stations)

    c_index = data.root.coincidences.c_index
    observables = data.root.coincidences.observables

    figure()
    #clf()
    global c_x, c_y
    if 'c_x' in globals():
        scatter(c_x, c_y)
    else:
        stations_in_coincidence = []
        for coincidence_events in c_index:
            stations = [observables[u]['station_id'] for u in
                        coincidence_events]
            stations_in_coincidence.append(stations)

        c_x = []
        c_y = []
        for station1, station2 in itertools.combinations(station_ids, 2):
            condition = [station1 in u and station2 in u for u in
                         stations_in_coincidence]
            N = sum(condition)
            R, phi = cluster.calc_r_and_phi_for_stations(station1, station2)
            scatter(R, N)
            c_x.append(R)
            c_y.append(N)
            print(R, N, station1, station2)

    ldf = KascadeLdf()
    R = linspace(100, 500)
    E = linspace(1e14, 1e19, 100)
    F = E ** -2.7
    N = []
    for r in R:
        x = []
        for f, e in zip(F, E):
            Ne = e / 1e15 * 10 ** 4.8
            density = ldf.get_ldf_value_for_size(r, Ne)
            prob = 1 - exp(-.5 * density)
            x.append(f * prob)
        N.append(mean(x))
    N = array(N)
    f = lambda x, S: S * interp(x, R, N)
    c_x = array(c_x)
    c_y = array(c_y)
    # WTF wrong with point at slightly less than 100 m? 501 / 502??
    sc_x = c_x.compress(c_x >= 100)
    sc_y = c_y.compress(c_x >= 100)
    popt, pcov = curve_fit(f, sc_x, sc_y, p0=(1e45,))
    plot(R, f(R, popt[0]))
    #ylim(0, 150000)
    ylim(0, 500000)
    xlim(0, 500)

    xlabel("Distance [m]")
    ylabel("Number of coincidences")

    utils.saveplot()
    utils.savedata([sc_x, sc_y], suffix='data')
    utils.savedata([R, f(R, popt[0])], suffix='fit')
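
A sketch of how this function might be invoked (the file name is hypothetical; `plot_N_vs_R` expects a PyTables file containing a /coincidences group with c_index and observables tables):

import tables

with tables.open_file('coincidences.h5', 'r') as data:
    plot_N_vs_R(data)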
Example #11
        prob_temp = []
        for distance in core_distances:
            prob_temp.append(P_2(ldf, distance, size))
        probabilities.append(prob_temp)
    probabilities = array(probabilities)

    plot = Plot('semilogx')

    low = []
    mid = []
    high = []
    for p in probabilities:
        # Using `1 -` to ensure x (i.e. p) is increasing.
        low.append(interp(1 - 0.10, 1 - p, core_distances))
        mid.append(interp(1 - 0.50, 1 - p, core_distances))
        high.append(interp(1 - 0.90, 1 - p, core_distances))
    plot.plot(low, energies, linestyle='densely dotted', mark=None)
    plot.plot(mid, energies, linestyle='densely dashed', mark=None)
    plot.plot(high, energies, mark=None)
    plot.set_ylimits(13, 20)
    plot.set_xlimits(1., 1e4)

    plot.set_xlabel(r'Core distance [\si{\meter}]')
    plot.set_ylabel(r'Energy [log10(E/\si{\eV})]')
    plot.save_as_pdf('efficiency_distance_energy_' + ldf.__class__.__name__)


if __name__ == "__main__":
    for ldf in [NkgLdf(), KascadeLdf(), EllipsLdf()]:
        plot_E_d_P(ldf)
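
The inverse interpolation in the example above works because `interp` requires increasing x-values: detection probability falls with core distance, so interpolating on 1 - p recovers the distance at a chosen probability level. A self-contained sketch:

import numpy as np

d = np.array([1.0, 10.0, 100.0, 1000.0])  # core distances [m]
p = np.array([0.99, 0.90, 0.40, 0.01])    # detection probability, decreasing
d_50 = np.interp(1 - 0.50, 1 - p, d)      # distance at which p drops to 50%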
Example #12
import os

from functools import partial
from itertools import combinations, product
from multiprocessing import Pool

import matplotlib.pyplot as plt
import numpy as np

from sapphire.clusters import ScienceParkCluster
from sapphire.simulations.ldf import KascadeLdf


class EnergySensitivity(object):
    def __init__(self):
        # Detectors
        stations = [501, 503, 506]
        self.cluster = ScienceParkCluster(stations=stations)

        # Conditions
        self.detection_probability = 0.5
        self.min_detectors = 2
        self.min_stations = 3

        # Shower parameters
        self.ldf = KascadeLdf()
        self.grid_points = 2500
        self.max_radius = 1000
        self.min_energy = 1e12
        self.max_energy = 1e21
        self.start_energy = 1e17
        self.bisections = 11

        # Throw showers in a regular grid around the cluster's center of mass
        xc, yc, _ = self.cluster.calc_center_of_mass_coordinates()
        n = int(np.sqrt(self.grid_points))
        self.xx = np.linspace(-self.max_radius + xc, self.max_radius + xc, n)
        self.yy = np.linspace(-self.max_radius + yc, self.max_radius + yc, n)

    def main(self):
        # Cache detector positions
        for station in self.cluster.stations:
            for detector in station.detectors:
                detector.xy_coordinates = detector.get_xy_coordinates()
        # Results
        self.results = self.get_min_energy_per_bin()

    def show_results(self):
        self.plot_scintillators_in_cluster()
        self.plot_energy_acceptance()
        # self.draw_background_map()

    def get_area_energy(self, energy):
        n_bins = np.sum(self.results < energy)
        bin_area = abs((self.xx[1] - self.xx[0]) * (self.yy[1] - self.yy[0]))
        area = n_bins * bin_area

        return area

    def get_min_energy_per_bin(self):
        # `multi_find_min_energy` is a module-level helper in the original
        # script (not shown here) so that multiprocessing can pickle it.
        worker_pool = Pool()
        temp_multi_find_min_energy = partial(multi_find_min_energy, self)
        results = worker_pool.map(temp_multi_find_min_energy,
                                  product(self.xx, self.yy))
        worker_pool.close()
        worker_pool.join()
        # Serial alternative:
        # results = [temp_multi_find_min_energy(xy)
        #            for xy in product(self.xx, self.yy)]
        results = np.array(results).reshape((len(self.xx), len(self.yy))).T

        return results

    def find_min_energy(self, xc, yc):
        # Use bisection to quickly get the final energy
        energy = self.start_energy
        lo = self.min_energy
        hi = self.max_energy
        for _ in range(self.bisections):
            n_electrons = 10**(np.log10(energy) - 15 + 4.8)
            station_densities = self.calculate_densities_for_cluster(
                xc, yc, n_electrons)
            p_cluster = self.detection_probability_for_cluster(
                station_densities)
            if p_cluster == self.detection_probability:
                break
            elif p_cluster < self.detection_probability:
                lo = energy
            else:
                hi = energy
            energy = 10**((np.log10(lo) + np.log10(hi)) / 2.0)

        return energy
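
    # Note on precision (added comment): the bisection runs in log10(E), so
    # each step roughly halves the remaining interval in log space.  Starting
    # from [1e12, 1e21] (9 decades), `bisections = 11` narrows the threshold
    # energy to ~9 / 2**11 ≈ 0.004 decades, i.e. about 1% in energy.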

    def detection_probability_for_cluster(self, station_densities):
        """Determine the probability of a coincidence

        Calculate the probability of the requested coincidence.  First the
        probability of a good detection in each station is determined.  Then
        the probability that at least `min_stations` stations detect the
        shower is calculated.

        :param station_densities: list of densities at each detector in
                                  each of the stations.
        :return: probability of a coincidence.

        """
        if len(station_densities) < self.min_stations:
            # Too few stations
            return 0

        p_stations = [
            self.detection_probability_for_station(detector_densities)
            for detector_densities in station_densities
        ]
        p0_stations = [1. - p for p in p_stations]
        p_cluster = self.calculate_p(p_stations, p0_stations,
                                     self.min_stations)

        return p_cluster

    def detection_probability_for_station(self, detector_densities):
        """Determine the probability of a trigger

        Calculate the probability of the requested detection using Poisson
        statistics. A detector is marked as hit when at least one particle
        passes through it. At least `min_detectors` detectors need to be hit
        for the station to count towards a good detection.

        :param detector_densities: list of densities at each detector in
                                   the station.
        :return: detection probability for the station.

        """
        if len(detector_densities) < self.min_detectors:
            # Too few detectors
            return 0

        p0_detectors = [self.p0(density) for density in detector_densities]
        p_detectors = [1. - p0 for p0 in p0_detectors]
        p_station = self.calculate_p(p_detectors, p0_detectors,
                                     self.min_detectors)
        return p_station

    def calculate_p(self, p, p0, min_n):
        """Probability that at least `min_n` of the `len(p)` elements trigger

        Sums, over every combination of at least `min_n` triggering
        elements, the product of trigger probabilities `p` and no-trigger
        probabilities `p0`.

        """
        n_p = len(p)
        p_total = 0
        for n in range(min_n, n_p + 1):
            for i in combinations(range(n_p), n):
                p_combination = 1.
                for j in range(n_p):
                    if j in i:
                        # Probability of trigger
                        p_combination *= p[j]
                    else:
                        # Probability of no trigger
                        p_combination *= p0[j]
                p_total += p_combination

        return p_total
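
    # Worked check (added example): for three stations with equal trigger
    # probability p and min_n = 2, the sum above reduces to the closed form
    # 3 * p**2 * (1 - p) + p**3; e.g. calculate_p([0.5] * 3, [0.5] * 3, 2)
    # returns 0.375 + 0.125 = 0.5.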

    def p(self, detector_density):
        """Chance of at least one particle in detector"""

        return 1.0 - self.p0(detector_density)

    def p0(self, detector_density):
        """Chance of detecting no particle in a detector"""

        return np.exp(-detector_density / 2.)
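
    # Note (added): a HiSPARC detector has an area of 0.5 m^2, so a particle
    # density rho [m^-2] gives a Poisson mean of rho / 2 particles in the
    # detector; the probability of zero particles is exp(-rho / 2).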

    def calculate_densities_for_cluster(self, x, y, n_electrons):
        densities = [
            self.calculate_densities_for_station(station, x, y, n_electrons)
            for station in self.cluster.stations
        ]

        return densities

    def calculate_densities_for_station(self, station, x, y, n_electrons):
        densities = [
            self.calculate_densities_for_detector(detector, x, y, n_electrons)
            for detector in station.detectors
        ]

        return densities

    def calculate_densities_for_detector(self, detector, x, y, n_electrons):
        r = self.calculate_detector_core_distance(detector, x, y)
        density = self.ldf.calculate_ldf_value(r, n_electrons)

        return density

    def calculate_detector_core_distance(self, detector, x, y):
        x0, y0 = detector.xy_coordinates
        r = np.sqrt((x - x0)**2 + (y - y0)**2)

        return r

    def plot_scintillators_in_cluster(self):
        # Draw station locations on a map
        for station in self.cluster.stations:
            for detector in station.detectors:
                detector_x, detector_y = detector.get_xy_coordinates()
                plt.scatter(detector_x,
                            detector_y,
                            marker=',',
                            c='r',
                            edgecolor='none',
                            s=6)
            station_x, station_y, station_a = station.get_xyalpha_coordinates()
            plt.scatter(station_x,
                        station_y,
                        marker=',',
                        c='b',
                        edgecolor='none',
                        s=3)

    def plot_energy_acceptance(self):
        # Grid
        min_energy = np.log10(self.min_energy)
        max_energy = np.log10(self.max_energy)
        levels = (max_energy - min_energy) * 3 + 1
        label_levels = (max_energy - min_energy) + 1
        contour = plt.contour(self.xx, self.yy, self.results,
                              np.logspace(min_energy, max_energy, levels))
        plt.clabel(contour,
                   np.logspace(min_energy, max_energy, label_levels),
                   inline=1,
                   fontsize=8,
                   fmt='%.0e')

    def draw_background_map(self):
        self_path = os.path.dirname(__file__)
        map_path = os.path.join(self_path,
                                "backgrounds/ScienceParkMap_1.092.png")

        # Draw the Science Park map to scale (bg_scale meters per pixel)
        background = plt.imread(map_path)
        # Pixel:meter ratio for the OSM zoom level of the Science Park map
        bg_scale = 1.092
        bg_width = background.shape[1] * bg_scale
        bg_height = background.shape[0] * bg_scale
        plt.imshow(background,
                   aspect='equal',
                   alpha=0.5,
                   extent=[-bg_width, bg_width, -bg_height, bg_height])
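
A minimal driver for this class (a sketch, not from the original script; it assumes `multi_find_min_energy` is defined at module level so the multiprocessing pool can pickle it):

if __name__ == '__main__':
    sensitivity = EnergySensitivity()
    sensitivity.main()
    sensitivity.show_results()
    plt.show()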