Example no. 1

import itertools

import numpy as np

from artist import PolarPlot

from sapphire import HiSPARCStations
from sapphire.analysis.direction_reconstruction import DirectAlgorithmCartesian3D


def reconstruct_for_detectors(station, ids, dirrec):
    graph = PolarPlot(use_radians=True)
    times = generate_discrete_times(station, detector_ids=ids)
    detectors = [station.detectors[id].get_coordinates() for id in ids]
    x, y, z = zip(*detectors)

    theta, phi = itertools.izip(*(dirrec.reconstruct_common((0, ) + t, x, y, z)
                                  for t in times))

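    # keep only the results of successful reconstructions (drop NaN values)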
    thetaa = [t for t in theta if not np.isnan(t)]
    phia = [p for p in phi if not np.isnan(p)]
    graph.scatter(phia, thetaa, markstyle='mark size=.5pt')

    graph.set_ylimits(0, np.pi / 2)
    graph.set_yticks([0, np.pi / 6, np.pi / 3, np.pi / 2])
    graph.set_ytick_labels([
        r'$0$', r'$\frac{1}{6}\pi$', r'$\frac{2}{6}\pi$', r'$\frac{1}{2}\pi$'
    ])
    graph.set_ylabel('Zenith [rad]')
    graph.set_xlabel('Azimuth [rad]')
    graph.save_as_pdf('discrete_directions_%d_%s' %
                      (station.number, '_'.join(str(i) for i in ids)))


if __name__ == '__main__':

    dirrec = DirectAlgorithmCartesian3D
    station = HiSPARCStations([STATION]).get_station(STATION)

    for combo in itertools.combinations(range(4), 3):
        reconstruct_for_detectors(station, combo, dirrec)
Example no. 2
import itertools

import tables

from numpy import abs, arange, array, histogram, std
from scipy.optimize import curve_fit

from artist import MultiPlot

from sapphire import HiSPARCStations

DATA_PATH = '/Users/arne/Datastore/station_offsets/'
SIM_PATH = '/Users/arne/Datastore/expected_dt/test_station_dt_spa.h5'

SPA_STAT = [501, 502, 503, 504, 505, 506, 508, 509, 510, 511]
CLUSTER = HiSPARCStations(SPA_STAT)
DAY = 86400.
HALF_DAY = DAY / 2
WEEK = 7 * DAY
FORTNIGHT = 2 * WEEK
XWEEK = 3 * WEEK
MONTH = 30 * DAY
QUARTER = 3 * MONTH
HALFYEAR = 6 * MONTH
YEAR = 365 * DAY


def get_station_dt(data, station):
    """Get the table with time differences (dt) for the given station"""
    return data.get_node('/s%d' % station)
Example no. 3
File: main.py Project: 153957/topaz
import tables

from numpy import arange

from artist import Plot

from sapphire import CoincidenceQuery, HiSPARCStations
from sapphire.analysis.event_utils import relative_detector_arrival_times
from sapphire.utils import distance_between

COIN_DATA = '/Users/arne/Datastore/esd_coincidences/coincidences_n7_120101_140801.h5'
DETECTOR_IDS = [0, 1, 2, 3]
STATIONS = [501, 502, 503, 504, 505, 506, 508, 509]
CLUSTER = HiSPARCStations(STATIONS)
OFFSETS = {
    501: [-1.10338, 0.0000, 5.35711, 3.1686],
    502: [-8.11711, -8.5528, -8.72451, -9.3388],
    503: [-22.9796, -26.6098, -22.7522, -21.8723],
    504: [-15.4349, -15.2281, -15.1860, -16.5545],
    505: [-21.6035, -21.3060, -19.6826, -25.5366],
    506: [-20.2320, -15.8309, -14.1818, -14.1548],
    508: [-26.2402, -24.9859, -24.0131, -23.2882],
    509: [-24.8369, -23.0218, -20.6011, -24.3757]
}
COLORS = {
    501: 'black',
    502: 'red!80!black',
    503: 'blue!80!black',
    504: 'green!80!black',
    505: 'orange!80!black',
    506: 'pink!80!black',
Example no. 4
def distances_netherlands():
    sn = Network(force_stale=True).station_numbers(country=0)
    cluster = HiSPARCStations(sn, force_stale=True, skip_missing=True)
    distances_stations(cluster, name='_netherlands')
Example no. 5
from sapphire import CoincidenceQuery, HiSPARCStations, Station
from sapphire.analysis import event_utils
from sapphire.transformations import geographic

COIN_DATA = '/Users/arne/Datastore/esd_coincidences/sciencepark_n11_150701_151105.h5'
# OFFSETS =  {501: [-1.10338, 0.0000, 5.35711, 3.1686],
#             502: [-8.11711, -8.5528, -8.72451, -9.3388],
#             503: [-22.9796, -26.6098, -22.7522, -21.8723],
#             504: [-15.4349, -15.2281, -15.1860, -16.5545],
#             505: [-21.6035, -21.3060, -19.6826, -25.5366],
#             506: [-20.2320, -15.8309, -14.1818, -14.1548],
#             508: [-26.2402, -24.9859, -24.0131, -23.2882],
#             509: [-24.8369, -23.0218, -20.6011, -24.3757]}
DETECTOR_IDS = [0, 1, 2, 3]
STATIONS = range(501, 512)
CLUSTER = HiSPARCStations(STATIONS)
COLORS = ['black', 'red!80!black', 'green!80!black', 'blue!80!black']


def make_map(cluster=CLUSTER):
    latitudes = []
    longitudes = []

    for station in cluster.stations:
        for detector in station.detectors:
            latitude, longitude, _ = detector.get_lla_coordinates()
            latitudes.append(latitude)
            longitudes.append(longitude)
    return Map(
        (min(latitudes), min(longitudes), max(latitudes), max(longitudes)))
Example no. 6
def get_cluster():
    """Get latest position from API"""

    return HiSPARCStations(STATIONS)
Example no. 7

from artist import Plot

from sapphire import HiSPARCStations

COLORS = ['black', 'red', 'green', 'blue']


def plot_detectors(cluster):
    station = cluster.stations[0]
    detectors = station.detectors
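    # timestamps at which the station or its first detector got new coordinates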
    timestamps = set(station.timestamps).union(detectors[0].timestamps)

    plot = Plot()

    for timestamp in sorted(timestamps):
        cluster.set_timestamp(timestamp)
        for i in range(4):
            x, y = detectors[i].get_xy_coordinates()
            plot.scatter([x], [y], mark='*', markstyle=COLORS[i])
        x, y = station.get_xy_coordinates()
        plot.scatter([x], [y], markstyle='purple')
        # print timestamp, gps_to_datetime(timestamp), x, y

    plot.set_xlabel(r'Easting [\si{\meter}]')
    plot.set_ylabel(r'Northing [\si{\meter}]')
    plot.set_axis_equal()
    plot.save_as_pdf('locations_%d' % station.number)


if __name__ == "__main__":
    for sn in range(501, 512):
        # print sn
        cluster = HiSPARCStations([sn], force_stale=True)
        plot_detectors(cluster)
Example no. 8
def distance_between_stations(s1, s2):
    cluster = HiSPARCStations([s1, s2], force_stale=True)
    xyz = [
        array(s.calc_center_of_mass_coordinates()) for s in cluster.stations
    ]
    return distance(*xyz)
Example no. 9
import warnings

import tables

from sapphire import GroundParticlesSimulation, HiSPARCStations

if __name__ == "__main__":
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        cluster = HiSPARCStations(
            [501, 502, 503, 504, 505, 506, 508, 509, 510, 511])
    corsika_path = 'corsika.h5'

    with tables.open_file('result.h5', 'w') as result:
        sim = GroundParticlesSimulation(corsikafile_path=corsika_path,
                                        max_core_distance=1006.58,
                                        cluster=cluster,
                                        datafile=result,
                                        output_path='/',
                                        N=100000,
                                        seed=153957,
                                        progress=False)
        sim.run()
        sim.finish()
Example no. 10

import os
import re

import numpy as np
import tables

from tqdm import tqdm

from sapphire import HiSPARCStations

def merge(stations, output=None, orig_stations=None, directory='.', verbose=True,
          overwrite=False, reconstruct=False, save_coordinates=False,
          only_original=False, coincidences=1, cluster=None, photontimes=False):
    """
    Merges the simulation data from individual 'the_simulation.h5' files from the
    core.* directories inside a certain directory

    PS: Deze functie is ook een beetje een draak en heeft ook nodig onderhoud nodig.
    Feitelijk gebeurt er niet heel veel: je leest alleen alle the_simulation.h5
    bestanden uit en combineert ze. Het is dan waarschijnlijker ook makkelijker als je
    zelf even iets klust.

    :param stations:        List of stations to use
    :param output:          The output file, by default main_data[503_505_...].h5
    :param orig_stations:   If the simulation files contain more stations than
                            those listed in the stations parameter, list them
                            all here. By default this is None, indicating that
                            the stations are the same
    :param directory:       The directory to look in, by default the current directory
    :param only_original:   If True only look for directories in the format coreXX,
                            else look in every directory
    :param verbose:         If True print more output statements
    :param overwrite:       If True overwrite the Sapphire reconstruction in the
                            individual the_simulation.h5 files
    :param reconstruct:     If True reconstruct directions using Sapphire
    :param save_coordinates: If True store the impact coordinates of the first particle
                            that hit the detector (requires a version of sapphire that
                            saves this information)
    :param coincidences:    Minimum number of coincidences to include
    :param cluster:         A Cluster object, if None creates a Cluster object itself
                            using the list of stations provided.
    :param photontimes:     If True stores the photontimes histogram. Also adds the
                            individual timings to the photontimes, so traces are correctly
                            timed relative to each other.
    :return: nothing
    """

    # process parameters
    STATIONS = stations
    if orig_stations is not None:
        ORIG_STATIONS = orig_stations
    else:
        ORIG_STATIONS = STATIONS

    ORIG_LENGTH = len(ORIG_STATIONS)
    if output is not None:
        output_file = output
    else:
        output_file = 'main_data_%s.h5' % str(STATIONS).replace(', ', '_')

    # if there is only one original station then sapphire will not have
    # included a coincidence table, so just grab all the events from that one
    # station
    IGNORE_COINCIDENCES = ORIG_LENGTH == 1

    combined_regex = "(" + ")|(".join([str(a) for a in STATIONS]) + ")"
    N = len(STATIONS)
    if cluster is None:
        cluster = HiSPARCStations(STATIONS)

    if only_original:
        core_re = re.compile(r"core.*\d$")
    else:
        core_re = re.compile(r"core.*")
    dirs = [os.path.join(directory, o) for o in os.listdir(directory) if
            os.path.isdir(os.path.join(directory, o)) and core_re.match(o) is not None]

    # The pytables class that describes the table to store in the h5 file
    class Traces(tables.IsDescription):
        id = tables.Int32Col()
        N = tables.Int32Col()
        azimuth = tables.Float32Col()
        zenith = tables.Float32Col()
        traces = tables.Int16Col(shape=(len(STATIONS), 4, 80))
        energy = tables.Float32Col()
        timings = tables.Float32Col(shape=(len(STATIONS), 4))
        pulseheights = tables.Int16Col(shape=(len(STATIONS), 4))
        x = tables.Float32Col()
        y = tables.Float32Col()
        z = tables.Float32Col()
        azimuth_rec = tables.Float32Col(dflt=np.nan)
        zenith_rec = tables.Float32Col(dflt=np.nan)
        core_distance = tables.Float32Col()
        core_position = tables.Float32Col(shape=(2,))
        photontimes = tables.Float32Col(shape=(len(STATIONS), 4, 80,))
        arrivaltimes_particles = tables.Float32Col(shape=(len(STATIONS), 4))
        if save_coordinates:
            inslag_coordinates = tables.Float32Col((4,2))
            n_electron_muons = tables.Int16Col(shape=4)

    with tables.open_file(output_file, mode='w',
                          title='Collected data from %s' % STATIONS) as collected_traces:

        # create the table /traces/traces
        group = collected_traces.create_group('/', 'traces',
                                              'Traces with azimuth and zenith information')
        table = collected_traces.create_table(group, 'Traces', Traces, 'Traces')
        row = table.row

        # keep track of numbers in order to print those at the end
        total = 0
        throwing_away = 0
        # loop over all core* directories
        for d in tqdm(dirs):
            # wrap in try except block, because sometimes a core* dir exists,
            # but without the_simulation.h5 file
            try:
                template = '%s/the_simulation.h5' % d
                # open only in append mode if reconstructing direction and if there is
                # a sim.py.e1091201 file in the directory, which indicates that the
                # simulation is done running and we can safely open the file in 'a' mode
                list_of_files = os.listdir(d)
                re_sim = re.compile(r'sim\.py\.e[0-9]*')
                reconstruct_local = False
                if reconstruct:
                    if any(re_sim.match(x) for x in list_of_files):
                        reconstruct_local = True

                if reconstruct_local:
                    fmode = 'a'
                else:
                    fmode = 'r'
                with tables.open_file(template, fmode) as data:
                    if IGNORE_COINCIDENCES and reconstruct_local:
                        # only possible if there is 1 station (for now)

                        station_path = '/cluster_simulations/station_%s/' % STATIONS[0]
                        station = STATIONS[0]
                        rec = ReconstructSimulatedEvents(data, station_path, station,
                                                         verbose=False, overwrite=overwrite,
                                                         progress=False)
                        try:
                            rec.reconstruct_and_store()
                        except Exception:
                            if verbose:
                                print('Already reconstructed')
                        recs = data.get_node(station_path).reconstructions
                    if IGNORE_COINCIDENCES:
                        # create one entry for every event in every station
                        for station in data.root.cluster_simulations:
                            if photontimes:
                                photontimes_table = station.photontimes
                            for station_event in station.events:

                                timings_station = np.array(
                                    [station_event['t1'], station_event['t2'],
                                     station_event['t3'], station_event['t4']])
                                # due to a bug in the simulation a timing of
                                # -999 is sometimes included, so filter these
                                # out (the bug is fixed now, so this should no
                                # longer be necessary except with old data)
                                if filter_timings(timings_station):
                                    # create empty arrays to fill
                                    trace = np.zeros([4, 80], dtype=np.float32)
                                    timings = np.zeros([4], dtype=np.float32)
                                    pulseheights = np.zeros([4], dtype=np.int16)

                                    # fill using data from h5 file
                                    trace_local = station_event['traces']
                                    trace_local[trace_local < -MAX_VOLTAGE] = -MAX_VOLTAGE
                                    trace[:, :] = trace_local
                                    zenith = station_event['zenith']
                                    azimuth = station_event['azimuth']
                                    energy = station_event['shower_energy']
                                    distance_core = station_event['core_distance']

                                    if np.count_nonzero(np.isnan(timings_station)) > 0:
                                        print(timings_station)

                                    # remove the -999 timings and set to 0
                                    timings_station[timings_station < 0] = 0
                                    timings[:] = timings_station
                                    pulseheights_local = station_event['pulseheights']
                                    pulseheights_local[pulseheights_local >
                                                       MAX_VOLTAGE] = MAX_VOLTAGE
                                    pulseheights[:] = pulseheights_local

                                    if np.count_nonzero(
                                            (pulseheights > ((4096 * 0.57 / 10e3) * 1e3 - 1))
                                            & (pulseheights < ((4096 * 0.57 / 10e3) * 1e3 + 1))) > 0:
                                        throwing_away += 1
                                        continue
                                    # write to new h5 file

                                    # do some magic to recreate timings and shift
                                    # photontimes (this is really a bit of a pointless
                                    # exercise, since you should just do this right in
                                    # the simulation anyway)
                                    particle_timings = np.zeros((4,))
                                    earliest_particle = np.inf
                                    non_zero_idx = []
                                    for i, (t, t0) in enumerate(zip(trace, timings)):
                                        old_trigger_delay = 0
                                        for i_local, value in enumerate(t):
                                            if value < -30.0:
                                                old_trigger_delay = i_local * 2.5
                                                break
                                        else:
                                            particle_timings[i] = np.nan
                                            continue
                                        non_zero_idx.append(i)
                                        particle_timings[i] = timings[i] - old_trigger_delay
                                        if particle_timings[i] < earliest_particle:
                                            earliest_particle = particle_timings[i]
                                    particle_timings -= np.nanmin(particle_timings)
                                    row['arrivaltimes_particles'] = particle_timings
                                    if photontimes:
                                        row_photontimes = np.zeros((4, 80))
                                        for i, idx in enumerate(station_event['photontimes_idx']):
                                            if np.isnan(particle_timings[i]):
                                                # there is no incident particle
                                                # or the particles that were
                                                # incident did not produce a
                                                # sufficiently high trace to
                                                # trigger
                                                continue
                                            pt = (photontimes_table[idx] +
                                                  particle_timings[i])
                                            local_hist, _ = np.histogram(
                                                pt, bins=np.linspace(0, 200, 81))
                                            row_photontimes[i, :] = local_hist
                                        row['photontimes'] = row_photontimes

                                    row['traces'] = trace
                                    row['N'] = 1
                                    row['azimuth'] = azimuth
                                    row['zenith'] = zenith
                                    row['energy'] = energy
                                    row['timings'] = timings
                                    # convert zenith and azimuth to x y z
                                    x, y, z = azimuth_zenith_to_cartestian(zenith, azimuth)
                                    row['x'] = x
                                    row['y'] = y
                                    row['z'] = z
                                    if reconstruct:
                                        row['azimuth_rec'] = recs.col('azimuth')[station_event['event_id']]
                                        row['zenith_rec'] = recs.col('zenith')[station_event['event_id']]
                                    row['pulseheights'] = pulseheights
                                    row['core_distance'] = distance_core
                                    row['id'] = total

                                    if save_coordinates:
                                        row['inslag_coordinates'] = station_event[
                                            'coordinates']
                                        row['n_electron_muons'] = \
                                            [station_event["n_electrons1"] +
                                             station_event["n_muons1"],
                                             station_event["n_electrons2"] +
                                             station_event["n_muons2"],
                                             station_event["n_electrons3"] +
                                             station_event["n_muons3"],
                                             station_event["n_electrons4"] +
                                             station_event["n_muons4"]]
                                    row.append()
                                    total += 1
                                else:
                                    throwing_away += 1
                    else:
                        if reconstruct and reconstruct_local:

                            rec = ReconstructSimulatedCoincidences(data,
                                                                   destination='reconstructions',
                                                                   overwrite=overwrite,
                                                                   progress=False)
                            rec.reconstruct_and_store()
                            recs = data.get_node('/coincidences/reconstructions')
                        # recreating coincidences, so per coincidence a list of the
                        # traces etc. per station

                        # for every incoming shower the coincidences are saved
                        # for some reason, but only if there is a hit is the
                        # timestamp > 0

                        # WARNING: I have not used this for a long time, so it
                        # probably is broken (or does not do the same thing as
                        # the single-station case). Basically: do not use.
                        for coin in data.root.coincidences.coincidences.where(
                                'timestamp>0'):
                            if coin['N'] >= coincidences:
                                trace = np.zeros([len(STATIONS), 4, 80], dtype=np.float32)
                                timings = np.zeros([len(STATIONS), 4], dtype=np.float32)
                                pulseheights = np.zeros([len(STATIONS), 4], dtype=np.int16)

                                # the c_index maps the coincidences to the
                                # events in the format [[station_id, event_id],
                                # ...]. You then use the s_index table to look
                                # up the path to the station using the
                                # station_id
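                                # e.g. c_index[coin['id']] == [[0, 5], [2, 5]]
                                # would mean this coincidence combines event 5
                                # of the station at s_index[0] with event 5 of
                                # the station at s_index[2] (values are
                                # illustrative only)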
                                c_index = data.root.coincidences.c_index[coin['id']]
                                for station, event_idx in c_index:
                                    station_path = data.root.coincidences.s_index[station].\
                                        decode('UTF-8')
                                    # make sure the path exists
                                    if re.search(combined_regex, station_path) is not None:
                                        station_event = data.get_node(station_path, 'events')[
                                            event_idx]
                                        station = STATIONS.index(ORIG_STATIONS[station])
                                        trace_local = station_event['traces']
                                        trace_local[trace_local < -MAX_VOLTAGE] = -MAX_VOLTAGE
                                        trace[station, :, :] = trace_local
                                        zenith = station_event['zenith']
                                        azimuth = station_event['azimuth']
                                        energy = station_event['shower_energy']
                                        timings_station = np.array(
                                            [station_event['t1'], station_event['t2'],
                                             station_event['t3'], station_event['t4']])

                                        timings_station[timings_station < 0] = 0
                                        timings[station, :] = timings_station
                                        pulseheights_local = station_event['pulseheights']
                                        pulseheights_local[pulseheights_local > MAX_VOLTAGE] = MAX_VOLTAGE
                                        pulseheights[station, :] = pulseheights_local
                                # due to stupidity I set the cap to 4096*0.57/10e3
                                # mV instead of 4096*0.57/1e3 ...
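                                # the cap works out to 4096 * 0.57 / 10e3 * 1e3
                                # = 233.472, so any pulseheight within 1 of
                                # that value indicates a saturated trace; skip
                                # the whole event in that case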
                                if np.count_nonzero(
                                        (pulseheights > ((4096 * 0.57 / 10e3) * 1e3 - 1))
                                        & (pulseheights < ((4096 * 0.57 / 10e3) * 1e3 + 1))) > 0:
                                    throwing_away += 1
                                    continue

                                row['traces'] = trace
                                row['N'] = coin['N']
                                row['azimuth'] = azimuth
                                row['zenith'] = zenith
                                row['energy'] = energy
                                row['timings'] = timings
                                x, y, z = azimuth_zenith_to_cartestian(zenith, azimuth)
                                row['x'] = x
                                row['y'] = y
                                row['z'] = z
                                if reconstruct:
                                    row['azimuth_rec'] = recs.col('azimuth')[coin['id']]
                                    row['zenith_rec'] = recs.col('zenith')[coin['id']]
                                row['pulseheights'] = pulseheights
                                row['id'] = total
                                row['core_position'] = [coin['x'], coin['y']]

                                row.append()
                                total += 1
                            else:
                                throwing_away += 1
            except Exception as e:
                if verbose:
                    print('Error occurred in %s' % d)
                    print(e)
        table.flush()
        print('Total entries: %d' % total)
        print('Thrown away: %d' % throwing_away)
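
A minimal usage sketch of merge; the directory path and station number are hypothetical:

# combine the core*/the_simulation.h5 files for a single station,
# reconstructing directions and storing the photon-time histograms
merge([501],
      directory='/data/sims/501_1e16',  # hypothetical path
      reconstruct=True,
      photontimes=True)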
Example no. 11
from pprint import PrettyPrinter
from random import sample

from sapphire import HiSPARCStations, Station
from sapphire.analysis.direction_reconstruction import CoincidenceDirectionReconstruction

# SN = [501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511]
SN = [501, 502, 503, 505, 506, 508, 510, 511]
cluster = HiSPARCStations(SN, force_stale=True)
crec = CoincidenceDirectionReconstruction(cluster)
offsets = {sn: Station(sn, force_stale=True) for sn in SN}

ref_sn = 501
station_number = 502

ts0 = 1461542400

print(crec.determine_best_offsets(SN, ts0, offsets))
print(offsets[station_number].station_timing_offset(ref_sn, ts0))
print(offsets[station_number].detector_timing_offset(ts0))

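# determine offsets for a randomly shuffled station order and express them
# relative to the reference station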
offs = crec.determine_best_offsets(sample(SN, len(SN)), ts0, offsets)
reference = offs[ref_sn][1]
offs = {sn: round(off[1] - reference, 1) for sn, off in offs.items()}
pp = PrettyPrinter()
pp.pprint(offs)
Example no. 12
from datetime import datetime

import tables

from artist import Plot

from sapphire import HiSPARCStations
from sapphire.transformations.clock import datetime_to_gps

STATIONS = [502, 503, 504, 505, 506, 508, 509, 510]
DAY = 86400
HALF_DAY = DAY / 2
WEEK = 7 * DAY
FORTNIGHT = 2 * WEEK
XWEEK = 3 * WEEK
MONTH = 30 * DAY
YEAR = 365 * DAY

SPA_STAT = [501, 502, 503, 504, 505, 506, 508, 509, 510]
CLUSTER = HiSPARCStations(SPA_STAT)

DATA_PATH = '/Users/arne/Datastore/station_offsets/'


def get_station_dt(data, station):
    """Get the table with time differences (dt) for the given station"""
    return data.get_node('/s%d' % station)


if __name__ == '__main__':
    t_start = datetime_to_gps(datetime(2010, 1, 1))
    t_end = datetime_to_gps(datetime(2015, 4, 1))

    for i, station in enumerate(STATIONS, 1):
        with tables.open_file(DATA_PATH + 'dt_ref501_%d.h5' % station, 'r') as data:
Example no. 13
def get_cluster():
    """Get latest position from API"""

    return HiSPARCStations(STATIONS, skip_missing=True)
Example no. 14
def get_cluster():
    return HiSPARCStations(STATIONS)
Example no. 15
def do_simulation_small():
    with tables.open_file(RESULT_DATA_SMALL, 'w') as data:
        cluster = HiSPARCStations([501], force_stale=True)
        sim = ModGroundParticlesSimulation(CORSIKA_DATA_SMALL, 100, cluster, data,
                                           N=4000, progress=True)
        sim.run()