from itertools import combinations
from os import extsep, mkdir, path

from sapphire import HiSPARCNetwork, Network
from sapphire.utils import pbar


def update_additional_local_tsv():
    """Get eventtime and timing offset tsv data for all stations

    Assumes LOCAL_BASE, SRC_BASE and the API class are available from the
    surrounding module.
    """

    station_numbers = Network().station_numbers()

    for data_type in ['eventtime', 'detector_timing_offsets']:
        try:
            mkdir(path.join(LOCAL_BASE, data_type))
        except OSError:
            pass
        for number in pbar(station_numbers):
            url = API.src_urls[data_type].format(
                station_number=number, year='', month='', day='', hour='')
            try:
                data = API._retrieve_url(url.strip('/'), base=SRC_BASE)
            except Exception:
                print('Failed to get %s data for station %d'
                      % (data_type, number))
                continue
            data = '\n'.join(d for d in data.split('\n')
                             if len(d) and d[0] != '#')
            if data:
                tsv_path = path.join(LOCAL_BASE,
                                     url.strip('/') + extsep + 'tsv')
                with open(tsv_path, 'w') as tsvfile:
                    tsvfile.write(data)

    data_type = 'station_timing_offsets'
    network = HiSPARCNetwork()

    try:
        mkdir(path.join(LOCAL_BASE, data_type))
    except OSError:
        pass
    for number1, number2 in pbar(combinations(station_numbers, 2)):
        distance = network.calc_distance_between_stations(number1, number2)
        if distance is None or distance > 1e3:
            continue
        try:
            mkdir(path.join(LOCAL_BASE, data_type, str(number1)))
        except OSError:
            pass
        url = API.src_urls[data_type].format(station_1=number1,
                                             station_2=number2)
        try:
            data = API._retrieve_url(url.strip('/'), base=SRC_BASE)
        except Exception:
            print('Failed to get %s data for station pair %d-%d'
                  % (data_type, number1, number2))
            continue
        data = '\n'.join(d for d in data.split('\n') if len(d) and d[0] != '#')
        if data:
            tsv_path = path.join(LOCAL_BASE, url.strip('/') + extsep + 'tsv')
            with open(tsv_path, 'w') as tsvfile:
                tsvfile.write(data)
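
A minimal sketch of how a downloaded file could be read back, assuming the
LOCAL_BASE/eventtime/<station>.tsv layout produced above (the exact relative
path comes from API.src_urls; the station number here is just an example):

from os import path

from numpy import genfromtxt

# Hypothetical path following the layout written by the function above.
tsv_path = path.join(LOCAL_BASE, 'eventtime', '501.tsv')
# Comment lines were stripped on download, so this is plain tab-separated data.
eventtime_data = genfromtxt(tsv_path, delimiter='\t')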
Example #2
import os

import tables
from numpy import histogram, linspace, log, mean
from scipy.stats import linregress

from sapphire import HiSPARCNetwork


def determine_rate(path):
    """Estimate the coincidence rate from the interval distribution"""
    filename = os.path.basename(path)
    pair = tuple(int(s) for s in filename[:-3].split('_'))

    net = HiSPARCNetwork(force_stale=True)
#     if net.calc_distance_between_stations(*pair) > 1e3:
#         print(pair, 'far apart')

    with tables.open_file(path, 'r') as data:
        ets = data.root.coincidences.coincidences.col('ext_timestamp')

    dts = ets[1:] - ets[:-1]
    dts.sort()

    if len(dts) > 25:
        expected_interval = mean(dts[:-5])
    else:
        expected_interval = mean(dts)

    bins = linspace(0, 1.5 * expected_interval, 10)
    c, b = histogram(dts, bins=bins)
    x = (bins[1:] + bins[:-1]) / 2.
    mask = c > 0
    slope, intercept, r_value, _, _ = linregress(x[mask], log(c[mask]))
    # print(pair, expected_interval, 1e9 / expected_interval, r_value ** 2)
    rate = slope * -1e9  # timestamps are in ns: convert fitted slope to Hz
    with tables.open_file(path, 'a') as data:
        data.set_node_attr('/', 'interval_rate', rate)
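
The fit works because for a Poisson process with rate lambda the intervals
between successive events are exponentially distributed, so the histogram of
intervals falls off as exp(-lambda * t) and log(counts) is linear in t with
slope -lambda. Since ext_timestamp is in nanoseconds, rate = -slope * 1e9
converts the fitted slope to Hz. A self-contained check with synthetic
intervals (all names below are local to this sketch):

import numpy as np
from scipy.stats import linregress

rng = np.random.default_rng(42)
true_rate = 0.5  # Hz
# Draw exponential intervals in ns, like differences of ext_timestamp.
dts = rng.exponential(1e9 / true_rate, size=10_000)

bins = np.linspace(0, 1.5 * dts.mean(), 10)
counts, _ = np.histogram(dts, bins=bins)
x = (bins[1:] + bins[:-1]) / 2
mask = counts > 0
slope = linregress(x[mask], np.log(counts[mask])).slope
print(-slope * 1e9)  # recovers approximately 0.5 Hz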
Example #3
def close_triples_in_network(min=MIN_DISTANCE, max=MAX_DISTANCE):
    """Find triples of stations

    The distance between each pair of stations in the triple must lie
    between the min and max values.

    """
    cluster = HiSPARCNetwork(force_stale=True)
    return close_triples_in_cluster(cluster, min, max)
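
close_triples_in_cluster is not part of this excerpt; a minimal sketch of
what it could look like, assuming the cluster exposes a stations list with
number attributes and calc_distance_between_stations as used elsewhere on
this page:

from itertools import combinations


def close_triples_in_cluster(cluster, min_distance, max_distance):
    """Hypothetical helper: triples whose three pair distances are in range"""
    numbers = [station.number for station in cluster.stations]
    triples = []
    for triple in combinations(numbers, 3):
        distances = [cluster.calc_distance_between_stations(a, b)
                     for a, b in combinations(triple, 2)]
        if all(d is not None and min_distance < d < max_distance
               for d in distances):
            triples.append(triple)
    return triples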
Example #4
def distances_all_stations():
    cluster = HiSPARCNetwork(force_stale=True)
    distances_stations(cluster, name='_all')
Example #5
"""
Reference stations

501 for Science Park stations, data starting at 2010/1.
"""
import multiprocessing
import re
from glob import glob

from sapphire import HiSPARCNetwork, HiSPARCStations
from sapphire.analysis.calibration import determine_station_timing_offset
from sapphire.transformations.clock import datetime_to_gps

SPA_STAT = [501, 502, 503, 504, 505, 506, 508, 509, 510]
CLUSTER = HiSPARCStations(SPA_STAT)
DATA_PATH = '/Users/arne/Datastore/station_offsets/offsets_ref%d_s%d.tsv'
DAYS = 10

DT_DATAPATH_GLOB = '/Users/arne/Datastore/station_offsets/dt_ref*_*.h5'
DT_DATAPATH = '/Users/arne/Datastore/station_offsets/dt_ref%d_%d.h5'
# Note: this reassignment overrides the HiSPARCStations CLUSTER defined above.
CLUSTER = HiSPARCNetwork()


def get_available_station_pairs():
    paths = glob(DT_DATAPATH_GLOB)
    pairs = [(int(s1), int(s2))
             for s1, s2 in [re.findall(r'\d+', path[:-3]) for path in paths]]
    return pairs


def determine_offsets():
    args = get_available_station_pairs()
    worker_pool = multiprocessing.Pool()
    worker_pool.map(determine_offsets_for_pair, args)
    worker_pool.close()
    worker_pool.join()
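
determine_offsets_for_pair is not included in this excerpt; a hypothetical
skeleton of the worker each pool process runs, using the DT_DATAPATH and
DATA_PATH patterns defined above (the body is a placeholder, not sapphire's
actual implementation):

def determine_offsets_for_pair(pair):
    """Hypothetical worker: processes one (reference, station) pair"""
    ref_station, station = pair
    dt_path = DT_DATAPATH % (ref_station, station)
    offset_path = DATA_PATH % (ref_station, station)
    # ... read the time differences from dt_path, determine the offsets
    # (e.g. with determine_station_timing_offset) and write offset_path.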
Example #6
def close_pairs_in_network(min=MIN_DISTANCE, max=MAX_DISTANCE):
    cluster = HiSPARCNetwork(force_stale=True)
    return close_pairs_in_cluster(cluster, min, max)
Example #7
def plot_distributions(distances, name=''):
    # Signature and setup reconstructed from the calls in __main__ below
    # and the parallel plot_distributions_all; treat as a best guess.
    bins = arange(0, 10.001, 0.25)
    plot = Plot()
    plot.histogram(*histogram(distances, bins))
    plot.set_ylimits(min=0)
    plot.set_xlimits(min=0, max=10)
    plot.set_ylabel('Counts')
    plot.set_xlabel(r'Distance to center mass location [\si{\meter}]')
    plot.set_label('67\%% within %.1fm' % percentile(distances, 67))
    plot.save_as_pdf('gps_distance_cm' + name)


def plot_distributions_all(distances, distances_hor, distances_ver):
    bins = arange(0, 10.001, 0.25)
    plot = Plot()
    # plot.histogram(*histogram(distances, bins))
    plot.histogram(*histogram(distances_hor, bins))
    plot.histogram(*histogram(distances_ver, bins - 0.02), linestyle='gray')
    plot.set_ylimits(min=0)
    plot.set_xlimits(min=0, max=6)
    plot.set_ylabel('Counts')
    plot.set_xlabel(r'Distance to center mass location [\si{\meter}]')
    plot.save_as_pdf('gps_distance_cm_all')


if __name__ == "__main__":
    if "network" not in globals():
        network = HiSPARCNetwork()
    distr, dist_hor, dist_ver = calculate_distances_to_cm(network)
    plot_distributions(distr)
    plot_distributions(dist_hor, '_horizontal')
    plot_distributions(dist_ver, '_vertical')
    plot_distributions_all(distr, dist_hor, dist_ver)
Example #8
    def __init__(self, data):
        self.data = data
        self.singles_dtype = \
            tables.description.dtype_from_descr(self.HisparcSingle)
        self.network = HiSPARCNetwork(force_stale=True)
Example #9
import logging
import re

import numpy as np
import tables

from sapphire import HiSPARCNetwork


class MigrateSingles(object):
    """Migrate singles to the new table format

    If the station has no slave *and* the slave columns are all zero,
    replace the slave columns with `-1` to correctly represent a missing
    slave.

    """

    class HisparcSingle(tables.IsDescription):
        event_id = tables.UInt32Col(pos=0)
        timestamp = tables.Time32Col(pos=1)
        mas_ch1_low = tables.Int32Col(dflt=-1, pos=2)
        mas_ch1_high = tables.Int32Col(dflt=-1, pos=3)
        mas_ch2_low = tables.Int32Col(dflt=-1, pos=4)
        mas_ch2_high = tables.Int32Col(dflt=-1, pos=5)
        slv_ch1_low = tables.Int32Col(dflt=-1, pos=6)
        slv_ch1_high = tables.Int32Col(dflt=-1, pos=7)
        slv_ch2_low = tables.Int32Col(dflt=-1, pos=8)
        slv_ch2_high = tables.Int32Col(dflt=-1, pos=9)

    def __init__(self, data):
        self.data = data
        self.singles_dtype = \
            tables.description.dtype_from_descr(self.HisparcSingle)
        self.network = HiSPARCNetwork(force_stale=True)

    def migrate_table(self, table_path):
        """Migrate datatable to new format. Fix slave columns."""

        logging.info('Migrating table: %s' % table_path)
        group, table_name, sn = self._parse_path(table_path)

        if table_name != 'singles':
            logging.error('Table %s is not `singles`, skipping!' % table_path)
            return None

        tmp_table_name = '_t_%s' % table_name

        try:
            tmptable = self.data.create_table(group, tmp_table_name,
                                              description=self.HisparcSingle)
        except tables.NodeError:
            logging.error('%s/_t_%s exists. Removing.' % (group, table_name))
            self.data.remove_node(group, '_t_%s' % table_name)
            tmptable = self.data.create_table(group, tmp_table_name,
                                              description=self.HisparcSingle)

        table = self.data.get_node(table_path)
        data = table.read()
        data = data.astype(self.singles_dtype)
        if not self._has_slave(sn):
            data = self._mark_slave_columns_as_missing(data)

        tmptable.append(data)
        tmptable.flush()
        self.data.rename_node(table, 'singles_old')
        self.data.rename_node(tmptable, 'singles')

    def _parse_path(self, path):
        """Split '/cluster/s501/singles' into ('/cluster/s501', 'singles', 501)"""

        group, table_name = tables.path.split_path(path)
        sn = int(re.search('[0-9]+$', group).group())
        return group, table_name, sn

    def _has_slave(self, sn):
        """Return True if station `sn` has a slave (i.e. 4 detectors)"""
        try:
            n_detectors = len(self.network.get_station(sn).detectors)
        except AttributeError:
            logging.error('No information in HiSPARCNetwork() for sn %d' % sn)
            n_detectors = 4
        return n_detectors == 4

    def _mark_slave_columns_as_missing(self, table):
        """Replace slave columns with `-1`"""

        cols = ['slv_ch1_low', 'slv_ch2_low', 'slv_ch1_high', 'slv_ch2_high']
        for col in cols:
            if not np.all(table[col] == 0):
                logging.error("Slave columns are not all zero. "
                              "Leaving data untouched!")
                return table

        for col in cols:
            table[col] = -1

        logging.debug("Set all slave columns to `-1`.")
        return table
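
A minimal usage sketch, assuming an HDF5 file with per-station groups that
each contain a singles table (the file name is hypothetical):

import tables

with tables.open_file('data.h5', 'a') as data:
    migrator = MigrateSingles(data)
    # Collect paths first: migrate_table renames and creates nodes, which
    # should not happen while iterating over the tree.
    paths = [node._v_pathname for node in data.walk_nodes('/', 'Table')
             if node.name == 'singles']
    for table_path in paths:
        migrator.migrate_table(table_path)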
Example #10
import os

import tables
from numpy import sqrt

from sapphire import HiSPARCNetwork
from sapphire.utils import pbar


def get_coincidence_count(close_pairs):
    """Collect distances and rates per pair, keyed by total detector count

    Assumes module-level DATAPATH, NO_LAYOUT and min_max_distance_pair.
    """
    network = HiSPARCNetwork(force_stale=True)
    distances = {4: [], 6: [], 8: []}
    distance_errors = {4: [], 6: [], 8: []}
    coincidence_rates = {4: [], 6: [], 8: []}
    interval_rates = {4: [], 6: [], 8: []}
    coincidence_rate_errors = {4: [], 6: [], 8: []}
    pairs = {4: [], 6: [], 8: []}
    for pair in pbar(close_pairs, show=True):
        path = DATAPATH % tuple(pair)
        if not os.path.exists(path):
            continue
        # Do not plot points for stations with known issues
        bad_stations = [22, 507, 1001, 2103, 13007, 20001, 20002, 20003]
        if pair[0] in bad_stations or pair[1] in bad_stations:
            continue

        with tables.open_file(path, 'r') as data:
            try:
                total_exposure = data.get_node_attr('/', 'total_exposure')
                distance = network.calc_distance_between_stations(*pair)
                n_rate = data.get_node_attr('/', 'n_rate')
                interval_rate = data.get_node_attr('/', 'interval_rate')
                n_coincidences = data.get_node_attr('/', 'n_coincidences')
            except AttributeError:
                # print('failed reading attributes', pair)
                continue
        if not n_coincidences:
            continue
        if n_coincidences < 5:
            # Exclude pairs with very few coincidences
            continue
        n = (len(network.get_station(pair[0]).detectors) +
             len(network.get_station(pair[1]).detectors))
        distances[n].append(distance)
        # Distance error due to unknown detector locations or moving stations
        if pair[0] in NO_LAYOUT and pair[1] in NO_LAYOUT:
            gps_layout_error = 20
        elif pair[0] in NO_LAYOUT or pair[1] in NO_LAYOUT:
            gps_layout_error = 10
        else:
            gps_layout_error = 3
        distance_error = [
            abs(d - distance) + gps_layout_error
            for d in min_max_distance_pair(pair)
        ]
        if distance_error[0] > distance:
            distance_error[0] = distance - 1e-15
        distance_errors[n].append(distance_error)

        coincidence_rates[n].append(n_rate)
        interval_rates[n].append(interval_rate)
        err = sqrt(n_coincidences + 1) / total_exposure
        # Prevent plotting issue due to log scale
        rate = n_rate
        if err > rate:
            err = rate - 1e-15
        coincidence_rate_errors[n].append(err)
        pairs[n].append(pair)

    return (distances, coincidence_rates, interval_rates, distance_errors,
            coincidence_rate_errors, pairs)
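
A short usage sketch, assuming close_pairs_in_network from Example #6
provides the input pairs:

close_pairs = close_pairs_in_network()
(distances, coincidence_rates, interval_rates, distance_errors,
 coincidence_rate_errors, pairs) = get_coincidence_count(close_pairs)

# The result dictionaries are keyed on the summed number of detectors
# in the pair (2 + 2, 2 + 4 or 4 + 4).
for n in (4, 6, 8):
    print('%d detectors: %d pairs' % (n, len(pairs[n])))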