Example #1
def test_t_lists_diff_len8(self):
    # start and end time lists of different lengths must raise a ValueError
    with self.assertRaises(ValueError):
        pu.create_bulk_str('a', 'c', '00', 'BHZ', [
            UTCDateTime(2015, 8, 1, 0, 0, 1),
            UTCDateTime(2016, 8, 1, 0, 0, 1)
        ], [
            UTCDateTime(2015, 8, 1, 0, 0, 1),
            UTCDateTime(2016, 8, 1, 0, 0, 1),
            UTCDateTime(2016, 8, 1, 0, 0, 1)
        ])
Example #2
def test_net_list_stat_str2(self):
    # a network list with a scalar station expands to one tuple per network,
    # using the matching start and end times from the per-network time lists
    exp = [('bla', '*', '00', 'BHZ', UTCDateTime(2015, 8, 1, 0, 0, 1),
            UTCDateTime(2016, 8, 1, 0, 0, 1)),
           ('blub', '*', '00', 'BHZ', UTCDateTime(2015, 8, 1, 0, 0, 1),
            UTCDateTime(2016, 8, 1, 0, 0, 1))]
    self.assertListEqual(
        exp,
        pu.create_bulk_str(['bla', 'blub'], '*', '00', 'BHZ', [
            UTCDateTime(2015, 8, 1, 0, 0, 1),
            UTCDateTime(2015, 8, 1, 0, 0, 1)
        ], [
            UTCDateTime(2016, 8, 1, 0, 0, 1),
            UTCDateTime(2016, 8, 1, 0, 0, 1)
        ]))
Example #3
def download_small_db(
    phase: str, min_epid: float, max_epid: float, model: TauPyModel,
    event_cat: Catalog, tz: float, ta: float, statloc: str,
    rawloc: str, clients: list, network: str, station: str, channel: str,
        saveasdf: bool):
    """
    See the corresponding method :meth:`~pyglimer.waveform.request.Request.\
    download_waveforms_small_db`
    """

    # logging
    logger = logging.getLogger('pyglimer.request')

    # Fall back to wildcards if station and network are None
    station = station or '*'
    network = network or '*'
    # First we download the station metadata so that we can subsequently
    # compute the theoretical arrival times
    clients = pu.get_multiple_fdsn_clients(clients)

    logger.info('Requesting data from the following FDSN servers:\n %s' % str(
        clients))

    bulk_stat = pu.create_bulk_str(network, station, '*', channel, '*', '*')
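    # bulk_stat is a list of (network, station, location, channel, starttime,
    # endtime) tuples, e.g. [('IU', '*', '*', 'BHZ', '*', '*')] (example
    # values, assuming network='IU' and station=None).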

    logger.info('Bulk_stat parameter created.')
    logger.debug('Bulk stat parameters: %s' % str(bulk_stat))

    logger.info('Initialising station response download.')

    # Create Station Output folder
    os.makedirs(statloc, exist_ok=True)

    # Run parallel station loop.
    out = Parallel(n_jobs=-1, prefer='threads')(
        delayed(pu.__client__loop__)(client, statloc, bulk_stat)
        for client in clients)
    inv = pu.join_inv(list(out))

    logger.info(
        'Computing theoretical times of arrival and checking available data.')

    # Now we compute the theoretical arrivals using the events and the station
    # information
    # We collect the request parameters in a dict of lists
    d = {'event': [], 'startt': [], 'endt': [], 'net': [], 'stat': []}
    for net in inv:
        for stat in net:
            logger.info(f"Checking {net.code}.{stat.code}")
            for evt in event_cat:
                try:
                    toa, _, _, _, delta = compute_toa(
                        evt, stat.latitude, stat.longitude, phase, model)
                except (IndexError, ValueError):

                    # occurs when there is no arrival of the phase at stat
                    logger.debug(
                        'No valid arrival found for station %s, ' % stat.code
                        + 'event %s, and phase %s' % (evt.resource_id, phase))
                    continue

                # Already in DB?
                if saveasdf:
                    if wav_in_asdf(net, stat, '*', channel, toa-tz, toa+ta):
                        logger.info(
                            'File already in database. %s ' % stat.code
                            + 'Event: %s' % evt.resource_id)
                        continue
                else:
                    o = (evt.preferred_origin() or evt.origins[0])
                    ot_loc = UTCDateTime(
                        o.time, precision=-1).format_fissures()[:-6]
                    evtlat_loc = str(roundhalf(o.latitude))
                    evtlon_loc = str(roundhalf(o.longitude))
                    folder = os.path.join(
                        rawloc, '%s_%s_%s' % (ot_loc, evtlat_loc, evtlon_loc))
                    fn = os.path.join(folder, '%s.%s.mseed' % (net, stat))
                    if os.path.isfile(fn):
                        logger.info(
                            'File already in database. %s ' % stat.code
                            + 'Event: %s' % evt.resource_id)
                        continue
                # It's new data, so add to request!
                d['event'].append(evt)
                d['startt'].append(toa-tz)
                d['endt'].append(toa+ta)
                d['net'].append(net.code)
                d['stat'].append(stat.code)

    # Create waveform download bulk list
    bulk_wav = pu.create_bulk_str(
        d['net'], d['stat'], '*', channel, d['startt'], d['endt'])
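    # bulk_wav entries mirror bulk_stat, but with concrete UTCDateTime
    # windows around the theoretical arrivals, e.g.
    # ('IU', 'HRV', '*', 'BHZ', UTCDateTime(...), UTCDateTime(...))
    # ('IU' and 'HRV' are assumed example codes).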

    if len(bulk_wav) == 0:
        logger.info('No new data found.')
        return

    # Sort bulk request
    bulk_wav.sort()

    # This almost certainly needs to be split up so that we don't overload
    # the RAM with the downloaded mseeds
    logger.info('Initialising waveform download.')
    logger.debug('The request string looks like this:')
    for _bw in bulk_wav:
        logger.debug(f"{_bw}")

    # Create waveform directories
    os.makedirs(rawloc, exist_ok=True)

    if len(clients) == 1:
        pu.__client__loop_wav__(clients[0], rawloc, bulk_wav, d, saveasdf, inv)
    else:
        Parallel(n_jobs=-1, prefer='threads')(
            delayed(pu.__client__loop_wav__)(
                client, rawloc, bulk_wav, d, saveasdf, inv)
            for client in clients)
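For orientation, a minimal sketch of how download_small_db might be invoked follows; the catalogue query, the client list, the epicentral-distance limits and the time window (tz, ta) are illustrative assumptions rather than values taken from the function above.

from obspy import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.taup import TauPyModel

# Assumed example inputs: one month of M >= 6 events from IRIS
cat = Client('IRIS').get_events(
    starttime=UTCDateTime(2015, 8, 1), endtime=UTCDateTime(2015, 9, 1),
    minmagnitude=6.0)

download_small_db(
    phase='P', min_epid=30.0, max_epid=90.0, model=TauPyModel('iasp91'),
    event_cat=cat, tz=30, ta=120, statloc='output/stations',
    rawloc='output/waveforms', clients=['IRIS'], network='IU', station='*',
    channel='BHZ', saveasdf=False)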
Example #4
def test_other_error(self):
    # this combination of arguments is invalid and must raise a ValueError
    with self.assertRaises(ValueError):
        pu.create_bulk_str(['a', 'b'], 'c', '00', 'BHZ', '*', '*')
Example #5
def test_net_stat_list_diff_len(self):
    # network and station lists of different lengths must raise a ValueError
    with self.assertRaises(ValueError):
        pu.create_bulk_str(['bla', 1], ['blub', 'blib', 0], '00', 'BHZ',
                           '*', '*')
Example #6
def test_net_list_stat_str(self):
    # a network list with a wildcard station gives one tuple per network
    exp = [('bla', '*', '00', 'BHZ', '*', '*'),
           ('blub', '*', '00', 'BHZ', '*', '*')]
    self.assertListEqual(
        exp, pu.create_bulk_str(['bla', 'blub'], '*', '00', 'BHZ', '*',
                                '*'))
Example #7
def test_all_len_x2(self):
    # equal-length network and station lists are combined element-wise
    exp = [('bla', 'blo', '00', 'BHZ', '*', '*')] * 2
    self.assertListEqual(
        exp,
        pu.create_bulk_str(['bla', 'bla'], ['blo', 'blo'], '00', 'BHZ',
                           '*', '*'))
Example #8
def test_all_len_x(self):
    # date-time strings in the input are parsed into UTCDateTime objects
    exp = [('bla', 'bla', '00', 'BHZ', UTCDateTime(
        2015, 8, 1, 0, 0, 1), UTCDateTime(2016, 8, 1, 0, 0, 1))] * 3
    inp = [['bla'] * 3, ['bla'] * 3, '00', 'BHZ',
           ['2015-08-1 00:00:01.0'] * 3, ['2016-08-1 00:00:01.0'] * 3]
    self.assertListEqual(exp, pu.create_bulk_str(*inp))
Example #9
def test_netstatstr(self):
    # all-scalar input produces a single bulk tuple
    exp = [('bla', 'bla', '00', 'BHZ', '*', '*')]
    self.assertListEqual(
        exp, pu.create_bulk_str('bla', 'bla', '00', 'BHZ', '*', '*'))