Code Example #1
File: create_example_files.py Project: trichter/rf
def get_waveforms():
    events = get_events()[::-1]
    client = Client('GFZ')
    stream_raw = Stream()
    stream = RFStream()
    coords = inventory.get_coordinates(seedid[:-1] + 'Z')
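    # Fetch ~9 minutes of waveform data starting ~5 minutes after each event's
    # origin time, then downsample to 5 Hz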
    for i, event in enumerate(events):
        t = event.preferred_origin().time
        args = seedid.split('.') + [t + 4.9 * 60, t + 14.1 * 60]
        s = client.get_waveforms(*args)
        s.trim(t + 5 * 60, t + 14 * 60)
        s.decimate(int(round(s[0].stats.sampling_rate)) // 5, no_filter=True)
        stream_raw.extend(s)
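        # For a few selected events, additionally build short traces trimmed around
        # the P onset with event/station metadata attached (written as SAC below)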
        if i in (0, 2, 4):
            s = s.copy()
            stats = rfstats(station=coords, event=event, dist_range=(20, 95))
            if stats is None:
                continue
            s.trim(stats.onset - 25, stats.onset + 75)
            stats = obj2stats(station=coords, event=event)
            s = RFStream(s)
            for tr in s:
                tr.stats.update(stats)
            stream.extend(s)
    stream_raw.write(wavname, 'MSEED')
    stream.write(wavname2, 'SAC')
Code Example #2
File: rf_process_io.py Project: lid-idsse/hiperseis
def async_write(rfstream_queue, outfile_name, max_buffered=100):
    """Monitors asynchronous queue for data, removes from queue to buffer, then
       flushes buffer intermittently and when queue termination signal is put.

       When None is received on the queue, this is taken as the signal to terminate
       monitoring the queue.

    :param rfstream_queue: Queue into which RFStreams are pushed for writing to file.
    :type rfstream_queue: multiprocessing.Manager.Queue
    :param outfile_name: Name of file into which queued RFStream results are periodically written.
    :type outfile_name: str or Path
    :param max_buffered: Maximum number of RFStreams to buffer before flushing to file, defaults to 100
    :type max_buffered: int, optional
    """
    buffered_streams = []
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    logger.info("Starting async write thread")
    first_write = True
    while True:
        # Passing None into the queue is taken as signal to flush buffer and terminate thread
        rfstream = rfstream_queue.get()

        terminating = (rfstream is None)
        if not terminating:
            buffered_streams.append(rfstream)
        else:
            logger.info("Flushing result buffer...")

        if len(buffered_streams) >= max_buffered or terminating:
            stream = RFStream()
            for rf in buffered_streams:
                stream.extend(rf)
            if first_write:
                mode = 'w'
                first_write = False
            else:
                mode = 'a'
            stream.write(outfile_name, 'H5', mode=mode)
            logger.info("Flushed {} streams to output file {}".format(
                len(buffered_streams), outfile_name))

            while buffered_streams:
                buffered_streams.pop()
                rfstream_queue.task_done()

        if terminating:
            rfstream_queue.task_done()
            break

    logger.info("Terminating async write thread")
Code Example #3
# exclude bad stations
inc_set = list(set([tr.stats.inclination for tr in data]))
data_filtered = RFStream([
    tr for tr in data if tr.stats.inclination in inc_set
    and tr.stats.station not in ['MIJ2', 'MIL2']
])

stream = RFStream()
for stream3c in tqdm(IterMultipleComponents(data_filtered, 'onset', 3)):
    stream3c.detrend('linear').resample(100)
    stream3c.taper(0.01)
    stream3c.filter('bandpass', freqmin=0.01, freqmax=15)
    if len(stream3c) != 3:
        continue
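    # Stash the 'asdf' stats entries and clear them while rf() runs; they are restored below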
    a1 = stream3c[0].stats['asdf']
    a2 = stream3c[1].stats['asdf']
    a3 = stream3c[2].stats['asdf']
    stream3c[0].stats['asdf'] = []
    stream3c[1].stats['asdf'] = []
    stream3c[2].stats['asdf'] = []

    stream3c.rf()
    stream3c[0].stats['asdf'] = a1
    stream3c[1].stats['asdf'] = a2
    stream3c[2].stats['asdf'] = a3
    stream3c.trim2(-25, 75, 'onset')
    stream.extend(stream3c)

stream.write('DATA/7X-rf_qlt', 'H5')
Code Example #4
        return st


# ---+----------Main---------------------------------

if __name__ == '__main__':

    # we use centre of Australia to calculate radius and gather events from 15 to 90 degrees
    lonlat = [133.88, -23.69]

    # Change parameters below
    data = os.path.join('DATA', '')
    invfile = data + '7X-inventory.xml'
    datafile = data + '7X-event_waveforms_for_rf.h5'

    start_time = '2009-12-01 00:00:00'
    end_time = '2011-04-01 00:00:00'
    inventory = read_inventory(invfile)

    # ----------------- End ----------------------

    catalog = get_events(lonlat, UTC(start_time), UTC(end_time)) 

    stream = RFStream()
    with tqdm() as pbar:
        for s in iter_event_data(catalog, inventory, custom_get_waveforms, pbar=pbar):
            # add every trace of this event's stream to the output stream
            stream.extend(s)

    stream.write(datafile, 'H5')
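The get_events helper used above is defined elsewhere in the script; the comment about using the centre of Australia indicates a radius-limited query from 15 to 90 degrees. A possible sketch of such a helper with obspy's FDSN client, assuming the 'ISC' event service and a minimum magnitude of 5.5 (both assumptions, not the project's actual settings):

from obspy.clients.fdsn import Client as FDSNClient

def get_events(lonlat, starttime, endtime, minmag=5.5):
    # Hypothetical reconstruction: gather events 15-90 degrees from the reference point
    client = FDSNClient('ISC')
    return client.get_events(starttime=starttime, endtime=endtime,
                             latitude=lonlat[1], longitude=lonlat[0],
                             minradius=15, maxradius=90, minmagnitude=minmag)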
Code Example #5
    def download_data(self,
                      catalogtxtloc,
                      datafileloc,
                      tot_evnt_stns,
                      rem_evnts,
                      plot_stations=True,
                      plot_events=True,
                      dest_map="./",
                      locations=[""]):

        self.logger.info(f"Total data files to download: {tot_evnt_stns}")
        rem_dl = rem_evnts
        succ_dl, num_try = 0, 0
        rf_stalons, sks_stalons = [], []
        rf_stalats, sks_stalats = [], []
        rf_staNetNames, sks_staNetNames = [], []

        all_stns_df = pd.read_csv(self.inventorytxtfile, sep="|")

        all_sta_lats = all_stns_df['Latitude'].values
        all_sta_lons = all_stns_df['Longitude'].values
        all_sta_nms = all_stns_df['Station'].values
        all_sta_nets = all_stns_df['#Network'].values

        sta_str_list = []
        # Retrieve waveform data for the events
        for slat, slon, stn, net in zip(all_sta_lats, all_sta_lons,
                                        all_sta_nms, all_sta_nets):
            sta_str = f"{net}-{stn}-{slon}-{slat}"
            if sta_str in sta_str_list:
                continue
            else:
                sta_str_list.append(sta_str)

            catfile = catalogtxtloc + f"{net}-{stn}-events-info-{self.method}.txt"
            cattxtnew = catalogtxtloc + f"{net}-{stn}-events-info-available-{self.method}.txt"

            if self.method == 'RF':
                print("\n")
                self.logger.info(
                    f"Searching and downloading data for {self.method}; {net}-{stn}"
                )
                rfdatafile = datafileloc + f"{net}-{stn}-{str(inpRFdict['filenames']['data_rf_suffix'])}.h5"
                # rfdatafile = datafileloc+f"{net}-{stn}-{str(inpRF.loc['data_rf_suffix','VALUES'])}.h5"
                if os.path.exists(catfile) and not os.path.exists(
                        rfdatafile) and tot_evnt_stns > 0:
                    stream = RFStream()
                    df = pd.read_csv(catfile, sep=",")
                    # df = pd.read_csv(catfile,delimiter="\||,", names=['evtime','evlat','evlon','evdp','evmg','evmgtp'],header=None,engine="python")
                    evmg = df['evmg'].values
                    evmgtp = ["Mww" for val in df['evmg']]
                    # evmg = [float(val.split()[0]) for val in df['evmg']]
                    # evmgtp = [str(val.split()[1]) for val in df['evmg']]

                    fcat = open(cattxtnew, 'w')
                    for evtime, evdp, elat, elon, em, emt in zip(
                            df['evtime'], df['evdp'], df['evlat'], df['evlon'],
                            evmg, evmgtp):
                        rem_dl -= 1
                        num_try += 1

                        strm, res, msg = multi_download(
                            self.client,
                            self.inv,
                            net,
                            stn,
                            slat,
                            slon,
                            elat,
                            elon,
                            evdp,
                            evtime,
                            em,
                            emt,
                            fcat,
                            stalons=rf_stalons,
                            stalats=rf_stalats,
                            staNetNames=rf_staNetNames,
                            phase='P',
                            locations=locations)
                        if res:
                            succ_dl += 1

                        if not msg:
                            self.logger.info(
                                f"Event: {evtime}; rem: {rem_dl}/{tot_evnt_stns}; dl: {succ_dl}/{num_try}"
                            )
                        else:
                            self.logger.info(
                                f"{msg}; rem: {rem_dl}/{tot_evnt_stns}; dl: {succ_dl}/{num_try}"
                            )

                        if strm:
                            stream.extend(strm)

                    if not len(stream):
                        self.logger.warning(f"No data {rfdatafile}")
                    stream.write(rfdatafile, 'H5')
                    fcat.close()
                ### Event map plot
                # cattxtnew = catalogtxtloc+f'{net}-{stn}-events-info-rf.txt'
                if os.path.exists(
                        cattxtnew
                ) and plot_events and not os.path.exists(
                        f"{net}-{stn}-{str(inpRFdict['filenames']['events_map_suffix'])}.png"
                ):
                    # if os.path.exists(cattxtnew) and plot_events and not os.path.exists(f"{net}-{stn}-{str(inpRF.loc['events_map_suffix','VALUES'])}.png"):
                    df = pd.read_csv(cattxtnew,
                                     delimiter="\||,",
                                     names=[
                                         'evtime', 'evlat', 'evlon', 'evdp',
                                         'evmg', 'client'
                                     ],
                                     header=None,
                                     engine="python")
                    if df.shape[0]:
                        evmg = [float(val.split()[0]) for val in df['evmg']]
                        event_plot_name = f"{net}-{stn}-{str(inpRFdict['filenames']['events_map_suffix'])}"
                        # event_plot_name=f"{net}-{stn}-{str(inpRF.loc['events_map_suffix','VALUES'])}"
                        if not os.path.exists(dest_map + event_plot_name +
                                              f".{self.fig_frmt}"):
                            self.logger.info(f"Plotting events map " +
                                             event_plot_name +
                                             f".{self.fig_frmt}")
                            events_map(evlons=df['evlon'],
                                       evlats=df['evlat'],
                                       evmgs=evmg,
                                       evdps=df['evdp'],
                                       stns_lon=slon,
                                       stns_lat=slat,
                                       destination=dest_map,
                                       figfrmt=self.fig_frmt,
                                       clon=slon,
                                       outname=f'{event_plot_name}')

            if self.method == 'SKS':
                print("\n")
                self.logger.info(
                    f"Searching and downloading data for {self.method}; {net}-{stn}"
                )

                sksdatafile = datafileloc + f"{net}-{stn}-{str(inpSKSdict['filenames']['data_sks_suffix'])}.h5"
                if os.path.exists(catfile) and not os.path.exists(
                        sksdatafile) and tot_evnt_stns > 0:
                    self.logger.info("Reading events catalog file")
                    stream = RFStream()

                    df = pd.read_csv(catfile, sep=",")
                    # df = pd.read_csv(catfile,delimiter="\||,", names=['evtime','evlat','evlon','evdp','evmg','evmgtp'],header=None,engine="python")
                    evmg = df['evmg'].values
                    evmgtp = ["Mww" for val in df['evmg']]
                    # evmg = [float(val.split()[0]) for val in df['evmg']]
                    # evmgtp = [str(val.split()[1]) for val in df['evmg']]
                    # cattxtnew = catalogtxtloc+f'{net}-{stn}-events-info-sks.txt'
                    fcat = open(cattxtnew, 'w')
                    for i, evtime, evdp, elat, elon, em, emt in zip(
                            range(len(df['evtime'])), df['evtime'], df['evdp'],
                            df['evlat'], df['evlon'], evmg, evmgtp):
                        rem_dl -= 1
                        num_try += 1
                        # self.logger.info(f"Event: {evtime}; rem: {rem_dl}/{tot_evnt_stns}; dl: {succ_dl}/{num_try}")
                        strm, res, msg = multi_download(
                            self.client,
                            self.inv,
                            net,
                            stn,
                            slat,
                            slon,
                            elat,
                            elon,
                            evdp,
                            evtime,
                            em,
                            emt,
                            fcat,
                            stalons=sks_stalons,
                            stalats=sks_stalats,
                            staNetNames=sks_staNetNames,
                            phase='SKS',
                            locations=locations)
                        if not msg:
                            self.logger.info(
                                f"Event: {evtime}; rem: {rem_dl}/{tot_evnt_stns}; dl: {succ_dl}/{num_try}"
                            )
                        else:
                            self.logger.info(
                                f"{msg}; rem: {rem_dl}/{tot_evnt_stns}; dl: {succ_dl}/{num_try}"
                            )

                        if strm:
                            stream.extend(strm)
                        if res:
                            succ_dl += 1
                    if not len(stream):
                        self.logger.warning(f"No data {sksdatafile}")

                    stream.write(sksdatafile, 'H5')
                    fcat.close()
                else:
                    if os.path.exists(catfile):
                        self.logger.info(f"catalog {catfile} exists!")
                    else:
                        self.logger.info(f"catalog {catfile} does not exist!")
                    if not os.path.exists(sksdatafile):
                        self.logger.info(
                            f"datafile {sksdatafile} does not exist!")
                    if tot_evnt_stns > 0:
                        self.logger.info(
                            f"Total files to download {tot_evnt_stns}")

                ### Event map plot
                # cattxtnew = catalogtxtloc+f'{net}-{stn}-events-info-sks.txt'
                if os.path.exists(
                        cattxtnew
                ) and plot_events and not os.path.exists(
                        f"{net}-{stn}-{str(inpSKSdict['filenames']['events_map_suffix'])}.png"
                ):
                    df = pd.read_csv(cattxtnew,
                                     delimiter="\||,",
                                     names=[
                                         'evtime', 'evlat', 'evlon', 'evdp',
                                         'evmg', 'client'
                                     ],
                                     header=None,
                                     engine="python")
                    if df.shape[0]:
                        evmg = [float(val.split()[0]) for val in df['evmg']]
                        event_plot_name = f"{net}-{stn}-{str(inpSKSdict['filenames']['events_map_suffix'])}"
                        if not os.path.exists(dest_map + event_plot_name +
                                              f".{self.fig_frmt}"):
                            self.logger.info(f"Plotting events map " +
                                             event_plot_name +
                                             f".{self.fig_frmt}")
                            events_map(evlons=df['evlon'],
                                       evlats=df['evlat'],
                                       evmgs=evmg,
                                       evdps=df['evdp'],
                                       stns_lon=slon,
                                       stns_lat=slat,
                                       destination=dest_map,
                                       figfrmt=self.fig_frmt,
                                       clon=slon,
                                       outname=f'{net}-{stn}-SKS')

        ## plot station map for all the stations for which the data has been successfully retrieved
        if plot_stations and self.method == 'RF' and len(rf_stalons):
            print("\n")
            self.logger.info("Plotting station map for RF")
            map = plot_merc(resolution='h',
                            llcrnrlon=self.minlongitude - 1,
                            llcrnrlat=self.minlatitude - 1,
                            urcrnrlon=self.maxlongitude + 1,
                            urcrnrlat=self.maxlatitude + 1,
                            topo=True)
            station_map(map,
                        stns_lon=rf_stalons,
                        stns_lat=rf_stalats,
                        stns_name=rf_staNetNames,
                        figname=str(
                            inpRFdict['filenames']['retr_station_prefix']),
                        destination=dest_map,
                        figfrmt=self.fig_frmt)
            # station_map(map, stns_lon=rf_stalons, stns_lat=rf_stalats,stns_name= rf_staNetNames,figname=str(inpRF.loc['retr_station_prefix','VALUES']), destination=dest_map,figfrmt=self.fig_frmt)

        if plot_stations and self.method == 'SKS' and len(sks_stalons):
            print("\n")
            self.logger.info("Plotting station map for SKS")
            map = plot_merc(resolution='h',
                            llcrnrlon=self.minlongitude - 1,
                            llcrnrlat=self.minlatitude - 1,
                            urcrnrlon=self.maxlongitude + 1,
                            urcrnrlat=self.maxlatitude + 1,
                            topo=True)
            station_map(map,
                        stns_lon=sks_stalons,
                        stns_lat=sks_stalats,
                        stns_name=sks_staNetNames,
                        figname=str(
                            inpSKSdict['filenames']['retr_station_prefix']),
                        destination=dest_map,
                        figfrmt=self.fig_frmt)
        ## Write the retrieved station catalog
        if self.method == 'RF':
            write_station_file(self.inventorytxtfile,
                               rf_staNetNames,
                               outfile=catalogtxtloc +
                               str(inpRFdict['filenames']['retr_stations']))
            # write_station_file(self.inventorytxtfile,rf_staNetNames,outfile=catalogtxtloc+str(inpRF.loc['retr_stations','VALUES']))
        elif self.method == 'SKS':
            write_station_file(self.inventorytxtfile,
                               sks_staNetNames,
                               outfile=catalogtxtloc +
                               str(inpSKSdict['filenames']['retr_stations']))
Code Example #6
    stream3c.trim2(-25, 75, 'onset')
    #   print np.max(stream3c[0].data),np.max(stream3c[1].data),np.max(stream3c[2].data)
    return stream3c


print "Lets start the show..."
#data = read_rf('DATA/7X-event_waveforms_for_rf.h5', 'H5')
data = read_rf('DATA/7X-MA12.h5', 'H5')
print "Data in..."
'''
# we can exclude bad stations
inc_set = list(set([tr.stats.inclination for tr in data]))
data_filtered = RFStream([tr for tr in data if tr.stats.inclination in inc_set and tr.stats.station not in ['MIJ2', 'MIL2']])
'''

stream = RFStream()

rf_streams = Parallel(n_jobs=-1,
                      verbose=1)(map(delayed(do_rf),
                                     IterMultipleComponents(data, 'onset', 3)))

for i, rf in enumerate(rf_streams):
    event_id = {'event_id': i}
    for tr in rf:
        tr.stats.update(event_id)
    stream.extend(rf)

stream.write('DATA/7X-MA12-rf_zrt', 'H5')
print "No worries, mate..."
Code Example #7
def compute_rf(dataRFfileloc):
    logger = logging.getLogger(__name__)
    all_rfdatafile = glob.glob(
        dataRFfileloc +
        f"*-{str(inpRFdict['filenames']['data_rf_suffix'])}.h5")
    for jj, rfdatafile in enumerate(all_rfdatafile):
        network = rfdatafile.split("-")[0]
        station = rfdatafile.split("-")[1]
        rffile = f"{network}-{station}-{str(inpRFdict['filenames']['rf_compute_data_suffix'])}.h5"
        if not os.path.exists(rffile):
            logger.info(
                f"--> Computing RF for {rfdatafile}, {jj+1}/{len(all_rfdatafile)}"
            )
            data = read_rf(rfdatafile, 'H5')
            stream = RFStream()
            for stream3c in tqdm.tqdm(IterMultipleComponents(data, 'onset',
                                                             3)):
                if len(stream3c) != 3:
                    continue

                ## Check that all three traces span the same window: bring the
                ## sampling rate to 20 Hz and trim to a common lenphase-second length
                lenphase = 100
                for tr in list(stream3c):
                    lentr = tr.stats.npts
                    lengt = tr.stats.sampling_rate * lenphase
                    if lentr == lengt:
                        continue
                    if tr.stats.sampling_rate < 20:
                        logger.warning(
                            f"Sampling rate too low: {tr.stats.sampling_rate}, required >= 20Hz"
                        )
                        stream3c.remove(tr)
                        continue
                    if tr.stats.sampling_rate % 20 == 0:
                        factor = int(tr.stats.sampling_rate / 20)
                        tr.decimate(factor, strict_length=False, no_filter=True)
                    else:
                        tr.resample(20.0)
                    if tr.stats.npts > tr.stats.sampling_rate * lenphase:
                        t = tr.stats.starttime
                        tr.trim(t, t + lenphase - (1 / tr.stats.sampling_rate))

                # Skip this 3-component group if the trace lengths still differ
                if len(set(tr.stats.npts for tr in stream3c)) > 1:
                    continue

                stream3c.filter(
                    'bandpass',
                    freqmin=float(inpRFdict['rf_filter_settings']['minfreq']),
                    freqmax=float(inpRFdict['rf_filter_settings']['maxfreq']))

                try:
                    stream3c.rf()
                except Exception:
                    logger.warning("Problem applying rf method", exc_info=True)
                    continue
                stream3c.moveout()
                stream.extend(stream3c)
            stream.write(rffile, 'H5')
        else:
            # logger.info(f"--> {rffile} already exists!, {jj}/{len(all_rfdatafile)}")
            logger.info(
                f"--> Verifying RF computation {jj+1}/{len(all_rfdatafile)}")