Example #1
VERDATE = '15/04/2019'
## imports ##
import logging
from obspy import UTCDateTime
if "iris-federator" in nodes or "eida-routing" in nodes:
    from obspy.clients.fdsn import RoutingClient as Client
else:
    from obspy.clients.fdsn import Client
# Logging params
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(levelname)s %(message)s')

# setup client #
for node in nodes:
    logging.info("Trying to get information from node '%s'" % node)
    clt = Client(node)
    inv = clt.get_stations(starttime=UTCDateTime(period[0]),
                           endtime=UTCDateTime(period[1]),
                           minlongitude=p1[0],
                           minlatitude=p1[1],
                           maxlongitude=p2[0],
                           maxlatitude=p2[1],
                           level=level)
    try:
        sumInv += inv
    except NameError:
        sumInv = inv

# how many stations did we find?
# correct end times while iterating to avoid bugs with matching metadata
now = UTCDateTime()
# Which stations do you want to get events on?
stations = ['FRD', 'RDM']

################################################################################

#load the catalog

catalog = quakeml.readQuakeML(catalog_file)
print('Read catalog')

# If you're making pick files...
if make_pick_files:

    # Read in the events from the human readable text file; define the pick client
    events = genfromtxt(events_file, usecols=5, dtype='S')
    pick_client = Client(data_center)

    # For every event in the catalog, loop through to find the event id...
    for kevent in range(hot_start, len(events)):

        id_event = events[kevent]

        #if 'ci' in id_event:  # SoCal event, get picks, otherwise ignore
        ## We don't need the above line since all events are in the SoCal catalog, so instead
        #    add 'ci' to all events
        #id_event = 'ci' + id_event

        # Make a pick file:
        print('Creating pick file for event %d of %d' % (kevent, len(events)))

        # Make the pick file path, open the file, and write the header.
Example #3
def sonify(
    network,
    station,
    channel,
    starttime,
    endtime,
    location='*',
    freqmin=None,
    freqmax=None,
    speed_up_factor=200,
    fps=1,
    resolution='4K',
    output_dir=None,
    spec_win_dur=5,
    db_lim='smart',
    log=False,
    utc_offset=None,
):
    r"""
    Produce an animated spectrogram with a soundtrack derived from sped-up
    seismic or infrasound data.

    Args:
        network (str): SEED network code
        station (str): SEED station code
        channel (str): SEED channel code
        starttime (:class:`~obspy.core.utcdatetime.UTCDateTime`): Start time of
            animation (UTC)
        endtime (:class:`~obspy.core.utcdatetime.UTCDateTime`): End time of
            animation (UTC)
        location (str): SEED location code
        freqmin (int or float): Lower bandpass corner [Hz] (defaults to 20 Hz /
            `speed_up_factor`)
        freqmax (int or float): Upper bandpass corner [Hz] (defaults to 20,000
            Hz / `speed_up_factor` or the `Nyquist frequency`_, whichever is
            smaller)
        speed_up_factor (int): Factor by which to speed up the waveform data
            (higher values = higher pitches)
        fps (int): Frames per second of output video
        resolution (str): Resolution of output video; one of `'crude'` (640
            :math:`\times` 360), `'720p'` (1280 :math:`\times` 720), `'1080p'`
            (1920 :math:`\times` 1080), `'2K'` (2560 :math:`\times` 1440), or
            `'4K'` (3840 :math:`\times` 2160)
        output_dir (str or :class:`~pathlib.Path`): Directory where output video
            should be saved (defaults to :meth:`~pathlib.Path.cwd`)
        spec_win_dur (int or float): Duration of spectrogram window [s]
        db_lim (tuple or str): Tuple defining min and max colormap cutoffs [dB],
            `'smart'` for a sensible automatic choice, or `None` for no clipping
        log (bool): If `True`, use log scaling for :math:`y`-axis of spectrogram
        utc_offset (int or float): If not `None`, convert UTC time to local time
            using this offset [hours] before plotting

    .. _Nyquist frequency: https://en.wikipedia.org/wiki/Nyquist_frequency
    """

    # Capture args and format as string to store in movie metadata
    key_value_pairs = [f'{k}={repr(v)}' for k, v in locals().items()]
    call_str = 'sonify({})'.format(', '.join(key_value_pairs))

    # Use current working directory if none provided
    if not output_dir:
        output_dir = Path().cwd()
    output_dir = Path(str(output_dir)).expanduser().resolve()
    if not output_dir.exists():
        raise FileNotFoundError(f'Directory {output_dir} does not exist!')

    client = Client('IRIS')

    print('Retrieving data...')
    st = client.get_waveforms(
        network,
        station,
        location,
        channel,
        starttime - PAD,
        endtime + PAD,
        attach_response=True,
    )
    print('Done')

    # Merge Traces with the same IDs
    st.merge(fill_value='interpolate')

    if st.count() != 1:
        warnings.warn('Stream contains more than one Trace. Using first entry!')
        for tr in st:
            print(tr.id)
    tr = st[0]

    # Adjust starttime so we have nice numbers in time box (carefully!)
    offset = np.abs(tr.stats.starttime - (starttime - PAD))  # [s]
    if offset > tr.stats.delta:
        warnings.warn(
            f'Difference between requested and actual starttime is {offset} s, '
            f'which is larger than the data sample interval ({tr.stats.delta} s). '
            'Not adjusting starttime of downloaded data; beware of inaccurate timing!'
        )
    else:
        tr.stats.starttime = starttime - PAD

    # Apply UTC offset if provided
    if utc_offset is not None:
        signed_offset = f'{utc_offset:{"+" if utc_offset else ""}g}'
        print(f'Converting to local time using UTC offset of {signed_offset} hours')
        utc_offset_sec = utc_offset * mdates.SEC_PER_HOUR
        starttime += utc_offset_sec
        endtime += utc_offset_sec
        tr.stats.starttime += utc_offset_sec

    # All infrasound sensors have a "?DF" channel pattern
    if tr.stats.channel[1:3] == 'DF':
        is_infrasound = True
        rescale = 1  # No conversion
    # All high-gain seismometers have a "?H?" channel pattern
    elif tr.stats.channel[1] == 'H':
        is_infrasound = False
        rescale = 1e6  # Convert m to µm
    # We can't figure out what type of sensor this is...
    else:
        raise ValueError(
            f'Channel {tr.stats.channel} is not an infrasound or seismic channel!'
        )

    if not freqmax:
        freqmax = np.min(
            [tr.stats.sampling_rate / 2, HIGHEST_AUDIBLE_FREQUENCY / speed_up_factor]
        )
    if not freqmin:
        freqmin = LOWEST_AUDIBLE_FREQUENCY / speed_up_factor

    tr.remove_response()  # Units are m/s OR Pa after response removal
    tr.detrend('demean')
    tr.taper(max_percentage=None, max_length=PAD / 2)  # Taper away some of PAD
    print(f'Applying {freqmin:g}-{freqmax:g} Hz bandpass')
    tr.filter('bandpass', freqmin=freqmin, freqmax=freqmax, zerophase=True)

    # Make trimmed version
    tr_trim = tr.copy()
    tr_trim.trim(starttime, endtime)

    # Create temporary directory for audio and video files
    temp_dir = tempfile.TemporaryDirectory()

    # MAKE AUDIO FILE

    tr_audio = tr_trim.copy()
    target_fs = AUDIO_SAMPLE_RATE / speed_up_factor
    corner_freq = 0.4 * target_fs  # [Hz] Note that Nyquist is 0.5 * target_fs
    if corner_freq < tr_audio.stats.sampling_rate / 2:  # To avoid ValueError
        tr_audio.filter('lowpass', freq=corner_freq, corners=10, zerophase=True)
    tr_audio.interpolate(sampling_rate=target_fs, method='lanczos', a=20)
    tr_audio.taper(0.01)  # For smooth start and end
    audio_file = Path(temp_dir.name) / '47.wav'
    print('Saving audio file...')
    tr_audio.write(
        str(audio_file),
        format='WAV',
        width=4,
        rescale=True,
        framerate=AUDIO_SAMPLE_RATE,
    )
    print('Done')

    # MAKE VIDEO FILE

    # We don't need an anti-aliasing filter here since we never use the values,
    # just the timestamps
    timing_tr = tr_trim.copy().interpolate(sampling_rate=fps / speed_up_factor)
    times = timing_tr.times('UTCDateTime')[:-1]  # Remove extra frame

    # Define update function
    def _march_forward(frame, spec_line, wf_line, time_box, wf_progress):

        spec_line.set_xdata(times[frame].matplotlib_date)
        wf_line.set_xdata(times[frame].matplotlib_date)
        time_box.txt.set_text(times[frame].strftime('%H:%M:%S'))
        tr_progress = tr.copy().trim(endtime=times[frame])
        wf_progress.set_xdata(tr_progress.times('matplotlib'))
        wf_progress.set_ydata(tr_progress.data * rescale)

    # Store user's rc settings, then update font stuff
    original_params = matplotlib.rcParams.copy()
    matplotlib.rcParams.update(matplotlib.rcParamsDefault)
    matplotlib.rcParams['font.sans-serif'] = 'Tex Gyre Heros'
    matplotlib.rcParams['mathtext.fontset'] = 'custom'

    fig, *fargs = _spectrogram(
        tr,
        starttime,
        endtime,
        is_infrasound,
        rescale,
        spec_win_dur,
        db_lim,
        (freqmin, freqmax),
        log,
        utc_offset is not None,
        resolution,
    )

    # Create animation
    interval = ((1 / timing_tr.stats.sampling_rate) * MS_PER_S) / speed_up_factor
    animation = FuncAnimation(
        fig,
        func=_march_forward,
        frames=times.size,
        fargs=fargs,
        interval=interval,
    )

    video_file = Path(temp_dir.name) / '47.mp4'
    print('Saving animation. This may take a while...')
    animation.save(
        video_file,
        dpi=RESOLUTIONS[resolution][0] / FIGURE_WIDTH,  # Can be a float...
        progress_callback=lambda i, n: print(
            '{:.1f}%'.format(((i + 1) / n) * 100), end='\r'
        ),
    )
    print('\nDone')

    # Restore user's rc settings, ignoring Matplotlib deprecation warnings
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        matplotlib.rcParams.update(original_params)

    # MAKE COMBINED FILE

    tr_id_str = '_'.join([code for code in tr.id.split('.') if code])
    output_file = output_dir / f'{tr_id_str}_{speed_up_factor}x.mp4'
    _ffmpeg_combine(audio_file, video_file, output_file, call_str)

    # Clean up temporary directory, just to be safe
    temp_dir.cleanup()
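
# A hedged usage sketch for the function defined above; the station, channel,
# and time window are illustrative assumptions, and every keyword matches a
# parameter documented in the docstring.
if __name__ == '__main__':
    from obspy import UTCDateTime

    sonify(
        network='AV',
        station='ILSW',
        channel='BHZ',
        starttime=UTCDateTime(2019, 6, 20, 23, 10),
        endtime=UTCDateTime(2019, 6, 21, 0, 30),
        freqmax=23,
        speed_up_factor=200,
        fps=1,
        spec_win_dur=8,
        db_lim=(-180, -130),
    )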
Example #4
def main(inventory_file,
         waveform_database,
         event_catalog_file,
         event_trace_datafile,
         start_time,
         end_time,
         taup_model,
         distance_range,
         magnitude_range,
         catalog_only=False):

    log = logging.getLogger(__name__)
    log.setLevel(logging.INFO)

    waveform_db_is_web = is_url(
        waveform_database
    ) or waveform_database in obspy.clients.fdsn.header.URL_MAPPINGS
    if not waveform_db_is_web:
        assert os.path.exists(
            waveform_database), "Cannot find waveform database file {}".format(
                waveform_database)
    log.info("Using waveform data source: {}".format(waveform_database))

    min_dist_deg = distance_range[0]
    max_dist_deg = distance_range[1]
    min_mag = magnitude_range[0]
    max_mag = magnitude_range[1]

    inventory = read_inventory(inventory_file)
    log.info("Loaded inventory {}".format(inventory_file))

    # Compute reference lonlat from the inventory.
    channels = inventory.get_contents()['channels']
    lonlat_coords = []
    for ch in channels:
        coords = inventory.get_coordinates(ch)
        lonlat_coords.append((coords['longitude'], coords['latitude']))
    lonlat_coords = np.array(lonlat_coords)
    lonlat = np.mean(lonlat_coords, axis=0)
    log.info("Inferred reference coordinates {}".format(lonlat))

    # If start and end time not provided, infer from date range of inventory.
    if not start_time:
        start_time = inventory[0].start_date
        for net in inventory:
            start_time = min(start_time, net.start_date)
        log.info("Inferred start time {}".format(start_time))
    # end if
    if not end_time:
        end_time = inventory[0].end_date
        if end_time is None:
            end_time = UTC.now()
        for net in inventory:
            end_time = max(end_time, net.end_date)
        log.info("Inferred end time {}".format(end_time))
    # end if

    start_time = UTC(start_time)
    end_time = UTC(end_time)
    event_catalog_file = timestamp_filename(event_catalog_file, start_time,
                                            end_time)
    event_trace_datafile = timestamp_filename(event_trace_datafile, start_time,
                                              end_time)
    assert not os.path.exists(event_trace_datafile), \
        "Output file {} already exists, please remove!".format(event_trace_datafile)
    log.info("Traces will be written to: {}".format(event_trace_datafile))

    exit_after_catalog = catalog_only
    catalog = get_events(lonlat, start_time, end_time, event_catalog_file,
                         (min_dist_deg, max_dist_deg), (min_mag, max_mag),
                         exit_after_catalog)

    if waveform_db_is_web:
        log.info("Use fresh query results from web")
        client = Client(waveform_database)
        waveform_getter = client.get_waveforms
    else:
        # Form closure to allow waveform source file to be derived from a setting (or command line input)
        asdf_dataset = FederatedASDFDataSet(waveform_database, logger=log)

        def closure_get_waveforms(network, station, location, channel,
                                  starttime, endtime):
            return asdf_get_waveforms(asdf_dataset, network, station, location,
                                      channel, starttime, endtime)

        waveform_getter = closure_get_waveforms
    # end if

    with tqdm(smoothing=0) as pbar:
        stream_count = 0
        for s in iter_event_data(catalog,
                                 inventory,
                                 waveform_getter,
                                 tt_model=taup_model,
                                 pbar=pbar):
            # Write traces to output file in append mode so that arbitrarily large file
            # can be processed. If the file already exists, then existing streams will
            # be overwritten rather than duplicated.
            # Check first if rotation for unaligned *H1, *H2 channels to *HN, *HE is required.
            if not s:
                continue
            # end if
            if s.select(component='1') and s.select(component='2'):
                try:
                    s.rotate('->ZNE', inventory=inventory)
                except ValueError as e:
                    log.error('Unable to rotate to ZNE with error:\n{}'.format(
                        str(e)))
                    continue
                # end try
            # end if
            # Order the traces in ZNE ordering. This is required so that normalization
            # can be specified in terms of an integer index, i.e. the default of 0 in rf
            # library will normalize against the Z component.
            s.traces = sorted(s.traces, key=zne_order)
            # Assert the ordering of traces in the stream is ZNE.
            assert s[0].stats.channel[-1] == 'Z'
            assert s[1].stats.channel[-1] == 'N'
            assert s[2].stats.channel[-1] == 'E'
            # Iterator returns rf.RFStream. Write traces from obspy.Stream to decouple from RFStream.
            grp_id = '.'.join(s.traces[0].id.split('.')[0:3])
            event_time = str(s.traces[0].meta.event_time)[0:19]
            pbar.set_description("{} -- {}".format(grp_id, event_time))
            out_stream = obspy.Stream([tr for tr in s])
            assert out_stream[0].stats.channel[-1] == 'Z'
            assert out_stream[1].stats.channel[-1] == 'N'
            assert out_stream[2].stats.channel[-1] == 'E'
            write_h5_event_stream(event_trace_datafile, out_stream, mode='a')
            stream_count += 1
        # end for

        if stream_count == 0:
            log.warning("No traces found!")
        else:
            log.info("Wrote {} streams to output file".format(stream_count))
Example #5
def main(args=None):

    # Get Input Options
    (opts, outp) = get_options()

    # Initialize the client
    stdout.writelines("Initializing Client ({0:s})...".format(opts.Server))
    if len(opts.UserAuth) == 0:
        client = Client(opts.Server)
    else:
        client = Client(opts.Server,
                        user=opts.UserAuth[0],
                        password=opts.UserAuth[1])
    stdout.writelines("Done\n\n")

    # Search the Client for stations
    stdout.writelines("Querying client...")
    try:
        inv = client.get_stations(network=opts.nets,
                                  station=opts.stns,
                                  channel=opts.chns,
                                  location=opts.locs,
                                  starttime=opts.stdate,
                                  endtime=opts.enddate,
                                  startbefore=opts.stbefore,
                                  startafter=opts.stafter,
                                  endbefore=opts.endbefore,
                                  endafter=opts.endafter,
                                  latitude=opts.lat,
                                  longitude=opts.lon,
                                  minradius=opts.minr,
                                  maxradius=opts.maxr,
                                  minlatitude=opts.minlat,
                                  maxlatitude=opts.maxlat,
                                  minlongitude=opts.minlon,
                                  maxlongitude=opts.maxlon,
                                  includeavailability=None,
                                  includerestricted=True,
                                  level='channel')
        stdout.writelines("Done\n\n")
    except:
        print('Exception: Cannot complete query or no data in query...')
        exit()

    # Summarize Search
    nstn = 0
    for net in inv.networks:
        for stn in net.stations:
            nstn = nstn + 1
    print("Search Complete: ")
    print("  {0:d} stations in {1:d} networks".format(nstn, len(inv.networks)))
    print(" ")

    # If Debug mode, pickle inventory and exit
    if opts.debug:
        stdout.writelines(
            "Pickling Inventory into {0:s}_query_debug.pkl...".format(outp))
        pickle.dump(inv, open('{0:s}_query_debug.pkl'.format(outp), 'wb'))
        stdout.writelines("Done\n\n")
        stdout.writelines(
            "Writing csv2kml format file to {0:s}_query_debug.kcsv\n".format(
                outp))
        fcsv = open("{0:s}_query_debug.kcsv".format(outp), 'w')
        for net in inv.networks:
            for stn in net.stations:
                lat = stn.latitude
                lon = stn.longitude
                stdt = stn.start_date
                eddt = stn.end_date
                fcsv.writelines("{0:11.6f},{1:10.6f},{2:2s},{3:5s},{4:s},{5:s}\n".format(\
                    lon, lat, net.code, stn.code, stdt.strftime("%Y-%m-%d"), eddt.strftime("%Y-%m-%d")))
        fcsv.close()
        aa = system(
            "csv2kml --field-names='lon,lat,net,station,start,end' {0:s}_query_debug.kcsv"
            .format(outp))
        if aa == 0:
            print(
                "Generated a KML file {0:s}_query_debug.kcsv.kml".format(outp))
        else:
            print("Generate a kml file using: ")
            print ("   csv2kml --no-random-colours --field-names='lon,lat,net,station,start,end' " \
                "{0:s}_query_debug.kcsv".format(outp))

        exit()

    #-- Split locations for later parsing
    opts.locs = opts.locs.split(',')
    #-- Replace empty location codes with "--"
    for i, l in enumerate(opts.locs):
        if len(l) == 0:
            opts.locs[i] = "--"

    # Initialize station dictionary
    stations = {}

    # Loop through results
    for net in inv.networks:
        network = net.code.upper()
        print("Network: {0:s}".format(network))
        for stn in net.stations:
            station = stn.code.upper()
            print("   Station: {0:s}".format(station))

            # get standard values
            lat = stn.latitude
            lon = stn.longitude
            elev = stn.elevation / 1000.
            stdt = stn.start_date
            if stn.end_date is None:
                eddt = UTCDateTime("2599-12-31")
            else:
                eddt = stn.end_date
            stat = stn.restricted_status

            print("     Lon, Lat, Elev: {0:9.4f}, {1:8.4f}, {2:7.3f}".format(
                lon, lat, elev))
            print("     Start Date: {0:s}".format(
                stdt.strftime("%Y-%m-%d %H:%M:%S")))
            print("     End Date:   {0:s}".format(
                eddt.strftime("%Y-%m-%d %H:%M:%S")))
            print("     Status:     {0:s}".format(stat))

            # Parse Channels
            if opts.lkey:
                # Select Multiple Channels based on those in the rank list
                # Do not keep overlapping time windows
                # Select Channels based on those available compared to channel rank
                chn = []
                for pchn in opts.chnrank:
                    stnchn = stn.select(channel=pchn + "Z")
                    if len(stnchn.channels) > 0:
                        chn.append(pchn)

                #-- If no channels with Z found, skip
                if not chn:
                    if len(stn.select(channel='*Z')) == 0:
                        print("     Error: No Z component. Skipping")
                        continue

                #-- loop through channels and select time windows
                for pchn in chn:
                    locs = []
                    stdts = []
                    eddts = []
                    stnchn = stn.select(channel=pchn + "Z")
                    #--Collect Start/end Dates and locations
                    for chnl in stnchn:
                        chnlloc = chnl.location_code
                        if len(chnlloc) == 0: chnlloc = "--"
                        for selloc in opts.locs:
                            # print (selloc, chnlloc)
                            if selloc == '*' or chnlloc in selloc:
                                locs.append(chnlloc)
                                stdts.append(chnl.start_date)
                                if chnl.end_date is None:
                                    eddts.append(UTCDateTime("2599-12-31"))
                                else:
                                    eddts.append(chnl.end_date)

                    #-- Unique set of locids, get minmax time for channel across all locids
                    locs = list(set(locs))
                    stdts.sort()
                    eddts.sort()
                    stnchnstdt = stdts[0]
                    stnchneddt = eddts[-1]

                    print("       Selected Channel: {0:s}".format(pchn))
                    print("         Locations:  {0:s}".format(",".join(locs)))
                    print("         Start Date: {0:s}".format(
                        stnchnstdt.strftime("%Y-%m-%d %H:%M:%S")))
                    print("         End Date:   {0:s}".format(
                        stnchneddt.strftime("%Y-%m-%d %H:%M:%S")))

                    #-- Add single key to station database
                    key = "{0:s}.{1:s}.{2:2s}".format(network, station, pchn)
                    if key not in stations:
                        stations[key] = StDbElement(network=network, station=station, channel=pchn, \
                            location=locs, latitude=lat, longitude=lon, elevation=elev, polarity=1., \
                            azcorr=0., startdate=stnchnstdt, enddate=stnchneddt, restricted_status=stat)
                        print("       Added as: " + key)
                    else:
                        print("       Warning: " + key +
                              " already exists...Skip")

            else:
                # Select a single channel type if only short keys
                chn = None
                locs = []
                stdts = []
                eddts = []
                for pchn in opts.chnrank:
                    stnchn = stn.select(channel=pchn + "Z")
                    if len(stnchn.channels) > 0:
                        chn = pchn
                        #--Collect Start/end Dates and locations
                        for chnl in stnchn:
                            chnlloc = chnl.location_code
                            if len(chnlloc) == 0: chnlloc = "--"
                            for selloc in opts.locs:
                                # print (selloc, chnlloc)
                                if selloc == '*' or chnlloc in selloc:
                                    locs.append(chnlloc)
                                    stdts.append(chnl.start_date)
                                    if chnl.end_date is None:
                                        eddts.append(UTCDateTime("2599-12-31"))
                                    else:
                                        eddts.append(chnl.end_date)
                        if len(locs) > 0:
                            break

                if chn is None:
                    if len(stn.select(channel='*Z')) == 0:
                        print("     Error: No Z component. Skipping")
                        continue
                if len(locs) == 0:
                    print("     Error: Location {} not available. Skipping".
                          format(",".join(opts.locs)))
                    continue

                #-- Unique set of locids, get minmax time for channel across all locids
                locs = list(set(locs))
                stdts.sort()
                eddts.sort()
                stnchnstdt = stdts[0]
                stnchneddt = eddts[-1]

                # # return location codes for selected channel
                # locs = list(set([a.location_code for a in stn.select(channel=chn+'Z').channels]))

                # print ("     Selected Channel: {0:s}".format(chn))
                # print ("     Locations:        {0:s}".format(",".join(locs)))

                print("       Selected Channel: {0:s}".format(pchn))
                print("         Locations:  {0:s}".format(",".join(locs)))
                print("         Start Date: {0:s}".format(
                    stnchnstdt.strftime("%Y-%m-%d %H:%M:%S")))
                print("         End Date:   {0:s}".format(
                    stnchneddt.strftime("%Y-%m-%d %H:%M:%S")))

                key = "{0:s}.{1:s}".format(network, station)

                #-- Add single key to station database
                if key not in stations:
                    stations[key] = StDbElement(network=network, station=station, channel=chn, \
                        location=locs, latitude=lat, longitude=lon, elevation=elev, polarity=1., \
                        azcorr=0., startdate=stdt, enddate=eddt, restricted_status=stat)
                    print("    Added as: " + key)
                else:
                    print("    Warning: " + key + " already exists...Skip")
            print()

    # Save and Pickle
    print(" ")
    print("  Pickling to {0:s}.pkl".format(outp))
    write_db(fname=outp + '.pkl', stdb=stations, binp=opts.use_binary)

    # Save csv
    print("  Saving csv to: {0:s}.csv".format(outp))
    fcsv = open(outp + ".csv", 'w')
    stkeys = sorted(stations.keys())  # sort keys (Python 3: keys() is a view)

    for stkey in stkeys:
        #                 net    stn   locs   chn   std      stt         edd      edt          lat       lon       elev       pol      azc       res
        fcsv.writelines(
            "{0:s},{1:s},{2:s},{3:s}*,{4:s},{5:s}.{6:1.0f},{7:s},{8:s}.{9:1.0f},{10:8.4f},{11:9.4f},{12:6.2f},{13:3.1f},{14:8.4f},{15:s}\n"
            .format(stations[stkey].network, stations[stkey].station, ":".join(
                stations[stkey].location), stations[stkey].channel[0:2],
                    stations[stkey].startdate.strftime("%Y-%m-%d"),
                    stations[stkey].startdate.strftime("%H:%M:%S"),
                    stations[stkey].startdate.microsecond / 100000.,
                    stations[stkey].enddate.strftime("%Y-%m-%d"),
                    stations[stkey].enddate.strftime("%H:%M:%S"),
                    stations[stkey].enddate.microsecond / 100000.,
                    stations[stkey].latitude, stations[stkey].longitude,
                    stations[stkey].elevation, stations[stkey].polarity,
                    stations[stkey].azcorr, stations[stkey].status))
#########################################################
################ PARAMETER SECTION ######################
#########################################################
tt0 = time.time()

# paths and filenames
rootpath = './'  # rootpath for the project
direc = os.path.join(rootpath,
                     'RAW_DATA')  # where to store the downloaded data
dlist = os.path.join(direc,
                     'station.txt')  # CSV file for station location info

# download parameters
client = Client(
    'IRIS'
)  # client/data center. see https://docs.obspy.org/packages/obspy.clients.fdsn.html for a list
down_list = False  # download stations from a pre-compiled list or not
flag = False  # print progress when running the script; recommended at the beginning
samp_freq = 2  # targeted sampling rate of X samples per second
rm_resp = 'no'  # select 'no' to not remove response and use 'inv','spectrum','RESP', or 'polozeros' to remove response
respdir = os.path.join(
    rootpath, 'resp'
)  # directory where resp files are located (required if rm_resp is neither 'no' nor 'inv')
freqmin = 0.02  # pre filtering frequency bandwidth
freqmax = 1  # note this cannot exceed the Nyquist frequency

# targeted region/station information: only needed when down_list is False
lamin, lamax, lomin, lomax = 35.5, 36.5, -120.5, -119.5  # regional box: min lat, max lat, min lon, max lon
chan_list = ["HHE", "HHN", "HHZ"
             ]  # channel if down_list=false (format like "HN?" not work here)
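
# A hedged sketch (not part of the original script) of how the regional box and
# channel list above could feed an FDSN station query; the keyword names are
# standard obspy Client.get_stations() arguments:
inv = client.get_stations(network='*', station='*',
                          channel=','.join(chan_list),
                          minlatitude=lamin, maxlatitude=lamax,
                          minlongitude=lomin, maxlongitude=lomax,
                          level='channel')
print(inv)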
from obspy import UTCDateTime, Stream, read, read_inventory
from obspy.taup import TauPyModel
from obspy.geodetics.base import locations2degrees
from matplotlib.transforms import blended_transform_factory
from os import path
import matplotlib.pyplot as plt
from geopy.geocoders import Nominatim
import numpy as np
from matplotlib.cm import get_cmap
from obspy.geodetics import gps2dist_azimuth
DATA_PROVIDER = "RASPISHAKE"
MODEL = 'iasp91'  # Velocity model to predict travel-times through
#MODEL = 'ak135'  # Velocity model to predict travel-times through
model = TauPyModel(model=MODEL)

client = Client(DATA_PROVIDER)

# Event details
URL = 'https://earthquake.usgs.gov/earthquakes/eventpage/us7000c7y0/executive'
EQNAME = 'M7 15 km NNE of Néon Karlovásion, Greece'
EQLAT = 37.9175
EQLON = 26.7901
EQZ = 25.7859016291
EQTIME = '2020-10-30 11:51:28'
FILE_STEM = 'Turkey-2020-10-30'
MAGNITUDE = 'M7'

RESP = "DISP"  # DISP, VEL or ACC
WINDOW = 20  # displacement plus/minus window
DECIMATION = 1  # decimation factor, up to 15: keep 1 sample in every DECIMATION to reduce memory usage
# Things to change once for your station
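
# A hedged sketch (not part of the original script) showing how the event
# parameters and velocity model above are typically used: compute the
# epicentral distance to a station and the predicted P arrival time.
# STA_LAT and STA_LON are placeholder coordinates standing in for the
# station-specific values referred to in the comment above.
STA_LAT, STA_LON = 51.5, -0.1  # hypothetical station location
dist_deg = locations2degrees(EQLAT, EQLON, STA_LAT, STA_LON)
arrivals = model.get_travel_times(source_depth_in_km=EQZ,
                                  distance_in_degree=dist_deg,
                                  phase_list=['P'])
p_arrival = UTCDateTime(EQTIME) + arrivals[0].time
print('Distance: {:.1f} deg, predicted P arrival: {}'.format(dist_deg, p_arrival))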
Example #8
###############################
#######PARAMETER SECTION#######
###############################
tt0 = time.time()

# paths and filenames
direc = "./data_download"  # where to store the downloaded data
dlist = os.path.join(direc,
                     'station.lst')  # CSV file for station location info

# check whether folder exists
if not os.path.isdir(direc): os.mkdir(direc)

# download parameters
client = Client('GEONET')  # client/data center
down_list = False  # download stations from pre-compiled list
oput_CSV = True  # output station.list to a CSV file to be used in later stacking steps
flag = True  # print progress when running the script
NewFreq = 10  # resample to X samples per second
rm_resp = False  # boolean to remove instrumental response
respdir = 'none'  # output response directory
freqmin = 0.05  # pre filtering frequency bandwidth
freqmax = 4

# station information
lamin, lomin, lamax, lomax = -46.5, 168, -38, 175  # regional box: min lat, min lon, max lat, max lon
dchan = ['HH*']  # channel if down_list=false
dnet = ["NZ"]  # network
dsta = ["M?Z"]  # station (do either one station or *)
start_date = ["2018_05_01_0_0_0"]  # start date of download
Example #9
def PSD_metrics(concierge):
    """
    Generate *PSD* metrics.

    :type concierge: :class:`~ispaq.concierge.Concierge`
    :param concierge: Data access expediter.
    
    :rtype: pandas dataframe 
    :return: Dataframe of PSD metrics. 

    .. rubric:: Example

    TODO:  doctest examples
    """
    # Get the logger from the concierge
    logger = concierge.logger

    # Default parameters
    channelFilter = '.[HLGNPYXD].'
    logger.debug("channelFilter %s" % channelFilter)

    # function metadata dictionary
    function_metadata = concierge.function_by_logic['PSD']

    # Container for all of the metrics dataframes generated
    dataframes = []

    if (
            concierge.resp_dir
    ):  # if resp_dir: run evalresp on local RESP file instead of web service
        logger.info("Searching for response files in '%s'" %
                    concierge.resp_dir)
    else:  # try to connect to irisws/evalresp
        try:
            resp_url = Client("IRIS")
        except Exception as e:
            logger.error(
                "Could not connect to 'http:/service.iris.edu/irisws/evalresp/1'"
            )
            return None

    # ----- All available SNCLs -------------------------------------------------

    # NEW: Loop over days
    start = concierge.requested_starttime
    end = concierge.requested_endtime

    delta = (end - start) / (24 * 60 * 60)
    nday = int(delta) + 1

    if nday > 1 and concierge.station_client is None:
        try:
            initialAvailability = concierge.get_availability(starttime=start,
                                                             endtime=end)
        except NoAvailableDataError as e:
            raise
        except Exception as e:
            logger.error("concierge.get_availability() failed: '%s'" % e)
            return None

    for day in range(nday):
        # Snap each day boundary to 00:00:00 UTC
        starttime = (start + day * 86400)
        starttime = UTCDateTime(starttime.strftime("%Y-%m-%d") + "T00:00:00Z")
        endtime = starttime + 86400

        if starttime == end:
            continue

        try:
            availability = concierge.get_availability(starttime=starttime,
                                                      endtime=endtime)
        except NoAvailableDataError as e:
            raise
        except Exception as e:
            logger.debug(e)
            logger.error('concierge.get_availability() failed')
            return None

        # If the day has no data, then skip it (used to raise NoAvailableDataError)
        if availability is None:
            continue

        # Apply the channelFilter and drop multiple metadata epochs
        availability = availability[availability.channel.str.contains(
            channelFilter)].drop_duplicates(['snclId'])

        # Loop over rows of the availability dataframe
        logger.info('Calculating PSD metrics for %d SNCLs on %s' %
                    (availability.shape[0], str(starttime).split('T')[0]))

        for (index, av) in availability.iterrows():
            logger.info('%03d Calculating PSD metrics for %s' %
                        (index, av.snclId))

            # Get the data ----------------------------------------------

            # NOTE:  Use the requested starttime and endtime
            try:
                r_stream = concierge.get_dataselect(av.network, av.station,
                                                    av.location, av.channel,
                                                    starttime, endtime)
            except Exception as e:
                logger.debug(e)
                if str(e).lower().find('no data') > -1:
                    logger.info('No data available for %s' % (av.snclId))
                elif str(e).lower().find('multiple epochs') > -1:
                    logger.info(
                        'Skipping %s because multiple metadata epochs found' %
                        (av.snclId))
                else:
                    logger.warning('No data available for %s from %s' %
                                   (av.snclId, concierge.dataselect_url))
                continue

            # Run the PSD metric ----------------------------------------

            if any(key in function_metadata for key in ("PSD", "PSDText")):
                try:
                    evalresp = None
                    if (
                            concierge.resp_dir
                    ):  # if resp_dir: run evalresp on local RESP file instead of web service
                        sampling_rate = utils.get_slot(r_stream,
                                                       'sampling_rate')
                        evalresp = utils.getSpectra(r_stream, sampling_rate,
                                                    concierge)

                    # get corrected PSDs
                    try:
                        (df, PSDcorrected,
                         PDF) = irismustangmetrics.apply_PSD_metric(
                             r_stream, evalresp=evalresp)
                    except Exception as e:
                        raise

                    if not df.empty:
                        dataframes.append(df)

                    if "psd_corrected" in concierge.metric_names:
                        # Write out the corrected PSDs
                        # Do it this way to have each individual day file properly named with starttime.date
                        filename = '%s_%s_PSDCorrected.csv' % (av.snclId,
                                                               starttime.date)
                        filepath = concierge.csv_dir + '/' + filename
                        logger.info('Writing corrected PSD to %s' %
                                    os.path.basename(filepath))
                        try:
                            # Add target
                            PSDcorrected['target'] = av.snclId
                            PSDcorrected = PSDcorrected[[
                                'target', 'starttime', 'endtime', 'freq',
                                'power'
                            ]]
                            utils.write_numeric_df(PSDcorrected,
                                                   filepath,
                                                   sigfigs=concierge.sigfigs)
                        except Exception as e:
                            logger.debug(e)
                            logger.error('Unable to write %s' % (filepath))
                            raise

                    if "pdf_text" in concierge.metric_names:
                        # Write out the PDFs
                        filename = '%s_%s_PDF.csv' % (av.snclId,
                                                      starttime.date)
                        filepath = concierge.csv_dir + '/' + filename
                        logger.info('Writing PDF text to %s' %
                                    os.path.basename(filepath))
                        try:
                            # Add target, start- and endtimes
                            PDF['target'] = av.snclId
                            PDF['starttime'] = starttime
                            PDF['endtime'] = endtime
                            PDF = PDF[[
                                'target', 'starttime', 'endtime', 'freq',
                                'power', 'hits'
                            ]]
                            utils.write_numeric_df(PDF,
                                                   filepath,
                                                   sigfigs=concierge.sigfigs)
                        except Exception as e:
                            logger.debug(e)
                            logger.error('Unable to write %s' % (filepath))
                            raise

                except Exception as e:
                    if str(e).lower().find(
                            'could not resolve host: service.iris.edu') > -1:
                        logger.debug(e)
                        logger.error(
                            'getEvalresp failed to find service.iris.edu')
                    elif str(e).lower().find('no psds returned') > -1:
                        logger.warning(
                            "IRISMustangMetrics: No PSDs returned for %s" %
                            (av.snclId))
                    else:
                        logger.error(e)
                    logger.warning('"PSD" metric calculation failed for %s' %
                                   (av.snclId))
                    continue

            # Run the PSD plot ------------------------------------------

            if 'PSDPlot' in function_metadata:
                try:
                    filename = '%s_%s_PDF.png' % (av.snclId, starttime.date)
                    filepath = concierge.png_dir + '/' + filename
                    evalresp = None
                    if (
                            concierge.resp_dir
                    ):  # if resp_dir: run evalresp on local RESP file instead of web service
                        sampling_rate = utils.get_slot(r_stream,
                                                       'sampling_rate')
                        evalresp = utils.getSpectra(r_stream, sampling_rate,
                                                    concierge)
                    status = irismustangmetrics.apply_PSD_plot(
                        r_stream, filepath, evalresp=evalresp)
                    logger.info('Writing PDF plot %s' %
                                os.path.basename(filepath))
                except Exception as e:
                    if str(e).lower().find('no psds returned') > -1:
                        logger.warning(
                            "IRISMustangMetrics: No PSDs returned for %s" %
                            (av.snclId))
                    else:
                        logger.warning(e)
                    logger.warning('"PSD" plot generation failed for %s' %
                                   (av.snclId))

    # Concatenate and filter dataframes before returning -----------------------

    if len(dataframes) == 0 and 'PSD' in function_metadata:
        logger.warning('"PSD" metric calculation generated zero metrics')
        return None

    else:
        # make a dummy data frame in the case of just creating PSDPlots with no supporting DF statistics
        result = pd.DataFrame({
            'metricName': ['PSDPlot', 'PSDPlot'],
            'value': [0, 1]
        })

        # Create a boolean mask for filtering the dataframe
        def valid_metric(x):
            return x in concierge.metric_names

        if 'PSD' in function_metadata:
            # Concatenate dataframes before returning ----------------------------------
            result = pd.concat(dataframes, ignore_index=True)
            mask = result.metricName.apply(valid_metric)
            result = result[(mask)]
            result.reset_index(drop=True, inplace=True)

        return (result)
Example #10
    def convierte(self):

        ti = time.time()

        # Read a .mseed file containing traces from a single station and several sensors
        st1 = read('{0}'.format(self.miniseed))
        # Make sure it is an HN accelerograph
        st = st1.select(id="*10*")
        tr = st[0]

        # Extract the metadata from the miniSEED
        numero_puntos = tr.stats.npts
        duracion = tr.stats.endtime - tr.stats.starttime
        codigo_estacion = tr.stats.station
        muestreo = tr.stats.sampling_rate

        # Remove the instrument response and convert to acceleration without applying a filter
        # Reading the inventory requires a start and end date; by default the start is 01/01/1993 and the end is the current time

        #starttime =UTCDateTime("1993-01-01T00:00:00")
        #endtime= UTCDateTime()

        starttime = tr.stats.starttime
        endtime = tr.stats.endtime

        # Connect to the SeisComp3 FDSNWS server and read the inventory of the station referenced in the mseed
        cliente = Client('http://10.100.100.232:8091')
        inventory = cliente.get_stations(network='CM',
                                         level="response",
                                         station=codigo_estacion,
                                         starttime=starttime,
                                         endtime=endtime)

        # Extract the coordinate information from the inventory
        estacion_coordenadas = inventory.get_coordinates(
            f"CM.{codigo_estacion}.10.HNZ", endtime)
        latitud_estacion = estacion_coordenadas["latitude"]
        longitud_estacion = estacion_coordenadas["longitude"]

        #pre_filt = (0.005, 0.006, 35, 40)

        # Convert the mseed trace from counts to acceleration
        acel = st.remove_response(inventory=inventory, output="ACC")
        tiempo = np.arange(numero_puntos)
        # Select the acceleration data from the miniSEED
        datosE = acel.select(component="E")[0]
        datosN = acel.select(component="N")[0]
        datosZ = acel.select(component="Z")[0]

        # Convert accelerations to cm/s2
        EW = datosE.data * 100
        NS = datosN.data * 100
        VER = datosZ.data * 100

        # Check that the channel lengths are consistent
        if len(EW) == len(NS) == len(VER):
            lineas = len(EW)
        else:
            print("Dimensiones de array invalidas")

        # Create the file header and the station metadata
        f = open('aceleraciones_{0}.anc'.format(codigo_estacion), 'w')
        f.write(
            'SERVICIO GEOLOGICO COLOMBIANO- RED NACIONAL DE ACELEROGRAFOS DE COLOMBIA\n'
        )
        f.write('SISMO DE BAHIA SOLANO (CHOCO) 2017/01/12 16:06:32 MW=5.3\n')
        f.write('LATITUD DEL EVENTO(GRADOS): 5.958\n')
        f.write('LONGITUD DEL EVENTO(GRADOS): -77.932\n')
        f.write('PROFUNDIDAD DEL EVENTO (Km): 20.5\n')
        f.write(f'CODIGO DE LA ESTACION: {codigo_estacion}\n')
        f.write(
            f'Estacion:{codigo_estacion} Geol:POR_IDENTIFICAR Topo:ONDULADA\n')
        f.write(f'LATITUD DE LA ESTACION (GRADOS): {latitud_estacion}\n')
        f.write(f'LONGITUD DE LA ESTACION (GRADOS): {longitud_estacion}\n')
        f.write('DISTANCIA EPICENTRAL: 65.466 km\n')
        f.write('DISTANCIA HIPOCENTRAL: 68.601 km\n')
        f.write(f'INTERVALO DE MUESTREO (SEGUNDOS): {muestreo}\n')
        f.write(f'NUMERO DE DATOS: {numero_puntos}\n')
        f.write(f'DURACION (SEGUNDOS): {duracion}\n')
        f.write('UNIDADES: cm/s^2\n')
        f.write('TIPO DE EQUIPO: EPISENSOR+Q330\n')
        f.write('ESCALA MAXIMA (G): 2\n')
        f.write('CORRECCION DE LINEA BASE: LINEA BASE NO REMOVIDA\n')
        f.write('TIPO DE DATOS: NO CORREGIDO\n')
        f.write('         EW                  VER                  NS\n')

        # Write the array values one by one into the columns of the ASCII format

        for i in np.arange(lineas):

            f.write(
                f'    {EW[i]:11.8f}           {VER[i]:11.8f}         {NS[i]:11.8f} \n'
            )

        f.close()

        tf = time.time()

        self.tiempo_rutina = timedelta(seconds=tf - ti)
# instructions

import logging

logging.basicConfig(level=logging.INFO,
                    format="%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s")

from obspy import UTCDateTime, Catalog
from collections import Counter
from obspy.clients.fdsn import Client
from eqcorrscan.utils.catalog_utils import filter_picks
from eqcorrscan import Tribe

# -access the geonet database
client = Client("http://service.geonet.org.nz")
t1 = UTCDateTime(2019, 12, 6)
t2 = t1 + 86400
# -if I just keep pushing out the date does this push out the length of time the templates will match against?
detection_endtime = UTCDateTime(2019, 12, 8)

# -Gets specific earthquake events that fit within the given specifications.
# -Each earthquake has a bunch of associated stations and each station has a bunch of channels each channel has a p and s wave pick for the earthquake
# -What is the "WARNING Data for KUZ.HHN is 3.783291666666667 hours long, which is less than 80 percent of the desired length, will not use"?
# Surely each event stored in the catalog is only a few seconds long so having 3 hours of data is still significant
# -"TypeError: The parameter 'includearrivals' is not supported by the service."? this ok without it?
catalog = client.get_events(starttime=t1,
                            endtime=t2,
                            minmagnitude=2.5,
                            minlatitude=-38.0,
                            maxlatitude=-37.0,
                            minlongitude=177.0,
Example #12
web services based on this protocol are powerful and can cover most research
needs, such as downloading waveform data, earthquake catalogs, and station
metadata.

.. warning::

    Data centers and web services may change over time, so the examples in
    this section may stop working in the future; if that happens, please
    refer to the latest documentation.

"""

#%%
# Step 1: initialize the client object; this step is always required:

from obspy.clients.fdsn import Client

client = Client("IRIS")  # 这里既可以用数据中心的简写,也可以用 URL 地址

#%%
# Show the list of currently available data centers:

from obspy.clients.fdsn.header import URL_MAPPINGS
for key in sorted(URL_MAPPINGS.keys()):
    print("{0:<7} {1}".format(key, URL_MAPPINGS[key]))

#%%
# Downloading waveform data
# -------------------------
#
# When downloading waveform data from the server with the
# :meth:`get_waveforms() <obspy.clients.fdsn.client.Client.get_waveforms>` method,
# keyword arguments can be added to customize the request.
#
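# A minimal sketch of such a request (the station and time window below are
# illustrative assumptions): fetch one hour of BHZ data from IU.ANMO.
from obspy import UTCDateTime

t = UTCDateTime("2020-01-01T00:00:00")
st = client.get_waveforms(network="IU", station="ANMO", location="00",
                          channel="BHZ", starttime=t, endtime=t + 3600)
print(st)
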
from obspy import UTCDateTime
from obspy.core import AttribDict
from obspy.io.sac import SACTrace
from obspy.geodetics import gps2dist_azimuth, locations2degrees
import numpy as np
import os
from datetime import datetime
import calendar
import urllib

# %% codecell
if not os.path.exists(search_dir):
    os.makedirs(search_dir)
    
# LOAD CLIENT
client = Client(webservice)
print(client)
# %% codecell
# LOAD EVENT CATALOGUE
t1 = UTCDateTime(tstart)
t2 = UTCDateTime(tend)
if isCMT_params == 1:
    # Load events from GCMT catalogue using IRIS SPUD
    url_query = 'http://ds.iris.edu/spudservice/momenttensor/ids?' \
               +'evtstartdate='+t1.strftime('%Y-%m-%dT%H:%M:%S') \
               +'&evtenddate='+t2.strftime('%Y-%m-%dT%H:%M:%S') \
               +'&evtminmag='+str(minmagnitude)
    evids = urllib.request.urlopen(url_query)
    events_str = '&'.join([line.decode("utf-8").replace("\n", "") for line in evids])+'&'
    url_ndk = 'http://ds.iris.edu/spudservice/momenttensor/bundleids/ndk?'+events_str
    cat_evts = obspy.read_events(url_ndk)
Example #14
def fetch_rf_data(network, location, channel, data_directory, output_units,
                  minimum_magnitude, maximum_magnitude, station):

    # Track execution time for logging purposes
    t1 = time.time()

    ntwk = network
    stat = station
    loc = location
    chan = channel

    # Define the client that hosts the desired data
    client = Client("IRIS")

    # Define directory where seismic data will be saved as SAC files
    if output_units == 'counts':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_COUNTS/'
    elif output_units == 'displacement':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_DISP/'
    elif output_units == 'velocity':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_VEL/'
    elif output_units == 'acceleration':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_ACC/'
    else:
        print(
            'ERROR: Invalid output units. Acceptable options are \'counts\', \'displacement\', \'velocity\', or \'acceleration\''
        )
        quit()

    # For now: delete the directory if it exists...
    if os.path.exists(sac_dir):
        print('Directory exists. Terminating process...')
        quit()
        # shutil.rmtree(sac_dir)

    if not os.path.exists(sac_dir):
        os.makedirs(sac_dir)

    # Define amount of data desired (minutes)
    duration = 60

    # Log potential errors to a .log file
    logFileName = sac_dir + ntwk + '.' + stat + '.log'

    # Fetch station information for data retrieval
    if loc == "NULL":
        loc = ""
        try:
            inv = client.get_stations(network=ntwk,
                                      station=stat,
                                      channel=chan,
                                      level="response")
        except Exception as error:
            with open(logFileName, "a") as log:
                log.write(str(error))
                log.write(
                    'Error fetching station information with the IRIS client...'
                )
            return
    else:
        try:
            inv = client.get_stations(network=ntwk,
                                      station=stat,
                                      location=loc,
                                      channel=chan,
                                      level="response")
        except Exception as error:
            with open(logFileName, "a") as log:
                log.write(str(error))
                log.write(
                    'Error fetching station information with the IRIS client...'
                )
            return

    # Save the pole zero files
    nstats = len(inv.networks[0])
    resp_t0 = []
    resp_tf = []
    pre_filt = []
    for i in range(0, nstats):
        nresp = len(inv.networks[0].stations[i].channels)
        # Tag the PZ files and SAC files with a number indicating the period of operation
        for j in range(0, nresp):
            fileName = sac_dir + "SAC_PZs_" + ntwk + '_' + stat + '_' + inv.networks[0].stations[i].channels[j].code + \
                       '.' + str(j)
            with open(fileName, "a") as pzFile:
                pzFile.write('* **********************************\n')
                pzFile.write('* NETWORK   (KNETWK): ' + inv.networks[0].code +
                             '\n')
                pzFile.write('* STATION    (KSTNM): ' +
                             inv.networks[0].stations[i].code + '\n')
                pzFile.write(
                    '* LOCATION   (KHOLE): ' +
                    inv.networks[0].stations[i].channels[j].location_code +
                    '\n')
                pzFile.write('* CHANNEL   (KCMPNM): ' +
                             inv.networks[0].stations[i].channels[j].code +
                             '\n')
                pzFile.write('* CREATED           : ' +
                             str(UTCDateTime.now()).split('.')[0] + '\n')
                pzFile.write('* START             : ' +
                             str(inv.networks[0].stations[i].channels[j].
                                 start_date).split('.')[0] + '\n')
                pzFile.write('* END               : ' +
                             str(inv.networks[0].stations[i].channels[j].
                                 end_date).split('.')[0] + '\n')
                pzFile.write('* DESCRIPTION       : ' +
                             inv.networks[0].stations[i].site.name + '\n')
                pzFile.write('* LATITUDE          : %0.6f\n' %
                             inv.networks[0].stations[i].latitude)
                pzFile.write('* LONGITUDE         : %0.6f\n' %
                             inv.networks[0].stations[i].longitude)
                pzFile.write('* ELEVATION         : %0.1f\n' %
                             inv.networks[0].stations[i].channels[j].elevation)
                pzFile.write('* DEPTH             : %0.1f\n' %
                             inv.networks[0].stations[i].channels[j].depth)
                pzFile.write(
                    '* DIP               : %0.1f\n' %
                    (90.0 -
                     np.abs(inv.networks[0].stations[i].channels[j].dip)))
                pzFile.write('* AZIMUTH           : %0.1f\n' %
                             inv.networks[0].stations[i].channels[j].azimuth)
                pzFile.write(
                    '* SAMPLE RATE       : %0.1f\n' %
                    inv.networks[0].stations[i].channels[j].sample_rate)
                pzFile.write('* INPUT UNIT        : M\n')
                pzFile.write('* OUTPUT UNIT       : COUNTS\n')
                pzFile.write('* INSTTYPE          : ' + inv.networks[0].
                             stations[i].channels[j].sensor.description + '\n')
                pzFile.write('* INSTGAIN          : %e (M/S)\n' %
                             inv.networks[0].stations[i].channels[j].response.
                             get_paz().stage_gain)
                pzFile.write('* COMMENT           : \n')
                pzFile.write('* SENSITIVITY       : %e (M/S)\n' %
                             inv.networks[0].stations[i].channels[j].response.
                             instrument_sensitivity.value)
                pzFile.write('* A0                : %e\n' %
                             inv.networks[0].stations[i].channels[j].response.
                             get_paz().normalization_factor)
                pzFile.write('* **********************************\n')

                # Save the poles, zeros, and constant
                nzeros = 3
                zeros = inv.networks[0].stations[i].channels[
                    j].response.get_paz().zeros
                nz = np.nonzero(zeros)
                pzFile.write('ZEROS   ' + str(len(nz[0]) + nzeros) + '\n')
                pzFile.write("        %+e   %+e\n" % (0, 0))
                pzFile.write("        %+e   %+e\n" % (0, 0))
                pzFile.write("        %+e   %+e\n" % (0, 0))
                if len(nz[0]) != 0:
                    for k in range(0, len(nz[0])):
                        pzFile.write("        %+e   %+e\n" % (np.real(
                            zeros[nz[0][k]]), np.imag(zeros[nz[0][k]])))

                poles = inv.networks[0].stations[i].channels[
                    j].response.get_paz().poles
                pzFile.write('POLES   ' + str(len(poles)) + '\n')
                for k in range(0, len(poles)):
                    pzFile.write(
                        "        %+e   %+e\n" %
                        (np.real(inv.networks[0].stations[i].channels[j].
                                 response.get_paz().poles[k]),
                         np.imag(inv.networks[0].stations[i].channels[j].
                                 response.get_paz().poles[k])))

                pzFile.write(
                    'CONSTANT        %e' %
                    (inv.networks[0].stations[i].channels[j].response.get_paz(
                    ).normalization_factor * inv.networks[0].stations[i].
                     channels[j].response.instrument_sensitivity.value))
                # pzFile.write(inv.networks[0].stations[i].channels[j].response.get_sacpz())

    # Loop over time-periods during which the station was operational and fetch data
    for i in range(0, nstats):
        # Recompute the channel count for this station (otherwise nresp carries
        # over from the previous loop and may be wrong for this station)
        nresp = len(inv.networks[0].stations[i].channels)
        for j in range(0, nresp):
            if inv.networks[0].stations[i].channels[
                    j].end_date > UTCDateTime.now():
                t0 = inv.networks[0].stations[i].channels[j].start_date
                tf = UTCDateTime.now()
            else:
                t0 = inv.networks[0].stations[i].channels[j].start_date
                tf = inv.networks[0].stations[i].channels[j].end_date
            # Get station coordinates for event selection
            stla = inv.networks[0].stations[i].latitude
            stlo = inv.networks[0].stations[i].longitude
            # Fetch relevant events in time-window during which station was operational
            try:
                catalog = client.get_events(starttime=t0,
                                            endtime=tf,
                                            minmagnitude=minimum_magnitude,
                                            maxmagnitude=maximum_magnitude,
                                            latitude=stla,
                                            longitude=stlo,
                                            minradius=30,
                                            maxradius=90)
            except Exception as error:
                with open(logFileName, "a") as log:
                    log.write(str(error))
                    log.write('Error fetching event catalog...')
                continue

            nEvents = len(catalog.events)
            # Initialize list of events used for bulk request
            bulk = []
            # Fill 'bulk' with desired event information
            for k in range(0, nEvents):
                teq = catalog.events[k].origins[0].time
                chan = inv.networks[0].stations[i].channels[j].code
                bulk.append((ntwk, stat, loc, chan, teq, teq + duration * 60))

            # Fetch the data!
            if output_units == 'counts':
                try:
                    st = client.get_waveforms_bulk(bulk)
                except Exception as error:
                    with open(logFileName, "a") as log:
                        log.write(str(error))
                        log.write('Unable to complete fetch request for: ' +
                                  stat + '.' + loc + '.' + chan)
                    continue
            else:
                try:
                    st = client.get_waveforms_bulk(bulk, attach_response=True)
                except Exception as error:
                    with open(logFileName, "a") as log:
                        log.write(str(error))
                        log.write('Unable to complete fetch request for: ' +
                                  stat + '.' + loc + '.' + chan)
                    continue

            # Do some file-formatting and optional minor pre-processing
            for k in range(0, len(st)):
                teq = st[k].meta.starttime

                # Optional instrument response removal goes here...
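                # (Sketch, not part of the original script: if responses were
                #  attached above via attach_response=True, they could be removed
                #  here before the SAC files are written, e.g.
                #      st[k].remove_response(output='VEL',
                #                            pre_filt=(0.01, 0.02, 8.0, 10.0),
                #                            water_level=60)
                #  The pre_filt corners are illustrative only and should match
                #  the band of interest.)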

                # Prepare filename for saving
                evchan = st[k].meta.channel
                evid = st[k].meta.starttime.isoformat().replace(
                    '-', '.').replace('T', '.').replace(':',
                                                        '.').split('.')[:-1]
                evid.extend([ntwk, stat, loc, evchan, str(j), 'SAC'])
                evid = ".".join(evid)
                # Add station specific metadata to SAC files
                st[k].stats.sac = {}
                st[k].stats.sac.stla = stla
                st[k].stats.sac.stlo = stlo
                # Channel orientation (CMPAZ)
                azid = [ntwk, stat, loc, evchan]
                azid = ".".join(azid)
                st[k].stats.sac.cmpaz = inv.get_orientation(azid,
                                                            teq)["azimuth"]

                # Add event-specific metadata to SAC files (surely there must be a faster way to do this...?)
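                # (Aside, not in the original code: one alternative would be to
                #  build a dict keyed by origin time once per request and look up
                #  each trace's start time directly, instead of scanning every
                #  event for every trace.)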
                for l in range(0, nEvents):
                    if catalog.events[l].origins[0].time - 5 <= st[k].meta.starttime <= \
                            catalog.events[l].origins[0].time + 5:
                        st[k].stats.sac.evla = catalog.events[l].origins[
                            0].latitude
                        if st[k].stats.sac.evla is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event latitude for: " +
                                          evid + '\n')
                            st[k].stats.sac.evla = 0.0
                        st[k].stats.sac.evlo = catalog.events[l].origins[
                            0].longitude
                        if st[k].stats.sac.evlo is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event longitude for: " +
                                          evid + '\n')
                            st[k].stats.sac.evlo = 0.0
                        st[k].stats.sac.evdp = catalog.events[l].origins[
                            0].depth
                        if st[k].stats.sac.evdp is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event depth for: " +
                                          evid + '\n')
                            st[k].stats.sac.evdp = 0.0
                        st[k].stats.sac.mag = catalog.events[l].magnitudes[
                            0].mag
                        if st[k].stats.sac.mag is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event magnitude for: " +
                                          evid + '\n')
                            st[k].stats.sac.mag = 0.0
                        # Calculate great circle distance and back-azimuth
                        gcarc, baz = su.haversine(stla, stlo,
                                                  st[k].stats.sac.evla,
                                                  st[k].stats.sac.evlo)
                        st[k].stats.sac.gcarc = gcarc
                        st[k].stats.sac.baz = baz
                        # Get theoretical P arrival time, and assign to header 'T0'
                        model = TauPyModel(model="iasp91")
                        phases = ["P"]
                        arrivals = model.get_travel_times(
                            source_depth_in_km=st[k].stats.sac.evdp / 1000.0,
                            distance_in_degree=gcarc,
                            phase_list=phases)
                        st[k].stats.sac.t0 = arrivals[0].time

                        # Save the Pole Zero file index in 'USER0' Header
                        st[k].stats.sac.user0 = j

                        # Save the P-wave ray parameter in 'USER9' Header
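                        # (Note, added for clarity: obspy's Arrival.ray_param is
                        #  given in s/rad, so the factor pi/180 converts it to
                        #  s/deg before it is stored.)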
                        st[k].stats.sac.user9 = arrivals[0].ray_param * (
                            np.pi / 180)

                        # Write the data to a SAC file
                        st[k].write(sac_dir + evid, format='SAC')

    elapsed = time.time() - t1
    with open(logFileName, "a") as log:
        log.write('Time required to complete fetch request: ' + str(elapsed))
Beispiel #15
0
def check_stations(maxradius, SEARCH_TYPE, search_netstat, excl_net, excl_stat,
                   lat, lon, Year, Month, Day, Hour, Minute, Second):

    sta_lat = lat
    sta_lon = lon
    maxradius = maxradius / 111  # conversion from km to degrees
    start_1 = (str(Year) + '-' + str(Month) + '-' + str(Day) + '-' +
               str(Hour) + '-' + str(Minute) + '-' + str(Second))
    UTCDateTime.DEFAULT_PRECISION = 0
    t0 = UTCDateTime(start_1)

    #################################
    # if only stations in Ireland
    if SEARCH_TYPE == 1:
        sta_lat = 53.522
        sta_lon = -8.744
        maxradius = 2.36

    ##############################
    # load inventories
    inv = Inventory(networks=[])

    if SEARCH_TYPE != 2:  # if no specific stations are specified
        # Raspberry Shake Network
        client = Client(base_url='https://fdsnws.raspberryshakedata.com/')
        try:
            inv += client.get_stations(longitude=sta_lon,
                                       latitude=sta_lat,
                                       maxradius=maxradius,
                                       starttime=t0,
                                       endtime=t0 + 60,
                                       channel="EHZ,SHZ",
                                       level="channel")
        except:
            print("No Raspberry Shake station data found")
            print('')
        # BGS network
        client = Client('http://eida.bgs.ac.uk')
        try:
            inv += client.get_stations(longitude=sta_lon,
                                       latitude=sta_lat,
                                       maxradius=maxradius,
                                       starttime=t0,
                                       endtime=t0 + 60,
                                       channel="HHZ",
                                       level="channel")
        except:
            print("No GB station data found")
            print('')
        # networks provided by GFZ service
        client = Client('GFZ')
        try:
            inv += client.get_stations(longitude=sta_lon,
                                       latitude=sta_lat,
                                       maxradius=maxradius,
                                       starttime=t0,
                                       endtime=t0 + 60,
                                       channel="HHZ",
                                       level="channel")
        except:
            print("No GFZ station data found")
            print('')

    if SEARCH_TYPE == 2:  # if specific stations are specified
        for stat in search_netstat:
            net = stat.split(".")[0]
            station = stat.split(".")[1]
            # Raspberry Shake Network
            if net == 'AM':
                client = Client(
                    base_url='https://fdsnws.raspberryshakedata.com/')
                try:
                    inv += client.get_stations(network=net,
                                               station=station,
                                               starttime=t0,
                                               endtime=t0 + 60,
                                               channel="EHZ,SHZ",
                                               level="channel")
                except:
                    print("No Raspberry Shake station data found")
                    print('')
            # BGS network
            if net == 'GB':
                client = Client('http://eida.bgs.ac.uk')
                try:
                    inv += client.get_stations(network=net,
                                               station=station,
                                               starttime=t0,
                                               endtime=t0 + 60,
                                               channel="HHZ",
                                               level="channel")
                except:
                    print("No GB station data found")
                    print('')
            # networks provided by GFZ service
            if net != 'AM' and net != 'GB':
                client = Client('GFZ')
                try:
                    inv += client.get_stations(network=net,
                                               station=station,
                                               starttime=t0,
                                               endtime=t0 + 60,
                                               channel="HHZ",
                                               level="channel")
                except:
                    print("No GFZ station data found")
                    print('')

    ########################################
    # populate station entries nslc (network station location channel)
    nslc = []
    for network in inv:
        for station in network:
            for channel in station:
                nslc.extend([
                    network.code + "." + station.code + "." +
                    channel.location_code + "." + channel.code
                ])

    # remove multiple entries:
    nslc = list(dict.fromkeys(nslc))

    print(
        'Found data for these stations in the provided search area and time frame:'
    )
    print(nslc)

    ###################################
    # exclude certain networks/stations

    excl_netstat = []
    for item in nslc:
        for ex_n in excl_net:
            if re.search(ex_n + '.+',
                         item):  # '.+' acts as a wildcard, like '*'
                excl_netstat.append(item)
    for item in excl_netstat:
        nslc.remove(item)

    excl_netstat = []
    for item in nslc:
        for ex_s in excl_stat:
            if re.search('.+' + ex_s + '.+',
                         item):  # '.+' acts as a wildcard, like '*'
                excl_netstat.append(item)
    for item in excl_netstat:
        nslc.remove(item)

    print('')
    print('new list after excluding networks and/or stations:')
    print(nslc)
    #print(inv)

    return nslc, t0, inv
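
# (Usage sketch, not from the original source; the argument values below are
#  purely illustrative:
#      nslc, t0, inv = check_stations(maxradius=100, SEARCH_TYPE=0,
#                                     search_netstat=[], excl_net=[], excl_stat=[],
#                                     lat=53.3, lon=-6.3, Year=2020, Month=1,
#                                     Day=1, Hour=0, Minute=0, Second=0)
#  SEARCH_TYPE 0 searches around (lat, lon), 1 restricts the search to Ireland,
#  and 2 uses the explicit "NET.STA" entries given in search_netstat.)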
Beispiel #16
0
from __future__ import print_function
import datetime
from obspy import UTCDateTime
import timeit
from obspy.clients.fdsn import Client

###########  Here are the parameters to play around with ######
datacenter = "SCEDC"   #  IRIS, NCEDC, SCEDC
chanfile = "ShakeAlertList.SCEDCZ"
duration = 3726
starttime = datetime.datetime(2018,1,8,9,57,54)
bulk = 1     #  0 = serial downloading, 1 = bulkdownloading
##########

client = Client(datacenter)
endtime = starttime + datetime.timedelta(0,duration)

f1 = open(chanfile)
lines = f1.readlines()
f1.close()
T1 = UTCDateTime(starttime)
T2 = UTCDateTime(starttime + datetime.timedelta(0,duration))

if ( bulk == 0 ):
    for i in range(0,len(lines)):
        bulkrequest = []
        net = lines[i].split()[0]
        stat = lines[i].split()[1]
        loc = lines[i].split()[2]
        chan = lines[i].split()[3]
Beispiel #17
0
import pathlib

from obspy import UTCDateTime, Inventory
from obspy.clients.fdsn import Client

from quakemigrate.io import read_stations

# --- i/o paths ---
station_file = "./inputs/iceland_stations.txt"
data_path = pathlib.Path("./inputs/mSEED")
response_file = "./inputs/Z7_dataless.xml"

# --- Set network code & client ---
network = "Z7"
datacentre = "IRIS"
client = Client(datacentre)

# --- Set time period over which download data ---
starttime = UTCDateTime("2014-236T00:00:00")
endtime = UTCDateTime("2014-236T00:15:00")

#  --- Read in station file ---
stations = read_stations(station_file)

# --- Download instrument response inventory ---
inv = Inventory()
for station in stations["Name"]:
    inv += client.get_stations(network=network,
                               station=station,
                               starttime=starttime,
                               endtime=endtime,
Beispiel #18
0
def main():

    # Run the Input Parser
    args = arguments.get_dl_calc_arguments()

    # Load Database
    db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)

    # Loop over station keys
    for stkey in list(stkeys):

        sta = db[stkey]

        # Output directory
        outdir = Path(args.saveloc) / Path(stkey.upper())
        if not outdir.exists():
            outdir.mkdir(parents=True)

        # Establish client for catalogue
        if args.verb > 1:
            print("   Establishing Catalogue Client...")
        cat_client = Client(args.cat_client)
        if args.verb > 1:
            print("      Done")

        # Establish client for waveforms
        if args.verb > 1:
            print("   Establishing Waveform Client...")
        if len(args.UserAuth) == 0:
            wf_client = Client(args.wf_client)
        else:
            wf_client = Client(args.wf_client,
                               user=args.UserAuth[0],
                               password=args.UserAuth[1])
        if args.verb > 1:
            print("      Done")
            print(" ")

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        if args.verb > 1:
            print("|==============================================|")
            print("|                   {0:>8s}                   |".format(
                sta.station))
            print("|==============================================|")
            print("|  Station: {0:>2s}.{1:5s}                           |".format(
                sta.network, sta.station))
            print("|      Channel: {0:2s}; Locations: {1:15s} |".format(
                sta.channel, ",".join(tlocs)))
            print("|      Lon: {0:7.2f}; Lat: {1:6.2f}               |".format(
                sta.longitude, sta.latitude))
            print("|      Start time: {0:19s}         |".format(
                sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
            print("|      End time:   {0:19s}         |".format(
                sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
            print("| Output Directory: ", args.saveloc)
            print("| Save Progress: ", args.constsave)
            print("|----------------------------------------------|")
            print("| Searching Possible events:                   |")
            print("|   Start: {0:19s}                 |".format(
                tstart.strftime("%Y-%m-%d %H:%M:%S")))
            print("|   End:   {0:19s}                 |".format(
                tend.strftime("%Y-%m-%d %H:%M:%S")))
            print("|   Mag:   >{0:3.1f}".format(args.minmag) +
                  "                       |")
            print("|   Min Distance:  {0:.1f}".format(args.mindist))
            print("|   Max Distance:  {0:.1f}".format(args.maxdist))
            print("|   Max Depth:     {0:.1f}".format(args.maxdep))

        # Retrieve Event Catalogue
        if args.verb > 1:
            print("|   Request Event Catalogue...                 |")
            print("| ...                                          |")

        try:
            cat = cat_client.get_events(starttime=tstart, endtime=tend,
                                        minmagnitude=args.minmag)

            # get index of repeat events, save for later
            reps = np.unique(utils.catclean(cat))

        except Exception:
            raise Exception("  Fatal Error: Cannot download Catalogue")

        if args.verb > 1:
            print("|   Retrieved {0} events ".format(len(cat.events)))
            print()

        for i, ev in enumerate(cat):

            if i in reps:
                continue

            # Initialize BNGData object with station info
            dldata = DL(sta)

            # Add event to object
            accept = dldata.add_event(
                ev, gacmin=args.mindist, gacmax=args.maxdist,
                depmax=args.maxdep, returned=True)

            # Define time stamp
            yr = str(dldata.meta.time.year).zfill(4)
            jd = str(dldata.meta.time.julday).zfill(3)
            hr = str(dldata.meta.time.hour).zfill(2)

            # If event is accepted (data exists)
            if accept:

                # Display Event Info
                print(" ")
                print("**************************************************")
                print("* ({0:d}/{1:d}):  {2:13s} {3}".format(
                    i+1, len(cat), dldata.meta.time.strftime(
                        "%Y%m%d_%H%M%S"), stkey))
                if args.verb > 1:
                    print("*   Origin Time: " +
                          dldata.meta.time.strftime("%Y-%m-%d %H:%M:%S"))
                    print(
                        "*   Lat: {0:6.2f};        Lon: {1:7.2f}".format(
                            dldata.meta.lat, dldata.meta.lon))
                    print(
                        "*   Dep: {0:6.2f} km;     Mag: {1:3.1f}".format(
                            dldata.meta.dep, dldata.meta.mag))
                    print("*   Dist: {0:7.2f} km;".format(dldata.meta.epi_dist) +
                          "   Epi dist: {0:6.2f} deg\n".format(dldata.meta.gac) +
                          "*   Baz:  {0:6.2f} deg;".format(dldata.meta.baz) +
                          "   Az: {0:6.2f} deg".format(dldata.meta.az))

                # Event Folder
                timekey = dldata.meta.time.strftime("%Y%m%d_%H%M%S")
                evtdir = outdir / timekey
                evtdata = evtdir / 'Raw_data.pkl'
                evtmeta = evtdir / 'Meta_data.pkl'

                # Check if DL data already exist and overwrite has been set
                if evtdir.exists():
                    if evtdata.exists():
                        if not args.ovr:
                            continue

                # Get data
                t1 = 0.
                t2 = 4.*60.*60.
                has_data = dldata.download_data(
                    client=wf_client, stdata=args.localdata,
                    ndval=args.ndval, new_sr=2., t1=t1, t2=t2,
                    returned=True, verbose=args.verb)

                if not has_data:
                    continue

                # Check data length
                if utils.checklen(dldata.data, 4.*60.*60.):
                    print("      Error: Length Incorrect")
                    continue

                # Create Folder if it doesn't exist
                if not evtdir.exists():
                    evtdir.mkdir(parents=True)

                # Save raw Traces
                pickle.dump(dldata.data, open(evtdata, "wb"))

                # Calculate DL orientation
                dldata.calc(showplot=False)

                if args.verb > 1:
                    print("* R1PHI: {}".format(dldata.meta.R1phi))
                    print("* R2PHI: {}".format(dldata.meta.R2phi))
                    print("* R1CC: {}".format(dldata.meta.R1cc))
                    print("* R2CC: {}".format(dldata.meta.R2cc))

                # Save event meta data
                pickle.dump(dldata.meta, open(evtmeta, "wb"))
Beispiel #19
0
        plt.xlim((min(times), max(times)))
        plt.ylim((-1,1))
        plt.ylabel('Correlation')
        plt.xlabel('Time (year)')
    
    ax = plt.subplot(3,1,3)
    handles, labels = ax.get_legend_handles_labels()
    leg = fig.legend(handles, labels, loc = 'lower center', ncol = 5, fontsize = 15)
    plt.savefig(net + '_' + sta + '_summary.png', format='PNG')
    #plt.show()
    plt.clf()
    plt.close()
    
    return
    
client = Client()
stime = UTCDateTime('2017-001T00:00:00')
etime = UTCDateTime('2019-196T00:01:00')

net = 'N4'
inv = plot_utils.get_dataless(net)
#comp ='Z'


dicZ = plot_utils.get_dic(net, 'Z')
dicR = plot_utils.get_dic(net, 'R')


stas = []
for nets in inv:
    if nets.code == net:
Beispiel #20
0
def main():
    '''
    Main routine to collect command-line arguments and make the FDSN client request
    '''
    parser = argparse.ArgumentParser(
        prog=progname,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='''
            Grab station metadata from IRIS FDSN server for stations that are: 1) within a
            certain search radius of a given lat,lon, and 2) operating during a
            particular time frame. It outputs two files: a CSV file
            and a StationXML file.
            ''',
        epilog='''

            ''')
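    # (Illustrative invocation, not from the original source; the script name is
    #  hypothetical:
    #      python grab_station_metadata.py -b 2019001T00:00 -e 2019032T00:00 \
    #          --lat 46.8 --lon -121.7 --radmin 1 --radmax 100 -c "BH?,HH?" -r
    #  would write sta_info.csv plus sta_info.staxml with full responses.)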

    parser.add_argument(
        "-b",
        "--begin",
        type=str,
        required=True,
        help="Start time in iso-format e.g. 2019001T00:00. Only stations \
            operating between begin/end time will be returned")

    parser.add_argument("-e",
                        "--end",
                        type=str,
                        required=True,
                        help="End time in iso-format e.g. 2019001T00:00")

    parser.add_argument(
        "-n",
        "--net",
        type=str,
        default=None,
        required=False,
        help="Net code. Used to narrow search results.")

    parser.add_argument("-c",
                        "--chan",
                        type=str,
                        default=None,
                        required=False,
                        help="""Chan codes, defaults to all available. E.g.
            ``BH?,HH?,*H*``. Used to narrow search results """)

    parser.add_argument("-r",
                        "--resp",
                        action="store_true",
                        default=False,
                        required=False,
                        help="Set to include response in StationXML file.")

    parser.add_argument(
        "--lon",
        type=float,
        required=True,
        help="Center longitude for search radius. Decimal degrees.")

    parser.add_argument(
        "--lat",
        type=float,
        required=True,
        help="Center latitude for search radius. Decimal degrees.")

    parser.add_argument("--radmin",
                        type=float,
                        required=True,
                        help="Minimum search radius in km (>0)")

    parser.add_argument("--radmax",
                        type=float,
                        required=True,
                        help="Maximum search radius in km (>radmin)")

    parser.add_argument(
        "-o",
        "--output",
        type=str,
        required=False,
        default="sta_info.csv",
        help=
        "Output filename for CSV. CSV suffix is replaced with .staxml for StationXML format"
    )

    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="increase debug spewage (e.g. -v, -vv, -vvv)")

    parser.add_argument(
        '--version',
        action='version',
        version='%(prog)s {version}'.format(version=__version__))

    args = parser.parse_args()

    startt = UTCDateTime(args.begin)
    endt = UTCDateTime(args.end)
    net = args.net
    chan = args.chan
    do_resp = args.resp
    lon = args.lon
    lat = args.lat
    radmin = args.radmin / 111.195
    radmax = args.radmax / 111.195
    output = args.output
    debug = args.verbose

    if do_resp:
        level = 'response'
    else:
        level = 'channel'

    if debug > 0:
        print('Command line arguments....')
        print("begin: ", startt)
        print("end: ", endt)
        print("net: ", net)
        print("chan: ", chan)
        print("lon:", lon)
        print("lat: ", lat)
        print("radmin: ", radmin)
        print("radmax: ", radmax)
        print("level: ", level)

    client = Client(timeout=240, base_url="http://service.iris.edu")
    try:
        inv = client.get_stations(starttime=startt,
                                  endtime=endt,
                                  network=net,
                                  channel=chan,
                                  latitude=lat,
                                  longitude=lon,
                                  minradius=radmin,
                                  maxradius=radmax,
                                  level=level)
        dump_output(inv, output, debug, lon, lat)
    except Exception as e:
        print(e)
Beispiel #21
0
    df[['networkcode','Times','TrInfo']] = df['Trace'].str.split('|',expand=True)
    df[['StartTime','EndTime']] = df['Times'].str.split(' - ',expand=True)

    return df

if __name__=='__main__':
  
  filename = 'EQ_ANALYSED_DATAFRAME_LIB.csv'

  df = read_EQ_df(filename)
  # Creating Residual magnitude column
  df['ML BGS - ML Station Magnitude'] = df["BGS Magnitude"] - df["Station Magnitude"]

  pre_filt = (0.01, 0.02, 35, 45)
  client = Client('http://fdsnws.raspberryshakedata.com')

  

#==================================================================================
  # This allows the user to specifically target one waveform to look at its features
  # x is the index in the CSV file.
  x = 5
  
  stat = df.iloc[x,9]
  station = stat[3:8]
    
  
  StartTime = df.iloc[x,12]
  StartTime = UTCDateTime(StartTime)
  amp = df.iloc[x,6]
Beispiel #22
0
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Args:
            event (dict):
                Best dictionary matching input event, fields as above
                in return of getMatchingEvents().

        Returns:
            StreamCollection: StreamCollection object.
        """
        # Bail out if FDSNFetcher not configured
        if 'FDSNFetcher' not in self.config['fetchers']:
            return
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The ObsPy mass downloader has its own logger - grab that stream
        # and write it to our own log file
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat,
                                longitude=self.lon,
                                minradius=0,
                                maxradius=self.radius)

        min_dist = self.minimum_interstation_distance_in_m
        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network,
            station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace that is shorter than minimum_length times the
            # desired total duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=min_dist,
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use default initialization.
        client_list = []
        for provider_str in URL_MAPPINGS.keys():
            if provider_str == GEO_NET_ARCHIVE_KEY:
                dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
                if dt < GEONET_ARCHIVE_DAYS:
                    provider_str = GEONET_REALTIME_URL
            try:
                fdsn_config = self.config['fetchers']['FDSNFetcher']
                if provider_str in fdsn_config:
                    client = Client(
                        provider_str,
                        user=fdsn_config[provider_str]['user'],
                        password=fdsn_config[provider_str]['password'])
                else:
                    client = Client(provider_str)
                client_list.append(client)
            # If the FDSN service is down, then an FDSNException is raised
            except FDSNException:
                logging.warning('Unable to initialize client %s' % provider_str)
            except KeyError:
                logging.warning('Unable to initialize client %s' % provider_str)

        if len(client_list):
            # Pass off the initialized clients to the Mass Downloader
            mdl = MassDownloader(providers=client_list)

            logging.info('Downloading new MiniSEED files...')
            # The data will be downloaded to the ``./waveforms/`` and
            # ``./stations/`` folders with automatically chosen file names.
            mdl.download(domain,
                         restrictions,
                         mseed_storage=rawdir,
                         stationxml_storage=rawdir)

            seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
            streams = []
            for seed_file in seed_files:
                try:
                    tstreams = read_obspy(seed_file, self.config)
                except BaseException as e:
                    tstreams = None
                    fmt = 'Could not read seed file %s - "%s"'
                    logging.info(fmt % (seed_file, str(e)))
                if tstreams is None:
                    continue
                else:
                    streams += tstreams

            stream_collection = StreamCollection(
                streams=streams, drop_non_free=self.drop_non_free)
            return stream_collection
Beispiel #23
0
def get_events(lonlat,
               starttime,
               endtime,
               cat_file,
               distance_range,
               magnitude_range,
               early_exit=True):
    """Load event catalog (if available) or create event catalog from FDSN server.

    :param lonlat: (Longitude, latitude) of reference location for finding events
    :type lonlat: tuple(float, float)
    :param starttime: Start time of period in which to query events
    :type starttime: obspy.UTCDateTime or str in UTC datetime format
    :param endtime: End time of period in which to query events
    :type endtime: obspy.UTCDateTime or str in UTC datetime format
    :param cat_file: File containing event catalog, or file name in which to store event catalog
    :type cat_file: str or Path
    :param distance_range: Range of distances over which to query seismic events
    :type distance_range: tuple(float, float)
    :param magnitude_range: Range of event magnitudes over which to query seismic events.
    :type magnitude_range: tuple(float, float)
    :param early_exit: If True, exit as soon as new catalog has been generated, defaults to True
    :type early_exit: bool, optional
    :return: Event catalog
    :rtype: obspy.core.event.catalog.Catalog
    """
    log = logging.getLogger(__name__)

    # If file needs to be generated, then this function requires internet access.
    if os.path.exists(cat_file):
        # For HPC systems with no internet access, the catalog file must be pre-generated
        log.warning(
            "Loading catalog from file {} irrespective of command line options!!!"
            .format(cat_file))
        log.info("Using catalog file: {}".format(cat_file))
        catalog = read_events(cat_file)
    else:
        min_magnitude = magnitude_range[0]
        max_magnitude = magnitude_range[1]
        client = Client('ISC')
        kwargs = {
            'starttime': starttime,
            'endtime': endtime,
            'latitude': lonlat[1],
            'longitude': lonlat[0],
            'minradius': distance_range[0],
            'maxradius': distance_range[1],
            'minmagnitude': min_magnitude,
            'maxmagnitude': max_magnitude
        }

        log.info(
            "Following parameters will be used for earthquake event query:\n{}"
            .format(kwargs))
        catalog = client.get_events(**kwargs)
        log.info("Catalog loaded from FDSN server")

        log.info("Creating catalog file: {}".format(cat_file))
        catalog.write(cat_file, 'QUAKEML')

        if early_exit:
            print("Run this process again using qsub")
            exit(0)
        # end if
    # end if

    # Filter catalog before saving
    catalog = _filter_catalog_events(catalog)

    return catalog
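
# (Usage sketch, not part of the original module; the values below are
#  illustrative only:
#      catalog = get_events(lonlat=(133.0, -20.0),
#                           starttime=UTCDateTime("2010-01-01"),
#                           endtime=UTCDateTime("2011-01-01"),
#                           cat_file="catalog.xml",
#                           distance_range=(30.0, 90.0),
#                           magnitude_range=(5.5, 8.0),
#                           early_exit=False)
#  On the first run this queries the ISC FDSN service and writes catalog.xml;
#  later runs read the cached file instead.)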
Beispiel #24
0
from obspy import UTCDateTime
from obspy.clients.fdsn import Client
import os
from datetime import date, timedelta

directory = '/home/csmi310/msnoise/data/'
os.chdir(directory)

client = Client('GEONET')

today = UTCDateTime(date.today())
days = timedelta(days=1)

for d in range(-9, -8):
    start = UTCDateTime(today + d * days)
    end = start + 24 * 60 * 60
    year = start.strftime('%Y')
    print('Getting data for:', start)
    st = client.get_waveforms(network='NZ',
                              station='??AZ',
                              location='*',
                              channel='EHZ',
                              starttime=start,
                              endtime=end,
                              attach_response=True)

    pre_filt = (0.01, 0.05, 30.0, 35.0)
    st.remove_response(output='VEL', pre_filt=pre_filt, water_level=60)

    st.merge(fill_value=0)
    st.sort()
Beispiel #25
0
plt.xlabel('Frequency [Hz]')
plt.ylabel('Amplitude')

plt.show()
# -

# ### Time-frequency plots - spectrograms
#
# So far, we analysed the data only in either the time or the frequency domain. Sometimes it is helpful to look at both dimensions together. Such a time-frequency plot is called a *spectrogram*. In cell 3, we first download and prepare data from the $M_w$ 9.1 Tohoku earthquake of 11 March 2011. In cell 4, we then create a spectrogram from these data.
#
# 7) What happens when you increase the number of sample points $NFFT$ in cell 4 and why?
# 8) Zoom in to the start of the signal by changing $xstart$ and $xend$. Does a longer or shorter window length allow you to identify the start of the signal more easily? Why?
# 9) Now adapt the time limits to look at the surface waves. What do you observe?

# Cell 3: prepare data from Tohoku earthquake.
client = Client("BGR")
t1 = UTCDateTime("2011-03-11T05:00:00.000")
st = client.get_waveforms("GR",
                          "WET",
                          "",
                          "BHZ",
                          t1,
                          t1 + 6 * 60 * 60,
                          attach_response=True)
st.remove_response(output="VEL")
st.detrend('linear')
st.detrend('demean')
st.plot()

# +
# Cell 4 - spectrogram
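
# (Sketch, not the original cell 4: one way to draw a spectrogram with matplotlib,
# assuming plt (matplotlib.pyplot) is imported in an earlier cell and using the
# stream st prepared in cell 3. NFFT, xstart and xend are the parameters referred
# to in the questions above.)
tr = st[0]
NFFT = 512                       # number of sample points per FFT window
xstart, xend = 0, 6 * 60 * 60    # displayed time window [s]
fig, ax = plt.subplots(figsize=(10, 4))
ax.specgram(tr.data, NFFT=NFFT, Fs=tr.stats.sampling_rate, noverlap=NFFT // 2)
ax.set_xlim(xstart, xend)
ax.set_xlabel('Time [s]')
ax.set_ylabel('Frequency [Hz]')
plt.show()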
Beispiel #26
0
distSta = df_setup.distStation[0]
distEv = df_setup.distEvent[0]
minMag = df_setup.minMag[0]
print('Search for stations and events within ',distSta,' and ',distEv,' km from coordinate (',lonCenter,',',latCenter,')')
print('Enforcing minimum magnitude of ',minMag)

#
#eventFileOut=('event_list_dist'+str(distEv)+'km.csv')
#stationFileOut=('station_list_dist'+str(distEv)+'km.csv')
eventFileOut=('event_list_subset.csv')
stationFileOut=('station_list_subset.csv')
print('Output files: ',eventFileOut,stationFileOut)

# Set up IRIS client
#fdsn_client = Client('https://service.iris.edu')
fdsn_client = Client('IRIS')

# do a bulk call for the needed stations
networks = df_station.network.tolist()
stations = df_station.station.tolist()
locations = df_station.location.tolist()
channels = df_station.channel.tolist()

# Some location codes might need fixing in the station file
for idx, loc in enumerate(locations):
    if loc == '__':
        locations[idx] = '--'

# preallocate pick arrays
ev_time_file = []
p_pick_time = []
Beispiel #27
0
from nmpy.util.data_request import data_request
from obspy.clients.fdsn import Client
import obspy
import os

path = '/data/simons/3-comp-data'

client = Client('IRIS')

events = client.get_events(minmagnitude=8)
oldfolder = None
for event in events[::-1]:
    eventy = str(event.origins[0].time.year)
    eventm = str(event.origins[0].time.month)
    eventd = str(event.origins[0].time.day)
    eventh = str(event.origins[0].time.hour)
    newfolder = '/' + eventy + '-' + eventm + '-' + eventd
    if newfolder == oldfolder:
        newfolder = newfolder + 'B'
    oldfolder = newfolder

    newpath = path + newfolder
    print(newpath)
    os.mkdir(newpath)
    os.chdir(newpath)
    cat = obspy.core.event.Catalog()
    cat.append(event)
    data_request('IRIS',
                 cat=cat,
                 channels="VHE,VHN,VHZ",
                 savefile='station',
Beispiel #28
0
def plot(nslc, correct, t0, lat, lon, length, pretime, R_hypos, freqmin,
         freqmax):

    if (correct != 'counts' and correct != 'disp' and correct != 'vel'
            and correct != 'acc'):
        sys.exit(
            'No valid amplitude parameter specified, exiting program now.')

    no_st = len(nslc)
    plt.style.use("default")  # or in jupyter get grey background
    #   plt.style.use("classic") # or in jupyter get grey background
    plt.figure(figsize=(10, no_st * 2 + 1), dpi=75)

    if freqmin == 'none' and freqmax == 'none':
        filter = 'none'
    if freqmin != 'none' and freqmax == 'none':
        filter = 'HP'
    if freqmin == 'none' and freqmax != 'none':
        filter = 'LP'
    if freqmin != 'none' and freqmax != 'none':
        filter = 'BP'

    for i in range(no_st):
        str_nslc = str(nslc[i])
        net = str_nslc.split(".")[0]
        station = str_nslc.split(".")[1]
        location = str_nslc.split(".")[2]
        channel = str_nslc.split(".")[3]
        R_hypo = R_hypos[i]
        print(net, station, "{:.0f}".format(round(R_hypo, 2)), "km")

        if net != 'AM' and net != 'GB':
            #         client = Client('IRIS')
            client = Client('GFZ')
        elif net == 'AM':
            #         client = Client(base_url='https://fdsnws.raspberryshakedata.com/')
            client = Client('https://fdsnws.raspberryshakedata.com')
        elif net == 'GB':
            client = Client('http://eida.bgs.ac.uk')

        # 60 extra seconds in case of filtering...
        st = client.get_waveforms(net,
                                  station,
                                  location,
                                  channel,
                                  t0 - 60,
                                  t0 + length + 60,
                                  attach_response=True)

        # instrument correction
        if correct == 'disp':
            st.remove_response(output="DISP")
        if correct == 'vel':
            st.remove_response(output="VEL")
        if correct == 'acc':
            st.remove_response(output="ACC")

        if filter == 'HP':
            st.filter('highpass', freq=freqmin, corners=4, zerophase=True)
        if filter == 'LP':
            st.filter('lowpass', freq=freqmax, corners=4, zerophase=True)
        if filter == 'BP':
            st.filter('bandpass',
                      freqmin=freqmin,
                      freqmax=freqmax,
                      corners=4,
                      zerophase=True)

        #st.slice(t0, t0 + length)
        tr = st.slice(t0 - pretime, t0 + length)[0]
        if correct in ('disp', 'vel', 'acc'):
            tr.data *= 1e6  # plot in units of micro

        t = np.arange(tr.stats.npts) / tr.stats.sampling_rate
        t = t - pretime
        #convert from seconds to minutes:
        t = t / 60.0
        i = i + 1
        plt.subplot(no_st, 1, i)
        x1 = plt.gca()

        plt.plot(t, tr.data, 'k', label='%s km' % (str(int(round(R_hypo)))))
        # add legend
        legend = x1.legend(loc='upper right')
        # remove line in legend + use small font
        leg = x1.legend(handlelength=0,
                        handletextpad=0,
                        fancybox=True,
                        fontsize='small')
        for item in leg.legendHandles:
            item.set_visible(False)
        plt.grid()
        # x- and y-limits
        (ymin, ymax) = plt.ylim()
        #ymax_abs = max(-ymin,ymax)
        #plt.ylim(-ymax_abs, ymax_abs)
        plt.xlim(-pretime / 60., (length - pretime) / 60.)

        if i < no_st:
            x1.axes.xaxis.set_ticklabels([])
        if i == no_st:
            plt.xlabel('time [minutes since origin time]')
        #plt.xlim((t0-60)/60, (t0 + length)/60)

        if correct == 'counts':
            plt.ylabel('amplitude [counts]')
        if correct == 'disp':
            plt.ylabel('amplitude [' + r'$\mu m}$' + ']')
        if correct == 'vel':
            plt.ylabel('amplitude [' + r'$\mu m/s$' + ']')
        if correct == 'acc':
            plt.ylabel('amplitude [' + r'$\mu m/s^{2}$' + ']')

        if filter == 'HP':
            str_filter = str(freqmin) + "Hz HP"
        if filter == 'LP':
            str_filter = str(freqmax) + "Hz LP"
        if filter == 'BP':
            str_filter = str(freqmin) + "Hz - " + str(freqmax) + "Hz"


#      plt.title('%s.%s.%s.%s    %s' %(net,station,location,channel,str_filter),loc='right')
        plt.title('%s.%s    %s' % (net, station, str_filter), loc='right')
        plt.plot((0, 0), (ymin, ymax), 'r-')  # time of earthquake
        #######################################################################################
        # plot titles
        if i == 1:
            plt.title('%s, %s, %s' %
                      (t0.strftime("%d.%m.%Y, %H:%M:%S"), lat, lon))

        place = ''
        if net == 'AM':
            place = 'Citizen Station (Rasp. Shake)'
        if station == 'DSB':
            place = 'Dublin, Ireland (DIAS)'
        if station == 'IDGL':
            place = 'Donegal, Ireland (DIAS)'
        if station == 'IGLA':
            place = 'Galway, Ireland (DIAS)'
        if station == 'ILTH':
            place = 'Louth, Ireland (DIAS)'
        if station == 'IWEX':
            place = 'Wexford, Ireland (DIAS)'
        if station == 'VAL':
            place = 'Kerry, Ireland (DIAS)'
        if station == 'LEWI':
            place = ' Isle of Lewis, Scotland (BGS)'
        if station == 'KPL':
            place = ' Plockton, Scotland (BGS)'
        if station == 'LAWE':
            place = ' Loch Awe, Scotland (BGS)'
        if station == 'PGB1':
            place = ' Glasgow, Scotland (BGS)'
        if station == 'CLGH':
            place = ' Antrim, Northern Ireland (BGS)'
        if station == 'NEWG':
            place = ' New Galloway, Scotland (BGS)'
        if station == 'GAL1':
            place = ' Galloway, Scotland (BGS)'
        if station == 'ESK':
            place = ' Eskdalemuir, Scotland (BGS)'
        if station == 'KESW':
            place = ' Keswick, England (BGS)'
        if station == 'IOMK':
            place = 'Isle of Man (BGS)'
        if station == 'WLF1':
            place = 'Anglesey, Wales (BGS)'
        if station == 'WPS':
            place = 'Anglesey, Wales (BGS)'
        if station == 'FOEL':
            place = ' Llangollen, Wales (BGS)'
        if station == 'HLM1':
            place = ' Shropshire, England (BGS)'
        if station == 'RSBS':
            place = 'Pembrokeshire, Wales (BGS)'
        if station == 'HTL':
            place = ' Devon, England (BGS)'
        if station == 'CCA1':
            place = ' Cornwall, England (BGS)'
        if station == 'JSA':
            place = ' Jersey, Channel Islands (BGS)'
        if station == 'ROSA':
            place = ' Azores, Portugal'
        plt.title('%s' % (place), loc='left')

    plt.tight_layout()
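
# (Usage sketch, not from the original source; values are illustrative only --
#  nslc and t0 might come from a station-search step such as check_stations() in
#  the earlier example, and R_hypos holds one hypocentral distance in km per
#  entry of nslc:
#      plot(nslc, correct='vel', t0=t0, lat=53.3, lon=-6.3, length=600,
#           pretime=60, R_hypos=[35.2, 80.7], freqmin=1.0, freqmax=10.0)
#  plots each station's trace bandpass-filtered between 1 and 10 Hz in µm/s.)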
Beispiel #29
0
from obspy.clients.fdsn import Client
from obspy import UTCDateTime

client = Client("IRIS")

# save waveform
t = UTCDateTime("2018-03-01T00:00:00.000")
st = client.get_waveforms("TA", "121A", "--", "HHZ", t, t + 86400)
st.write("TA.121A.--.HHZ.mseed", format="MSEED")

# save inventory
inventory = client.get_stations(starttime=t,
                                endtime=t + 86400,
                                network="TA",
                                sta="121A",
                                loc="--",
                                channel="HHZ",
                                level="response")
inventory.write("TA.121A.xml", format="stationxml")

#st.plot()
Beispiel #30
0
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulating service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_event.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_station.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_dataselect.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://example.com/beta/event_service/11"
        base_url_station = "http://example.org/beta2/station/7"
        base_url_ds = "http://example.edu/beta3/dataselect/8"

        # An exception will be raised if no actual WADLs are returned.
        # Catch warnings to avoid them being raised for the tests.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            c = Client(base_url=base_url,
                       service_mappings={
                           "event": base_url_event,
                           "station": base_url_station,
                           "dataselect": base_url_ds,
                       })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning)
                            or "cannot deal with" in str(warning))

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D",
                            UTCDateTime() - 100, UTCDateTime())
        except:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])