# Example 1 (fragment — enclosing function's definition line is missing from this excerpt)
        day_ref_comp_sts[j].merge(method=1)
    for j, st in enumerate(night_deck_sts):
        night_deck_comp_sts.append(st.select(component=component))
        night_deck_comp_sts[j].merge(method=1)
    for j, st in enumerate(night_ref_sts):
        night_ref_comp_sts.append(st.select(component=component))
        night_ref_comp_sts[j].merge(method=1)

    day_ppsd_deck = PPSD(day_deck_comp_sts[0][0].stats,
                         TrillC,
                         ppsd_length=600.0)
    for st in day_deck_comp_sts:
        day_ppsd_deck.add(st)
    # plotfile = 'day_deck_ppsd_' + component + '.png'
    # day_ppsd_deck.plot(plotfile, show_coverage=False)
    (meanpd, meanpsd) = day_ppsd_deck.get_mean()
    day_deck_mean_pd.append(meanpd)
    day_deck_mean_psd.append(meanpsd)

    night_ppsd_deck = PPSD(night_deck_comp_sts[0][0].stats,
                           TrillC,
                           ppsd_length=600.0)
    for st in night_deck_comp_sts:
        night_ppsd_deck.add(st)
    # plotfile = 'night_deck_ppsd_' + component + '.png'
    # night_ppsd_deck.plot(plotfile, show_coverage=False)
    (meanpd, meanpsd) = night_ppsd_deck.get_mean()
    night_deck_mean_pd.append(meanpd)
    night_deck_mean_psd.append(meanpsd)

    day_ppsd_ref = PPSD(day_ref_comp_sts[0][0].stats,
# Example 2
# Candidate channel lists kept for reference (only 'EHU'/'SHU' are used below).
# channels = ['EHU', 'EHV', 'EHW']
# channels = ['EHU']
# channels = ['SHU', 'MHV', 'MHW']

# NOTE(review): `read`, `read_inventory`, `PPSD`, `np`, and the
# `ondeckdatafile` / `ondeckmetadata` paths are expected to be defined
# earlier in the file (presumably obspy imports) — confirm.
st = read(ondeckdatafile)
inv = read_inventory(ondeckmetadata)

# On deck SP data
print("Working on on-deck data")
chn = 'EHU'
tr = st.select(channel=chn)[1] #first one may have metadata problem
# PPSD over 600 s segments; gaps are skipped rather than merged, period band
# limited to 0.02-100 s and power binned at 1 dB between -200 and -50 dB.
ppsd = PPSD(tr.stats, metadata=inv, ppsd_length=600.0, skip_on_gaps=True,
            period_limits=(0.02, 100.0), db_bins=(-200,-50, 1.))
st_select = st.select(channel=chn)
ppsd.add(st_select)
# Mean plus 5th/95th-percentile curves characterise the on-deck noise level.
(ondeckpd, ondeckpsd) = ppsd.get_mean()
(ondeck05pd, ondeck05psd) = ppsd.get_percentile(percentile=5)
(ondeck95pd, ondeck95psd) = ppsd.get_percentile(percentile=95)

weeklydir = 'datafiles/weeklies/'
metadata = 'datafiles/ELYSE.all.dl0226.response.xml'

# On ground SP
print("Working on on-ground data")
chn = 'SHU'
loc = '68'
# Week-long windows (start-date-end-date strings) naming the weekly data files.
weeks = np.array(['2019-01-06-2019-01-12', '2019-01-13-2019-01-19',
                  '2019-01-20-2019-01-26', '2019-01-27-2019-02-02'])

# Do first file
week = weeks[0]
def calculate_PPSD_noise(data, filter_type, minimum_frequency,
                         maximum_frequency, starttime, endtime):
    """
    Calculate a frequency-weighted RMS noise value for a trace via its PPSD.

    :param data: obspy Trace to compute noise for (``data.stats`` is read
        directly, so a Trace — not a Stream — is expected here)
    :param filter_type: obspy filter type, or a falsy value to skip filtering
    :param minimum_frequency: minimum frequency to use in filter
    :param maximum_frequency: maximum frequency to use in filter
    :param starttime: start time of the FDSN metadata query, as ISO8601 string
    :param endtime: end time of the FDSN metadata query, as ISO8601 string
    :return: tuple of (weighted RMS value, PPSD object), or (nan, nan) when
        no response metadata exists or the PPSD contains no data
    """

    # Optionally band-pass the data before building the PPSD.
    if filter_type:
        data = data.filter(type=filter_type,
                           freqmin=minimum_frequency,
                           freqmax=maximum_frequency)

    # Fetch the instrument response needed by PPSD from the GeoNet FDSN
    # service. (Named station_metadata to avoid shadowing any module-level
    # `metadata` variable.)
    client = Client("https://service.geonet.org.nz")
    try:
        station_metadata = client.get_stations(network='NZ',
                                               station=data.stats.station,
                                               location=data.stats.location,
                                               channel=data.stats.channel,
                                               starttime=UTCDateTime(starttime),
                                               endtime=UTCDateTime(endtime),
                                               level='response')
        ppsd = PPSD(data.stats, station_metadata)
        ppsd.add(data)
    except FDSNNoDataException:
        # No response metadata exists for this channel/time window.
        return np.nan, np.nan

    # RMS from the PPSD:
    # 1) the mean PSD value in each period bin is taken as that bin's PSD;
    # 2) each bin's dB value is converted back out of dB scale, scaled by
    #    1/f^2 (acceleration power -> velocity power), square-rooted per bin,
    #    and averaged using the bin's frequency width as the weight.
    # NOTE(review): the square root is applied per bin before averaging, not
    # to the final weighted mean — preserved as-is from the original logic.
    try:
        _, mean_psds = ppsd.get_mean()
    except Exception:
        # get_mean() fails when the PPSD holds no data.
        return np.nan, np.nan

    # Frequency width and centre frequency of each period bin
    # (frequency = 1 / period, so left period edge -> upper frequency edge).
    psd_widths = [1 / left - 1 / right
                  for left, right in zip(ppsd.period_bin_left_edges,
                                         ppsd.period_bin_right_edges)]
    psd_centres = [1 / centre for centre in ppsd.period_bin_centers]

    weighted_mean, weight_sum = 0.0, 0.0
    for psd_db, width, centre in zip(mean_psds, psd_widths, psd_centres):
        weighted_mean += math.sqrt(10 ** (psd_db / 10) / centre ** 2) * width
        weight_sum += width
    weighted_mean /= weight_sum

    return weighted_mean, ppsd