Example #1
def plot_PPSD(trace, sta, start_time, interval=7200, filebase=None, show=True):
    """
    Plot a Probabilistic Power Spectral Density for the trace

    trace = obspy Trace object
    sta = obspy Inventory/Station object corresponding to the trace
    start_time = time at which to start spectra
    interval=offset between PSDs (seconds, minimum=3600)
    """
    now_time = trace.stats.starttime
    first_read = True
    while now_time < trace.stats.endtime - interval:
        if first_read:
            if trace.stats.channel[1] == 'D':  # instrument code 'D': pressure/hydrophone
                ppsd = PPSD(trace.stats,
                            metadata=sta,
                            special_handling='hydrophone')
            else:
                ppsd = PPSD(trace.stats, metadata=sta)
            first_read = False
        ppsd.add(trace)
        now_time += interval

    if filebase:
        ppsd.save_npz(f'{filebase}_PPSD.npz')
        description = '{}.{}.{}.{}'.format(trace.stats.network,
                                           trace.stats.station,
                                           trace.stats.location,
                                           trace.stats.channel)
        ppsd.plot(filebase + '_' + description + '_PPSD.png')
    if show:
        ppsd.plot()
    # ppsd.plot_temporal([0.1,1,10])
    # ppsd.plot_spectrogram()
    return 0
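
# Hedged usage sketch for plot_PPSD; the file names and the single-trace
# selection are assumptions, and obspy's PPSD plus matplotlib.pyplot (plt)
# are expected to be imported at module level.
from obspy import read, read_inventory

st = read("XX.STA..HDH_day.mseed")      # assumed one-day miniSEED file
inv = read_inventory("XX.STA.xml")      # assumed StationXML with response
plot_PPSD(st[0], inv, st[0].stats.starttime, interval=7200,
          filebase="XX.STA..HDH_day", show=False)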
Example #2
def get_power(trPower, inv, periodlist):
    """
    Calculates the PSD using ObsPy PPSD.
    trPower: input ObsPy trace that must be > 3600. sec long.
    inv:  ObsPy inventory.  Must be level = 'response'
    periodlist: a list of periods at which you want the power 
    returns: power in dB at the periods in periodlist
    """
    from obspy.signal import PPSD
    import numpy as np

    powers_at_periods = []
    if (trPower.stats.delta * trPower.stats.npts > 3600.):
        ppsd = PPSD(trPower.stats, metadata=inv)
        ppsd.add(trPower)
        if len(ppsd._binned_psds) > 0:
            # row 2 of the (private) period binning array holds the bin-center periods
            psd_periods = ppsd._period_binning[2]
            psd_power = ppsd._binned_psds[0]
            for i in range(0, len(periodlist)):
                powers_at_periods.append(psd_power[np.argmin(
                    abs(psd_periods - periodlist[i]))])
        else:
            for i in range(0, len(periodlist)):
                powers_at_periods.append(-1)

    return powers_at_periods
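
# Hedged usage sketch for get_power; the waveform and StationXML file names
# and the period list are placeholders. The inventory must include the
# instrument response.
from obspy import read, read_inventory

tr = read("IU.ANMO.00.LHZ_day.mseed")[0]   # assumed trace longer than 3600 s
inv = read_inventory("IU.ANMO.xml")        # assumed response-level metadata
print(get_power(tr, inv, periodlist=[1.0, 10.0, 100.0]))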
Example #3
def ppsd(mseed_file, drop_value=9999):
    '''
    Get ppsd for one day file
    '''
    
    from obspy.signal import PPSD
    from obspy import read
    from numpy import where,mean,random
    
    #get data
    st=read(mseed_file)
#    st[0].data=random.normal(0, 0.1, size=st[0].stats.npts)
    
    #find gaps and not gaps
    gaps=where(st[0].data==drop_value)[0]
    not_gaps=where(st[0].data!=drop_value)[0]
    
    #Find mean without taking gaps into account and remove it
    bias=mean(st[0].data[not_gaps])
    st[0].data=st[0].data-bias
    
    #zero out gaps
    st[0].data[gaps]=0
    
    #define frequency response
    paz = {'gain': 1.0,'sensitivity': 1.0,'poles': [1,],'zeros': [0j, 0j]}
    
    #initalize ppsd object
    ppsd = PPSD(st[0].stats, paz, db_bins=(-60, 10, 0.5),
                period_limits=[2, 600], special_handling='ringlaser')
    
    #add to ppsd
    ppsd.add(st)

    return ppsd
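
# Hedged usage sketch (assumes the function above returns the PPSD object and
# that gaps in the one-day file are flagged with the value 9999):
day_ppsd = ppsd("station_day.mseed", drop_value=9999)
day_ppsd.plot()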
Example #4
    def ppsd(self,
             fmin=1.,
             fmax=100.,
             special_handling=None,
             filename=None,
             save=False):
        """
        Function that calculates the probabilistic power spectral density
        of a given station-channel combination.

        :type fmin: float
        :param fmin: Minimum frequency to show in PPSD plot
        :type fmax: float
        :param fmax: Maximum frequency to show in PPSD plot
        """

        # read list of files
        files = np.genfromtxt(self.filist, dtype=str)
        n = files.size
        # if no response metadata is given, just divide by a sensitivity of 1.0
        if self.metadata is None:
            self.metadata = {"sensitivity": 1.0}
        # loop over files
        for i in range(n):
            st = read(self.path + files[i])
            st.merge()
            #st.decimate(self.dec_fact)
            if len(st) > 1:
                warnings.warn("more than one trace in st")
            tr = st.select(station=self.stn, channel=self.chn)[0]
            # at first run, initialize PPSD instance
            if i == 0:
                # "is_rotational_data" is set in order not to differentiate that data
                inst = PPSD(tr.stats,
                            metadata=self.metadata,
                            special_handling=special_handling,
                            ppsd_length=1800.)
            # add trace
            print("add trace %s ..." % tr)
            inst.add(tr)
        print("number of psd segments:", len(inst.current_times_used))
        inst.plot(show_noise_models=True,
                  xaxis_frequency=True,
                  period_lim=(fmin, fmax),
                  filename=filename)
        if save:
            inst.save_npz("ppsd_%s_%s.npz" % (self.stn, self.chn))
Example #5
    def get_all_values(self, nested_dictionary):
        for key, value in nested_dictionary.items():
            if not self.check:
                if type(value) is dict:
                    nested_dictionary[key] = self.get_all_values(value)
                else:
                    files = []
                    process_list = []
                    if isinstance(value[0], list):
                        for j in value[0]:
                            st = read(j)
                            files.append(st[0])

                        ppsd = value[1]
                    else:
                        for j in value:
                            st = read(j)
                            files.append(st[0])
                        try:
                            ppsd = PPSD(
                                files[0].stats,
                                metadata=self.metadata,
                                ppsd_length=self.length,
                                overlap=self.overlap,
                                period_smoothing_width_octaves=self.smoothing,
                                period_step_octaves=self.period)
                        except:
                            pass

                    for i, j in zip(files, value):
                        try:
                            if not self.check:
                                ppsd.add(i)
                                self.processedFiles = self.processedFiles + 1
                                print(i, " processed")
                                self.fileProcessed.emit(self.processedFiles)
                            else:
                                process_list.append(j)
                        except:
                            process_list.append(j)

                    nested_dictionary[key] = [process_list, ppsd]

        return nested_dictionary
Example #6
def below_noise_model(station, data, inv, save_plot=False):
    tr = df_to_trace(station, data)
    ppsd = PPSD(tr.stats, metadata=inv)
    ppsd.add(tr)

    fig = ppsd.plot(show=False)

    if save_plot:
        julday = format_date_to_str(tr.stats.starttime.julday, 3)
        fig.savefig(
            f"plot_data/psd/{station}/{tr.stats.starttime.year}.{julday}.png",
            dpi=300)

    nlnm_t, nlnm_db = get_nlnm()
    trace_t = ppsd.period_bin_centers.tolist()

    interp_func = interpolate.interp1d(nlnm_t, nlnm_db, bounds_error=False)
    interp_db = interp_func(trace_t)

    traces_db = ppsd.psd_values

    min_t = closest_index_of_list(trace_t, 2.5)
    max_t = closest_index_of_list(trace_t, 10)

    for t, trace_db in enumerate(traces_db):
        diff = np.subtract(trace_db[min_t:max_t + 1],
                           interp_db[min_t:max_t + 1])
        for i, element in enumerate(diff):
            if element < 0:
                time_processed = ppsd.times_processed[t]
                year = format_date_to_str(time_processed.year, 4)
                month = format_date_to_str(time_processed.month, 2)
                day = format_date_to_str(time_processed.day, 2)
                hour = format_date_to_str(time_processed.hour, 2)
                minute = format_date_to_str(time_processed.minute, 2)
                second = format_date_to_str(time_processed.second, 2)
                datetime = f'D{year}{month}{day}T{hour}{minute}{second}'
                _id = station + '.' + datetime + '.1'

                return datetime, f'{str(element)}dB', _id, 1, 'Below Low Noise Model', station

    return None, f'OK. BelowLowNoiseModel of {station}', None, 0, None, None
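
# Hedged usage sketch for below_noise_model; the station code, the raw-data
# DataFrame and the response-level inventory are placeholders for whatever
# the surrounding module provides (df_to_trace and the other helpers are
# assumed to be importable alongside the function).
import pandas as pd
from obspy import read_inventory

inv = read_inventory("STA01.xml")        # assumed StationXML with response
data = pd.read_csv("STA01_day.csv")      # assumed raw samples for one day
result = below_noise_model("STA01", data, inv, save_plot=False)
print(result)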
Example #7
def _colormap_plot_ppsd(cmaps):
    """
    Plot for illustrating colormaps: PPSD.

    :param cmaps: list of :class:`~matplotlib.colors.Colormap`
    :rtype: None
    """
    import matplotlib.pyplot as plt
    from obspy import read
    from obspy.signal import PPSD
    from obspy.io.xseed import Parser
    st = read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
    st += read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
    parser = Parser("https://examples.obspy.org/dataless.seed.BW_KW1")
    ppsd = PPSD(st[0].stats, metadata=parser)
    ppsd.add(st)

    for cmap in cmaps:
        ppsd.plot(cmap=cmap, show=False)
    plt.show()
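
# Hedged usage sketch: compare ObsPy's default sequential colormap with the
# PQLX-style colormap (any list of matplotlib Colormap objects works).
from obspy.imaging.cm import obspy_sequential, pqlx

_colormap_plot_ppsd([obspy_sequential, pqlx])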
Example #8
def psd(station, parser):
    data = IPOC()
    ppsd_length = 6 * 3600
    overlap = 0.5
    dt = 3 * 24 * 3600
    t1 = UTC('2006-01-01')
    t2 = UTC('2013-11-01')
    ppsd = None
    print(t1, t2)
    while t1 < t2:
        try:
            if station != 'LVC':
                stream = data.client.getWaveform(
                    'CX', station, '', 'HHZ', t1,
                    t1 + dt + overlap * ppsd_length)
            else:
                stream = data.client.getWaveform(
                    'GE', 'LVC', '00', 'BHZ', t1,
                    t1 + dt + overlap * ppsd_length)

        except:
            t1 += dt
            continue
        if ppsd is None:
            ppsd = PPSD(stream[0].stats,
                        parser=parser,
                        skip_on_gaps=True,
                        db_bins=(-200, -50, 0.5),
                        ppsd_length=ppsd_length,
                        overlap=overlap)
        print(t1)
        ppsd.add(stream)
        t1 += dt
    if ppsd is not None:
        print('station %s: %d segments' % (station, len(ppsd.times)))
        ppsd.save("/home/richter/Results/IPOC/PPSD/ppsd_%s_6h.pkl.bz2" %
                  station,
                  compress=True)
        return True
    else:
        return False
Example #9
def Noise_plotting(station, channel, PAZ, datasource):
    """
    Function to make use of obspy's PPSD functionality to read in data from
    a single station and the poles-and-zeros for that station before plotting
    the PPSD for this station.  See McNamara(2004) for more details.

    :type station: str
    :param station: Station name as it is in the filenames in the database
    :type channel: str
    :param channel: Channel name as it is in the filenames in the database
    :type PAZ: dict
    :param PAZ: Must contain the keys 'poles' (list of complex), 'zeros'
        (list of complex), 'sensitivity' (float) and 'gain' (float)
    :type datasource: str
    :param datasource: The directory in which data can be found, can contain
                        wildcards.

    :returns: PPSD object
    """
    from obspy.signal import PPSD
    from obspy import read as obsread
    import glob

    stafiles = glob.glob(datasource + '/*' + station + '*' + channel + '*')
    stafiles.sort()
    # Initialize PPSD
    st = obsread(stafiles[0])
    ppsd = PPSD(st[0].stats, PAZ)
    for stafile in stafiles[1:]:
        print('Adding waveform from: ' + stafile)
        st = obsread(stafile)
        # Add after read to conserve memory
        ppsd.add(st)
    # Plot the PPSD
    ppsd.plot()
    return ppsd
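
# Hedged usage sketch for Noise_plotting; the station/channel codes and the
# data directory are placeholders, and the PAZ values are only illustrative
# (PPSD expects the lowercase keys shown here).
PAZ = {'poles': [-0.037004 + 0.037016j, -0.037004 - 0.037016j],
       'zeros': [0j, 0j],
       'gain': 60077000.0,
       'sensitivity': 2516778400.0}
ppsd = Noise_plotting('STA01', 'HHZ', PAZ, '/data/continuous/2015')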
Example #10
def analyze_noise(data_files, response, decimateby=5):
    """run through data files and create PPSD objects using obspy
    """
    data_files.sort()
    print("++ {} data files".format(len(data_files)))
    inv = read_inventory(response)
    # initialize PPSD with first datafile
    print("1/{} Initializing with data file: ".format(len(data_files)),
                                    os.path.basename(data_files[0]),end='... ')
    start = time.time()
    st = read(data_files[0])
    if decimateby != 0:
        st.decimate(decimateby)
    ppsd = PPSD(st[0].stats, metadata=inv)
    ppsd.add(st)
    year_start = st[0].stats.starttime.year
    jday_start = st[0].stats.starttime.julday
    end = time.time()
    print("complete ({}s)".format(round(end-start,2)))

    # loop over rest of datafiles and add to ppsd
    for i,filename in enumerate(data_files[1:]):
        print('{0}/{1} {2}'.format(i+2,len(data_files),
                                        os.path.basename(filename)),end='... ')
        try:
            start = time.time()
            st = read(filename)
            if decimateby != 0:
                st.decimate(decimateby)
            ppsd.add(st)
            end = time.time()
            print("complete ({}s)".format(round(end-start,2)))
        except Exception as e:
            print(e)
            pass

    return ppsd
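
# Hedged usage sketch for analyze_noise; the waveform glob and the StationXML
# response file are placeholders (the function itself relies on module-level
# imports of obspy's read/read_inventory plus os and time).
import glob

data_files = glob.glob("waveforms/XX.STA..HHZ.*.mseed")
ppsd = analyze_noise(data_files, "XX.STA_response.xml", decimateby=5)
ppsd.plot()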
Example #11
    def _get_ppsd(self, st_info):

        # Returns the PPSD of the stream given in the function's parameter

        try:
            st = st_info[0]
            ppsd_dir = st_info[2]
            print("HOLA ", ppsd_dir)
            tr = st[0]
            ppsd = PPSD(tr.stats,
                        metadata=self.parser,
                        skip_on_gaps=True,
                        overlap=0.5)
            ppsd.add(st)
            ppsd.save_npz(ppsd_dir)
            return [ppsd, ppsd_dir]
        except Exception as e:
            msg = (
                f"Error getting ppsd from: {st_info[1]} for the next reason:\n"
                "%s: %s\n"
                "Skipping this stream.")
            msg = msg % (e.__class__.__name__, str(e))
            warnings.warn(msg)
            pass
Example #12
                channel=chan,
                starttime=UTCDateTime('2004-001T00:00:00.0'),
                endtime=day + secperday,
                filename=respfilename(ch))
            resp = irisclient.evalresp(network,
                                       station,
                                       loc,
                                       chan,
                                       filename="%s%s.png" % (qcfigs, ch),
                                       output='plot')
        except:
            print("No response data for channel %s" % (ch))
    data = {}
    for ch in ids:
        print(respfilename(ch))
        stch = st.select(id=ch)  # Just take the data for a single channel
        calc_daily_stats(stch)
        try:
            ppsd = PPSD(stch[0].stats, metadata=str(respfilename(ch)))
            ppsd.add(stch)
            figname = "%s%d/%03d/%s.png" % (qcfigs, day.year, day.julday, ch)
            path_verify(figname)
            ppsd.plot(figname, cmap=pqlx)
            data = ppsd.get_percentile(percentile=50)
            fname = "%s%d/%03d/PPSDper50_%s.npz" % (qcdata, day.year,
                                                    day.julday, ch)
            path_verify(fname)
            np.savez(fname, data)
        except:
            print("Error with PPSD for %s check for response" % (ch))
Example #13
from obspy import read
from obspy.signal import PPSD
from obspy.imaging.cm import pqlx
from obspy.io.xseed import Parser

st = read("IN.ZIRO..SHZ.D.2020.092.000051.SAC")
parser = Parser("ZIRODATALESS.SEED")
ppsd = PPSD(st[0].stats, metadata=parser)
ppsd.add(st)

st = read("IN.ZIRO..SHZ.D.2020.092.000051.SAC")
ppsd.add(st)

ppsd.plot(cmap=pqlx)
Example #14
from obspy.core import read
from obspy.io.xseed import Parser
from obspy.signal import PPSD

st = read("http://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
tr = st.select(id="BW.KW1..EHZ")[0]
parser = Parser("http://examples.obspy.org/dataless.seed.BW_KW1")
paz = parser.get_paz(tr.id)
ppsd = PPSD(tr.stats, paz)
ppsd.add(st)

st = read("http://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
ppsd.add(st)

ppsd.plot()
Example #15
    def process(self, SDSFile):
        """
        def PSDCollector::process
        Processes a single SDSFile to extract PSDs and store them in the DB
        """

        # Create an empty spectra list that can be preemptively
        # returned to the calling procedure
        spectra = list()

        inventory = SDSFile.inventory

        # Inventory could not be read
        if inventory is None:
            raise Exception("Inventory could not be read")

        # And the prepared data
        data = self.__prepareData(SDSFile)

        # Data could not be read
        if data is None:
            raise Exception("Data could not be read")

        # Try creating the PPSD
        try:

            # Set handling to hydrophone if using pressure data
            # This is a bit hacky but the process should be the same for infrasound data
            handling = "hydrophone" if SDSFile.isPressureChannel else None

            ppsd = PPSD(data[0].stats,
                        inventory,
                        period_limits=self.PERIOD_LIMIT_TUPLE,
                        special_handling=handling)

            # Add the waveform
            ppsd.add(data)

        except Exception as ex:
            raise Exception("Error processing PPSD: '%s'" % (str(ex)))

        for segment, time in zip(ppsd._binned_psds, SDSFile.psdBins):

            # XXX NOTE:
            # Modified /home/ubuntu/.local/lib/python2.7/site-packages/obspy/signal/spectral_estimation.py
            # And /usr/local/lib/python3.5/dist-packages/obspy/signal/spectral_estimation.py
            # To set ppsd.valid as a public attribute! We need this to determine the offset on the frequency axis
            try:
                psd_array = self.__getFrequencyOffset(
                    segment, ppsd.valid, SDSFile.isPressureChannel)
                byteAmplitudes = self.__toByteArray(psd_array)
            # This may fail in multiple ways.. try the next segment
            except Exception as ex:
                self.logger.warning(
                    "Failed processing PPSD for 1 segment: '%s'" % (str(ex)))
                continue

            # Add hash of the data & metadata (first 8 hex digits)
            # Saving 64 bytes * 2 makes (checksums) our database pretty big and this should be sufficient to
            # detect changes
            psdObject = {
                "fileId": SDSFile.filename,
                "checksum": SDSFile.checksum,
                "checksumInventory": self.__getResponseChecksum(inventory),
                "net": SDSFile.net,
                "sta": SDSFile.sta,
                "loc": SDSFile.loc,
                "cha": SDSFile.cha,
                "quality": SDSFile.quality,
                "ts": time.datetime,
                "te": (time + timedelta(minutes=60)).datetime,
                "bin": byteAmplitudes
            }

            spectra.append(psdObject)

        return spectra
Example #16
from obspy.signal import PPSD
from obspy.core.utcdatetime import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.imaging.cm import pqlx

client = Client("IRIS")
st = client.get_waveforms(network="IU",
                          station="ANMO",
                          location="00",
                          channel="LHZ",
                          starttime=UTCDateTime("2010-03-25T06:00:00.000"),
                          endtime=UTCDateTime("2010-03-29T14:00:00.000"))
print(st)
inv = client.get_stations(network="IU",
                          station="ANMO",
                          location="00",
                          channel="LHZ",
                          starttime=UTCDateTime("2010-03-25T06:00:00.000"),
                          endtime=UTCDateTime("2010-03-29T14:00:00.000"),
                          level="response")
tr = st[0]
ppsd = PPSD(tr.stats, inv, time_of_weekday=[(-1, 0, 2), (-1, 22, 24)])
ppsd.add(st)
ppsd.calculate_histogram(time_of_weekday=[(-1, 0, 2), (-1, 22, 24)])
# print("acabe")
# ppsd.plot()
# print(ppsd.times_processed)
ppsd.plot("prove.jpg", cmap=pqlx)

# ppsd = PPSD.load_npz("/home/ecastillo/SANL_results/CM.BAR2.10.HNZ/MassPPSD/CM.BAR2.10.HNZ__20190101T000000Z__20190104T000000Z.npz")
# ppsd.plot("prove.jpg",cmap=pqlx)
Example #17
    fn_in = "{}/{}_{}.mseed".format(rawdatadir, datestr, nslc)
    pbar.set_description("PPSD %s" % fn_in)
    if not os.path.isfile(fn_in):
        continue
    stall = read(fn_in, headonly=True)
    for mseedid in list(set([tr.id for tr in stall])):
        npzdatadir = "{}/npz/{}/{}/{}".format(DATADIR, YYYY, MM, DD)
        pathlib.Path(npzdatadir).mkdir(parents=True, exist_ok=True)
        #fn_out = "{}/npz/{}_{}.npz".format(DATADIR,datestr, nslc)
        fn_out = "{}/{}_{}.npz".format(npzdatadir, datestr, nslc)

        if os.path.isfile(fn_out) and not force_reprocess:
            continue
        st = read(fn_in, sourcename=mseedid)
        trace = st[0]
        print("ppsd...")
        ppsd = PPSD(trace.stats,
                    inv,
                    ppsd_length=600,
                    overlap=0.5,
                    period_smoothing_width_octaves=0.025,
                    period_step_octaves=0.0125,
                    period_limits=(freqmin, freqmax),
                    db_bins=(-200, 20, 0.25))
        #period_limits=(0.008, 50),
        print("add traces ...")
        ppsd.add(st)
        ppsd.save_npz(fn_out[:-4])
        del st, ppsd
    del stall
Example #18
st = read('RAW/*', debug_headers=True)
k = len(st)
i = 0
while i < k:
    tr = st[i]
    print(tr.id, tr.stats.starttime.year, tr.stats.starttime.julday)
    ### differentiate velocity to acceleration
    diftr = obspy.core.trace.Trace.differentiate(tr)

    ### Pole and Zero
    chn = tr.stats.channel
    sta = tr.stats.station
    Ps = "PZs/" + "*" + sta + "*" + "HHZ" + "*"
    HH_paz = glob.glob(Ps)
    pz.attach_paz(diftr, HH_paz[0])
    paz = dict(diftr.stats.paz)

    ### power spectral density
    ppsd = PPSD(diftr.stats, paz, ppsd_length=3600.0, overlap=0.95)
    ppsd.add(diftr)
    [t, amp] = ppsd.get_mode()

    ### Output
    ts = str(tr.stats.starttime.year) + "." + str(tr.stats.starttime.julday)
    txt = sta + "." + chn + "." + ts + ".txt"
    with open(txt, mode="w") as f:
        for j in range(len(t)):
            f.write("%e %6.2f\n" % (t[j], amp[j]))

    i += 1
Example #19
 def test_PPSD(self):
     """
     Test PPSD routine with some real data. Data was downsampled to 100Hz
     so the ppsd is a bit distorted which does not matter for the purpose
     of testing.
     """
     # load test file
     file_data = os.path.join(self.path,
                              'BW.KW1._.EHZ.D.2011.090_downsampled.asc.gz')
     file_histogram = os.path.join(
         self.path,
         'BW.KW1._.EHZ.D.2011.090_downsampled__ppsd_hist_stack.npy')
     file_binning = os.path.join(
         self.path, 'BW.KW1._.EHZ.D.2011.090_downsampled__ppsd_mixed.npz')
     # parameters for the test
     data = np.loadtxt(file_data)
     stats = {
         '_format': 'MSEED',
         'calib': 1.0,
         'channel': 'EHZ',
         'delta': 0.01,
         'endtime': UTCDateTime(2011, 3, 31, 2, 36, 0, 180000),
         'location': '',
         'mseed': {
             'dataquality': 'D',
             'record_length': 512,
             'encoding': 'STEIM2',
             'byteorder': '>'
         },
         'network': 'BW',
         'npts': 936001,
         'sampling_rate': 100.0,
         'starttime': UTCDateTime(2011, 3, 31, 0, 0, 0, 180000),
         'station': 'KW1'
     }
     tr = Trace(data, stats)
     st = Stream([tr])
     paz = {
         'gain':
         60077000.0,
         'poles': [(-0.037004 + 0.037016j), (-0.037004 - 0.037016j),
                   (-251.33 + 0j), (-131.04 - 467.29j),
                   (-131.04 + 467.29j)],
         'sensitivity':
         2516778400.0,
         'zeros': [0j, 0j]
     }
     ppsd = PPSD(tr.stats, paz)
     ppsd.add(st)
     # read results and compare
     result_hist = np.load(file_histogram)
     self.assertEqual(len(ppsd.times), 4)
     self.assertEqual(ppsd.nfft, 65536)
     self.assertEqual(ppsd.nlap, 49152)
     np.testing.assert_array_equal(ppsd.hist_stack, result_hist)
     # add the same data a second time (which should do nothing at all) and
     # test again - but it will raise UserWarnings, which we omit for now
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         ppsd.add(st)
         np.testing.assert_array_equal(ppsd.hist_stack, result_hist)
     # test the binning arrays
     binning = np.load(file_binning)
     np.testing.assert_array_equal(ppsd.spec_bins, binning['spec_bins'])
     np.testing.assert_array_equal(ppsd.period_bins, binning['period_bins'])
Example #20
for filename in files:
    if 'inv' not in locals():
        inv = read_inventory(filename)
    else:
        inv += read_inventory(filename)

# Trim individual traces
for tr in st1:
    tr.trim(tr.stats.starttime + 0.1, tr.stats.endtime - 0.1)

# PPSD and spectra stuff
from obspy.signal import PPSD
file_root = '/home/chet/figures/NZ/network_info/'
for tr in st:
    pdf_name = file_root + 'PDFs/' + tr.stats.station + tr.stats.channel + '.png'
    tr_ppsd = PPSD(tr.stats, metadata=inv)
    tr_ppsd.add(tr)
    try:
        tr_ppsd.plot(pdf_name)
    except:
        continue
    del tr_ppsd
    # tr.spectrogram(title=str(tr.stats.station) + str(tr.stats.starttime))

# What's the memory use of an obspy stream?
num_bytes = 0
for tr in st:
    num_bytes += tr.data.nbytes

# Catalog switch for match_filter
picks = [Pick(time=detecttime + (tr.stats.starttime - detecttime))]
Example #21
def init_ppsd_dataless(tr, dataless_file):
    parser = Parser(dataless_file)
    paz = parser.get_paz(tr.id)
    ppsd = PPSD(tr.stats, metadata=paz)
    return ppsd
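
# Hedged usage sketch for init_ppsd_dataless; the miniSEED and dataless SEED
# file names are assumptions, and Parser/PPSD are expected to be imported at
# module level as in the surrounding examples.
from obspy import read

tr = read("XX.STA..BHZ_day.mseed")[0]
ppsd = init_ppsd_dataless(tr, "XX.dataless.seed")
ppsd.add(tr)
ppsd.plot()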
Example #22
st = read("data/GR.FUR..BHN.D.2015.361")
inv = read_inventory("data/station_FUR.stationxml")

print(st)
print(inv)
inv.plot(projection="ortho")
# -

#  * compute probabilistic power spectral densities using `PPSD` class from obspy.signal, see http://docs.obspy.org/tutorial/code_snippets/probabilistic_power_spectral_density.html (but use the inventory you read from StationXML as metadata)
#  * plot the processed `PPSD` (`plot()` method attached to `PPSD` object)

# +
from obspy.signal import PPSD

tr = st[0]
ppsd = PPSD(stats=tr.stats, metadata=inv)

ppsd.add(tr)
ppsd.plot()
# -

# Since longer term stacks would need too much waveform data and take way too long to compute, we prepared one year continuous data preprocessed for a single channel of station `FUR` to play with..
#
#  * load long term pre-computed PPSD from file `PPSD_FUR_HHN.npz` using `PPSD`'s `load_npz()` staticmethod (i.e. it is called directly from the class, not an instance object of the class)
#  * plot the PPSD (default is full time-range, depending on how much data and spread is in the data, adjust `max_percentage` option of `plot()` option)  (might take a couple of minutes..!)
#  * do a cumulative plot (which is good to judge non-exceedance percentage dB thresholds)

# +
from obspy.signal import PPSD

ppsd = PPSD.load_npz("data/PPSD_FUR_HHN.npz")
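
# A minimal sketch of the plotting steps described above; the max_percentage
# value is only an example.
ppsd.plot(max_percentage=10)    # full time range, colour scale capped at 10 %
ppsd.plot(cumulative=True)      # cumulative view for non-exceedance thresholds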
Example #23
        print('Merging streams...')
        streams.merge()
        print('Current data is:')
        print(streams)

        # Build probabilistic power spectral density objects for each trace
        all_ppsds = []
        all_ppsd_names = []
        for stream in streams:
            print('Calculating PPSDs for stream:')
            print(stream)
            ppsds = []
            ppsd_names = []
            metadata = Parser(metadata_directory + stream.stats.station +
                              stream.stats.channel[-1:] + '.seed')
            ppsd = PPSD(stream.stats, metadata)
            ppsd.add(stream)
            ppsds.append(ppsd)
            ppsd_names.append(stream.stats.station + '_' +
                              stream.stats.channel + '_PPSD')
        all_ppsds.extend(ppsds)
        all_ppsd_names.extend(ppsd_names)

        # Plot PPSD data for each trace in 3 views
        print('Plotting PPSD data...')
        for n in range(len(all_ppsds)):
            all_ppsds[n].plot(show_coverage=True,
                              show_noise_models=True,
                              xaxis_frequency=True,
                              cmap=pqlx,
                              filename=all_ppsd_names[n] + '.png',
Example #24
def main(loglevel="INFO", njobs_per_worker=9999):
    logger = logbook.Logger("msnoise")
    # Reconfigure logger to show the pid number in log records
    logger = get_logger('msnoise.compute_psd_child', loglevel, with_pid=True)
    logger.info('*** Starting: Compute PPSD ***')
    db = connect()
    logger.debug('Preloading all instrument response')
    responses = preload_instrument_responses(db, return_format="inventory")

    params = get_params(db)
    ppsd_components = params.qc_components
    ppsd_length = params.qc_ppsd_length
    ppsd_overlap = params.qc_ppsd_overlap
    ppsd_period_smoothing_width_octaves = params.qc_ppsd_period_smoothing_width_octaves
    ppsd_period_step_octaves = params.qc_ppsd_period_step_octaves
    ppsd_period_limits = params.qc_ppsd_period_limits
    ppsd_db_bins = params.qc_ppsd_db_bins

    while is_next_job(db, jobtype='PSD'):
        logger.info("Getting the next job")
        jobs = get_next_job(db, jobtype='PSD', limit=njobs_per_worker)
        logger.debug("I will process %i jobs" % len(jobs))
        if len(jobs) == 0:
            # edge case, should only occur when is_next returns true, but
            # get_next receives no jobs (heavily parallelised code)
            continue
        for job in jobs:
            net, sta, loc = job.pair.split('.')
            print("Processing %s" % job.pair)
            gd = UTCDateTime(job.day).datetime
            files = get_data_availability(
                db,
                net=net,
                sta=sta,
                loc=loc,
                starttime=(UTCDateTime(job.day) - 1.5 * ppsd_length).datetime,
                endtime=gd)
            if len(files) == 0:
                print("No files found for %s" % job.day)
                continue

            for comp in ppsd_components:
                toprocess = []
                for file in files:
                    if file.chan[-1] != comp:
                        continue
                    tmp = os.path.join(file.path, file.file)
                    toprocess.append(tmp)
                if len(toprocess) == 0:
                    continue
                st = Stream()
                for tmp in np.unique(toprocess):
                    logger.debug("Reading %s" % tmp)
                    try:
                        st += read(
                            tmp,
                            starttime=UTCDateTime(gd) - 1.5 * ppsd_length,
                            endtime=UTCDateTime(gd +
                                                datetime.timedelta(days=1)) -
                            0.001)
                    except:
                        logger.debug("Problem loading %s" % tmp)
                if not len(st):
                    continue

                try:
                    st.merge()
                except:
                    logger.info("Failed merging streams:")
                    traceback.print_exc()
                    continue

                st = st.split()
                for tr in st:
                    tr.stats.network = tr.stats.network.upper()
                    tr.stats.station = tr.stats.station.upper()
                    tr.stats.channel = tr.stats.channel.upper()

                tr = st.select(component=comp)[0]
                out = to_sds(tr.stats, gd.year, int(gd.strftime('%j')))
                npzdout = os.path.join("PSD", "NPZ", out)
                logger.debug("ppsd will be output to: %s" % npzdout)
                ppsd = PPSD(tr.stats,
                            metadata=responses,
                            ppsd_length=ppsd_length,
                            overlap=ppsd_overlap,
                            period_smoothing_width_octaves=
                            ppsd_period_smoothing_width_octaves,
                            period_step_octaves=ppsd_period_step_octaves,
                            period_limits=ppsd_period_limits,
                            db_bins=ppsd_db_bins)
                # TODO handle when the response for this trace is not in the inv
                ppsd.add(st)
                out = to_sds(tr.stats, gd.year, int(gd.strftime('%j')))

                pngout = os.path.join("PSD", "PNG", out)
                if not os.path.isdir(os.path.split(npzdout)[0]):
                    os.makedirs(os.path.split(npzdout)[0])
                if not os.path.isdir(os.path.split(pngout)[0]):
                    os.makedirs(os.path.split(pngout)[0])

                ppsd.save_npz(npzdout + ".npz")
                update_job(db, job.day, job.pair, 'PSD', 'D', ref=job.ref)
                if not params.hpc:
                    for job in jobs:
                        update_job(db, job.day, job.pair, 'PSD2HDF', 'T')
                try:
                    ppsd.plot(pngout + ".png")
                except:
                    logger.debug("Error saving PNG image")
                    traceback.print_exc()

                del ppsd

        logger.debug('Day (job) "D"one')
Example #25
def get_ppsd(my_storage,
             client,
             inv,
             ppsd_restrictions,
             single_cha_contents,
             starttime,
             endtime,
             plot_trace=False):
    """
    Calculates the ppsd object according to the starttime, endtime
    and ppsd_restrictions parameters. It will be saved in
    my_storage/{network}.{station}.{location}.{channel}/ppsd

    Parameters:
    -----------
    my_storage: str
        Path to save all ppsd analyses
    client: Client object from obspy
        To use get_waveforms method
    inv: Inventory object from obspy
        To recognize the filtered stations that you want to
        calculate the ppsd
    ppsd_restrictions: PPSDRestrictions
        Information about the PPSD parameters
    single_cha_contents: 'str'
        network.station.location.channel
    starttime: UTCDateTime
        Start time that will be used to calculate the ppsd.
    endtime: UTCDateTime
        End time that will be used to calculate the ppsd.
    plot_trace: Boolean
        Plot the stream (It consumes a little bit time)
    """

    network, station, location, channel = single_cha_contents.split('.')
    try:
        st = client.get_waveforms(network=network,
                                  station=station,
                                  location=location,
                                  channel=channel,
                                  starttime=starttime,
                                  endtime=endtime)

    except:
        strftime = "%Y%m%dT%H%M%SZ"
        st_warn = (f"{network}."
                   f"{station}."
                   f"{location}."
                   f"{channel}"
                   f"__{starttime.strftime(strftime)}"
                   f"__{endtime.strftime(strftime)}")
        st = None

    now = dt.datetime.now().strftime("%Y/%m/%d %H:%M:%S")
    if st is None:
        print_logs(job='load_trace',
                   content=single_cha_contents,
                   status='no',
                   path=st_warn)
        return None

    if plot_trace:

        plotst_path = get_path(my_storage,
                               PLOT_TRACE_DIRNAME,
                               single_cha_contents,
                               starttime,
                               endtime,
                               extension_file='jpg')

        filename = os.path.basename(plotst_path)
        if os.path.isfile(plotst_path):
            print_logs(job='save_trace',
                       content=single_cha_contents,
                       status='exist',
                       path=filename)

        else:
            plotst_dir = os.path.dirname(plotst_path)
            if not os.path.isdir(plotst_dir):
                os.makedirs(plotst_dir)

            st.plot(outfile=plotst_path)

            print_logs(job='save_trace',
                       content=single_cha_contents,
                       status='ok',
                       path=filename)

    now = dt.datetime.now().strftime("%Y/%m/%d %H:%M:%S")

    try:
        ppsd_path = get_path(my_storage,
                             PPSD_DIRNAME,
                             single_cha_contents,
                             starttime,
                             endtime,
                             extension_file='npz')

        filename = os.path.basename(ppsd_path)
        if os.path.isfile(ppsd_path):
            print_logs(job='save_ppsd',
                       content=single_cha_contents,
                       status='exist',
                       path=filename)
        else:
            ppsd_dir = os.path.dirname(ppsd_path)
            if not os.path.isdir(ppsd_dir):
                os.makedirs(ppsd_dir)
            tr = st[0]
            ppsd = PPSD(tr.stats, metadata=inv, **ppsd_restrictions.__dict__)
            ppsd.add(st)
            if ppsd_restrictions.time_of_weekday is not None:
                ppsd.calculate_histogram(
                    time_of_weekday=ppsd_restrictions.time_of_weekday)
            ppsd.save_npz(ppsd_path)
            print_logs(job='save_ppsd',
                       content=single_cha_contents,
                       status='ok',
                       path=filename)
    except:
        print_logs(job='save_ppsd',
                   content=single_cha_contents,
                   status='exist',
                   path=filename)
Example #26
def plotpowermagnitudeSpectrum(tr, metadata):
    print('plotting magnitude spectrum....')
    # metadata must be an Inventory, Parser or poles-and-zeros dict for the
    # trace; the original passed a placeholder string here, which PPSD cannot use
    ppsd = PPSD(tr.stats, metadata=metadata)
    ppsd.add(tr)
    ppsd.plot()
    return
Example #27
    else:
        sts = st.select(channel=channels[i], location=locations[i])
    # Fix to remove overlaps, but not mask the data
    sts = sts.merge()
    sts = sts.split()
    sts.sort(keys=['starttime', 'endtime', 'channel'])

    
    print(sts)
    for j, tr in enumerate(sts):
        print("Working on trace {}".format(j))
        print(tr)
        length = tr.stats['endtime'] - tr.stats['starttime']
        cumlen = cumlen + length
        nevents_tr = nevents*length/secyear
        ppsd = PPSD(tr.stats, metadata=inv, ppsd_length=200.0)
        ppsd.add(Stream(tr))
        psdmean = 0
        for period in psdperiodrange:
            psds = ppsd.extract_psd_values(period)[0]
            psdmean = psdmean + math.pow(10.0, 0.05*np.mean(psds))
        psdamp = psdmean/len(psdperiodrange)    
        threshold = psdamp*snr
        print("{} Threshold: {}".format(j,threshold))
        nev_tr = np.zeros_like(nevents)
        for k, mag in enumerate(magarray):
            idx = next((x for x, v in enumerate(amp_mag_dist[k][::-1])
                        if v>threshold), None)
            if idx is not None:
                idx = len(distarray)-idx-1
                nev_tr[:, :, k] = afrac[idx]*nevents_tr[:, :, k]
Example #28
from obspy.signal import PPSD
import d2fcts_mod as d2f
import csv
import sys

from loadmat2trace import loadmat2trace
#OPEN INPUT FILE AND READ PARAMETERS
InputFile = str(sys.argv[1]); del sys.argv[1]

#####we read the list of stations we want to process and calculate the ppsd for the given number of days
######Input1 station list
######Input 2 number of days
data_down_infos=csv.reader(CommentStripper(open(InputFile,"r")), skipinitialspace=True) # allows for blanks in input line
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#Read data and select a trace with the desired station/channel combination:

### LOOP 1: input file line by line
for line in data_down_infos:  # line is list of elements in input line
    [torigin, event_lat, event_lon, depth, mag, path_data, dist_class, duration_class, toll_class,
     rotation, correction, Network, Station, Location, Channel, sub, dataformat] = d2f.read_input_file(line)
    ###### search for the data in the database
    available, wrongfmt, rotateme = d2f.check_data_avail(tstart, Network, Stationr[inds], reqchan, laufzeit, correction, path_data, dataformat)
    ppsd = PPSD(tr.stats, metadata=parser)

    ppsd.add(st)
    print("number of psd segments:", len(ppsd.times))
    ppsd.plot()
    ppsd.plot("/tmp/ppsd.png")  
    ppsd.plot("/tmp/ppsd.pdf")  
Example #29
def addNetDemo(fSrcDir, static_path):
    STATIC_PATH = static_path
    sDenDir = 'networks'
    fDenDir = os.path.join(STATIC_PATH, sDenDir)
    mkfile(fDenDir, 0)
    # updateSql()  # delete the old data and update the database
    all_files = []
    all_paths = []
    all_files, all_paths = show_path(fSrcDir, all_files, all_paths)
    for i in range(0, len(all_files)):
        file = all_files[i]
        path = all_paths[i]
        if file.count('.') >= 6:
            dayCount = countDay_1OfYear(datetime.date.today())
            (NetCode, StaCode, LocCode, ChCode, DataCode, nYear,
             nDay) = file.split('.')
            if (len(NetCode) <= 2 and len(StaCode) <= 5 and len(LocCode) <= 2
                    and len(ChCode) <= 3 and DataCode == 'D'
                    and len(nYear) <= 4 and len(nDay) <= 3
                    and int(nDay) == dayCount):
                net = Network(NetCode, NetCode, fSrcDir, sDenDir,
                              3).get_or_create_Network()
                sta = Station(net, StaCode, StaCode).get_or_create_Station()
                cDigitizerInfo = DigitizerInfo('TDE-324', '10Vpp', '100Hz',
                                               'Linear')
                (bRet, AD, gain, rate,
                 filter) = cDigitizerInfo.getDigitizerInfo()
                if not bRet:
                    print('Digitizer not found!')
                    continue
                cSensorInfo = SensorInfo('TMA-33')
                (bRet, sensor, sensorinfo) = cSensorInfo.getSensorInfo()
                if not bRet:
                    print('Sensor not found!')
                    continue
                adsensor = ADSensor(filter, sensorinfo).get_ADSensor()
                sta_adsensor = Sta_ADSensor(
                    sta, adsensor).get_or_create_Sta_ADSensor()
                ch = Channel(sta_adsensor, LocCode, ChCode).get_or_create_CH()

                sDenDir2 = sDenDir + '/' + NetCode
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + StaCode
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + nYear
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + nDay
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                from obspy import read
                # from obspy.io.xseed import Parser
                from obspy.signal import PPSD
                from obspy.imaging.cm import pqlx

                try:
                    st = read(path)
                except Exception as ex:
                    print('%s data read error\n' % file, ex)
                    continue
                ChName = NetCode + '.' + StaCode + '.' + LocCode + '.' + ChCode + '.' + nYear + '.' + nDay
                outfile1 = fDenDir + '/' + ChName + '.day_wave.png'
                outfile2 = fDenDir + '/' + ChName + '.day_wave.low_pass_0.2Hz.png'
                outfile3 = fDenDir + '/' + ChName + '.day_wave.high_pass_0.2Hz.png'
                outfile4 = fDenDir + '/' + ChName + '.ppsd.png'
                outfile5 = fDenDir + '/' + ChName + '.spectrogram.png'

                print(NetCode, StaCode, LocCode, ChCode, DataCode, nYear, nDay)
                st.plot(size=(1600, 1200),
                        tick_format='%I:%M:%p',
                        type="dayplot",
                        interval=30,
                        right_vertical_labels=True,
                        vertical_scaling_range=st[0].data.std() * 20,
                        one_tick_per_line=True,
                        color=["r", "b", "g"],
                        show_y_UTC_label=True,
                        title=ChName,
                        time_offset=8,
                        outfile=outfile1)
                st2 = st.copy()

                st.filter("lowpass", freq=0.2, corners=2)
                st.plot(size=(1600, 1200),
                        tick_format='%I:%M:%p',
                        type="dayplot",
                        interval=30,
                        right_vertical_labels=True,
                        vertical_scaling_range=st[0].data.std() * 20,
                        one_tick_per_line=True,
                        color=["r", "b", "g"],
                        show_y_UTC_label=True,
                        title=ChName + '.low_pass 0.2Hz',
                        time_offset=8,
                        outfile=outfile2)

                st2.filter("highpass", freq=0.2)
                st2.plot(
                    size=(1600, 1200),
                    tick_format='%I:%M:%p',
                    type="dayplot",
                    interval=30,
                    right_vertical_labels=True,
                    vertical_scaling_range=st2[0].data.std() * 20,
                    one_tick_per_line=True,
                    color=["r", "b", "g"],
                    show_y_UTC_label=True,
                    # events={"min_magnitude": 5},
                    title=ChName + '.high_pass 0.2Hz',
                    time_offset=8,
                    outfile=outfile3)

                paz = {}
                paz['zeros'] = []
                paz['zeros'] = Zeros(sensorinfo).getZero()
                paz['poles'] = []
                paz['poles'] = Poles(sensorinfo).getPole()
                if 2000 <= cSensorInfo.getField('IMainType', sensor) <= 3000:
                    paz['zeros'].append(complex(0., 0))
                paz['gain'] = cSensorInfo.getField('IGainNormalization',
                                                   sensorinfo)
                paz['sensitivity'] = cSensorInfo.getField('IGain', sensorinfo) \
                                     * cDigitizerInfo.getField('sensitivity', filter)
                print(paz)
                st = read(path)
                # print(st)
                ppsd = PPSD(st[0].stats, paz)
                ppsd.add(st)
                # print(ppsd.times_data)
                # print('len=',len(ppsd.times_data),ppsd.times_data[0][0],ppsd.times_data[0][1])
                ppsd.plot(outfile4, xaxis_frequency=True, cmap=pqlx)
                ppsd.plot_spectrogram(filename=outfile5, cmap='CMRmap_r')
                if cSensorInfo.getField('IMainType', sensor) < 2000:
                    outfile6 = fDenDir + '/' + ChName + '.1-2s.sp.png'
                    ppsd.plot_temporal(1.414, filename=outfile6)
                elif 2000 <= cSensorInfo.getField('IMainType',
                                                  sensor) < 3000:  # acceleration mode
                    outfile6 = fDenDir + '/' + ChName + '.1-2Hz.sp.png'
                    ppsd.plot_temporal(.707, filename=outfile6)
                fBlankTime = 0.
                for i in range(1, len(ppsd.times_data)):  # one complete time segment means no data was lost
                    dt = (ppsd.times_data[i][0] - ppsd.times_data[i - 1][1])
                    if dt < 0:
                        print(dt, ppsd.times_data[i][0],
                              ppsd.times_data[i - 1][1])
                    else:
                        fBlankTime += dt
                runrate = 1.0 - fBlankTime / 86400.
                date = datetime.date(ppsd.times_data[0][0].year,
                                     ppsd.times_data[0][0].month,
                                     ppsd.times_data[0][0].day)
                DayData(ch, date, runrate).set_or_create_Day_data()
        else:
            print(file, "Name is error.")
Example #30
ondeckdatafile = "datafiles/ELYS0.allseispress.dl0204.mseed"
# ondeckmetadata = "datafiles/ELYS0.dl0129.response.xml"
ondeckmetadata = "datafiles/ELYS0.all.dl0226.response.xml"
cMHdata = "datafiles/CRUI3.SP.mseed"
cMHmeta = "datafiles/CRUI3.xml"
cEHdata = "datafiles/CRUI1-2.mseed"
cEHmeta = "datafiles/CRUI1.xml"

#first get cruise ppsd info
print("Working on cruise data")
stMHc = read(cMHdata)
invMHc = read_inventory(cMHmeta)
stMHc_sel = stMHc.select(channel='MHW')
trc = stMHc_sel[0]
ppsdMHc = PPSD(trc.stats, metadata=invMHc, ppsd_length=600.0,
               skip_on_gaps=True, period_limits=(0.02, 100.0),
               db_bins=(-200, -50, 1.))
ppsdMHc.add(stMHc_sel)
(cMHpd, cMHpsd) = ppsdMHc.get_mode()
stEHc = read(cEHdata)
invEHc = read_inventory(cEHmeta)
stEHc_sel = stEHc.select(channel='EHW')
trc = stEHc_sel[0]
ppsdEHc = PPSD(trc.stats, metadata=invEHc, ppsd_length=200.0,
               skip_on_gaps=True, period_limits=(0.02, 100.0),
               db_bins=(-200, -50, 1.))
ppsdEHc.add(stEHc_sel)
(cEHpd, cEHpsd) = ppsdEHc.get_mode()

# For reference, earth low and high noise models
(nlnmpd, nlnmpsd) = get_nlnm()
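
# A hedged sketch of how the mode PSDs above might be compared against the
# Earth noise models; this is an assumption about how the truncated script
# continues (get_nlnm/get_nhnm live in obspy.signal.spectral_estimation).
import matplotlib.pyplot as plt
from obspy.signal.spectral_estimation import get_nhnm

(nhnmpd, nhnmpsd) = get_nhnm()
plt.semilogx(nlnmpd, nlnmpsd, 'k--', label='NLNM')
plt.semilogx(nhnmpd, nhnmpsd, 'k:', label='NHNM')
plt.semilogx(cMHpd, cMHpsd, label='cruise MHW mode')
plt.semilogx(cEHpd, cEHpsd, label='cruise EHW mode')
plt.xlabel('Period (s)')
plt.ylabel('Power (dB)')
plt.legend()
plt.show()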