Example #1
        site = stations[ksta]

        Eppsd_file = ppsd_dir + site + '.LXE.ppsd.npz'
        Nppsd_file = ppsd_dir + site + '.LXN.ppsd.npz'
        Zppsd_file = ppsd_dir + site + '.LXZ.ppsd.npz'

        #Does the ppsd object exist?
        if exists(Eppsd_file):

            #Load the previously saved PPSDs
            Eppsd = PPSD.load_npz(Eppsd_file, metadata=paz)
            Nppsd = PPSD.load_npz(Nppsd_file, metadata=paz)
            Zppsd = PPSD.load_npz(Zppsd_file, metadata=paz)

            #Make ppsd plots, one per channel
            Eppsd.plot(period_lim=(2, 600), cmap=pqlx)
            ax = plt.figure(1).axes[0]
            plt.sca(ax)

            #Set reference lines
            plt.plot([2, 600], [-11, -11], 'k')
            plt.plot([2, 600], [-17, -17], 'k')
            plt.plot([2, 600], [-23, -23], 'k')
            plt.plot([2, 600], [-37, -37], 'k')
            plt.plot([2, 600], [-57, -57], 'k')

            bbox = dict(boxstyle="round", fc="0.8")
            ax.annotate('20cm', xy=(2.3, -11), bbox=bbox)
            ax.annotate('10cm', xy=(2.3, -18), bbox=bbox)
            ax.annotate('5cm', xy=(2.3, -25), bbox=bbox)
            ax.annotate('1cm', xy=(2.3, -38), bbox=bbox)
Example #2
def plot_PPSD(trace, sta, start_time, interval=7200, filebase=None, show=True):
    """
    Plot a Probabilistic Power Spectral Density for the trace.

    trace = obspy Trace object
    sta = obspy Inventory/Station object corresponding to the trace
    start_time = time at which to start spectra
    interval = offset between PSDs (seconds, minimum=3600)
    filebase = base name for the output .npz/.png files (nothing is written if None)
    show = if True, display the plot
    """
    now_time = trace.stats.starttime
    first_read = True
    while now_time < trace.stats.endtime - interval:
        if first_read:
            # Pressure channels (SEED instrument code 'D', e.g. BDH) get ObsPy's
            # 'hydrophone' handling (no differentiation after instrument correction)
            if trace.stats.channel[1] == 'D':
                ppsd = PPSD(trace.stats,
                            metadata=sta,
                            special_handling='hydrophone')
            else:
                ppsd = PPSD(trace.stats, metadata=sta)
            first_read = False
        ppsd.add(trace)
        now_time += interval

    if filebase:
        ppsd.save_npz(f'{filebase}_PPSD.npz')
        description = '{}.{}.{}.{}'.format(trace.stats.network,
                                           trace.stats.station,
                                           trace.stats.location,
                                           trace.stats.channel)
        ppsd.plot(filebase + '_' + description + '_PPSD.png')
    if show:
        plt.show()
    # ppsd.plot_temporal([0.1,1,10])
    # ppsd.plot_spectrogram()
    return 0
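
# Hedged usage sketch for plot_PPSD(); the file paths and the "XX.OBS1" code below are
# placeholders, not data shipped with this example.
from obspy import read, read_inventory

st = read("data/XX.OBS1..BDH.mseed")        # hypothetical miniSEED file
inv = read_inventory("data/XX.OBS1.xml")    # hypothetical StationXML with response
plot_PPSD(st[0], inv, st[0].stats.starttime, interval=7200,
          filebase="XX.OBS1", show=False)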
Example #3
    def ppsd(self,
             fmin=1.,
             fmax=100.,
             special_handling=None,
             filename=None,
             save=False):
        """
        Function that calculates the probabilistic power spectral density
        of a given station-channel combination.

        :type fmin: float
        :param fmin: Minimum frequency to show in PPSD plot
        :type fmax: float
        :param fmax: Maximum frequency to show in PPSD plot
        """

        # read list of files
        files = np.genfromtxt(self.filist, dtype=str)
        n = files.size
        # if no paz information is given, divide by 1.0
        if self.metadata is None:
            self.metadata = {"sensitivity": 1.0}
        # loop over files
        for i in range(n):
            st = read(self.path + files[i])
            st.merge()
            #st.decimate(self.dec_fact)
            if len(st) > 1:
                warnings.warn("more than one trace in st")
            tr = st.select(station=self.stn, channel=self.chn)[0]
            # at first run, initialize PPSD instance
            if i == 0:
                # "is_rotational_data" is set in order not to differentiate that data
                inst = PPSD(tr.stats,
                            metadata=self.metadata,
                            special_handling=special_handling,
                            ppsd_length=1800.)
            # add trace
            print("add trace %s ..." % tr)
            inst.add(tr)
        print("number of psd segments:", len(inst.current_times_used))
        inst.plot(show_noise_models=True,
                  xaxis_frequency=True,
                  period_lim=(fmin, fmax),
                  filename=filename)
        if save:
            inst.save_npz("ppsd_%s_%s.npz" % (self.stn, self.chn))
Example #4
def _colormap_plot_ppsd(cmaps):
    """
    Plot for illustrating colormaps: PPSD.

    :param cmaps: list of :class:`~matplotlib.colors.Colormap`
    :rtype: None
    """
    import matplotlib.pyplot as plt
    from obspy import read
    from obspy.signal import PPSD
    from obspy.io.xseed import Parser
    st = read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
    st += read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
    parser = Parser("https://examples.obspy.org/dataless.seed.BW_KW1")
    ppsd = PPSD(st[0].stats, metadata=parser)
    ppsd.add(st)

    for cmap in cmaps:
        ppsd.plot(cmap=cmap, show=False)
    plt.show()
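
# Hedged usage sketch: pass the helper above a list of matplotlib colormaps, e.g.
# ObsPy's PQLX colormap plus a stock matplotlib one.
from obspy.imaging.cm import pqlx
import matplotlib.pyplot as plt

_colormap_plot_ppsd([pqlx, plt.get_cmap("viridis")])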
Example #5
def Noise_plotting(station, channel, PAZ, datasource):
    """
    Function to make use of obspy's PPSD functionality to read in data from
    a single station and the poles-and-zeros for that station before plotting
    the PPSD for this station.  See McNamara(2004) for more details.

    :type station: String
    :param station: Station name as it is in the filenames in the database
    :type channel: String
    :param channel: Channel name as it is in the filenames in the database
    :type PAZ: Dict
    :param PAZ: Must contain, Poles, Zeros, Sensitivity, Gain
        :type Poles: List of Complex
        :type Zeros: List of Complex
        :type Sensitivity: Float
        :type Gain: Float
    :type datasource: String
    :param datasource: The directory in which data can be found, can contain
                        wildcards.

    :returns: PPSD object
    """
    from obspy.signal import PPSD
    from obspy import read as obsread
    import glob

    stafiles = glob.glob(datasource + '/*' + station + '*' + channel + '*')
    stafiles.sort()
    # Initialize PPSD
    st = obsread(stafiles[0])
    ppsd = PPSD(st[0].stats, PAZ)
    for stafile in stafiles[1:]:
        print('Adding waveform from: ' + stafile)
        st = obsread(stafile)
        # Add after read to conserve memory
        ppsd.add(st)
    # Plot the PPSD
    ppsd.plot()
    return ppsd
Example #6
def Noise_plotting(station, channel, PAZ, datasource):
    """
    Function to make use of obspy's PPSD functionality to read in data from
    a single station and the poles-and-zeros for that station before plotting
    the PPSD for this station.  See McNamara(2004) for more details.

    :type station: String
    :param station: Station name as it is in the filenames in the database
    :type channel: String
    :param channel: Channel name as it is in the filenames in the database
    :type PAZ: Dict
    :param PAZ: Must contain, Poles, Zeros, Sensitivity, Gain
        :type Poles: List of Complex
        :type Zeros: List of Complex
        :type Sensitivity: Float
        :type Gain: Float
    :type datasource: String
    :param datasource: The directory in which data can be found, can contain
                        wildcards.

    :returns: PPSD object
    """
    from obspy.signal import PPSD
    from obspy import read as obsread
    import glob

    stafiles = glob.glob(datasource + '/*' + station + '*' + channel + '*')
    stafiles.sort()
    # Initialize PPSD
    st = obsread(stafiles[0])
    ppsd = PPSD(st[0].stats, PAZ)
    for stafile in stafiles[1:]:
        print('Adding waveform from: ' + stafile)
        st = obsread(stafile)
        # Add after read to conserve memory
        ppsd.add(st)
    # Plot the PPSD
    ppsd.plot()
    return ppsd
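
# Hedged usage sketch for Noise_plotting(); the station, channel, directory and the
# pole-zero values below are placeholders only.
PAZ = {'poles': [-4.44 + 4.44j, -4.44 - 4.44j],
       'zeros': [0j, 0j],
       'sensitivity': 8.0e8,
       'gain': 1.0}
ppsd = Noise_plotting('STA01', 'HHZ', PAZ, '/path/to/day_volumes')
ppsd.save_npz('STA01.HHZ.ppsd.npz')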
Example #7
    def ppsd(self, fmin=1., fmax=100., special_handling=None, filename=None, save=False):
        """
        Function that calculates the probabilistic power spectral density
        of a given station-channel combination.

        :type fmin: float
        :param fmin: Minimum frequency to show in PPSD plot
        :type fmax: float
        :param fmax: Maximum frequency to show in PPSD plot
        """

        # read list of files
        files = np.genfromtxt(self.filist, dtype=str)
        n = files.size
        # if no paz information is given, divide by 1.0
        if self.metadata is None:
            self.metadata = {"sensitivity": 1.0}
        # loop over files
        for i in range(n):
            st = read(files[i])
            st.merge()
            st.decimate(self.dec_fact)
            if len(st) > 1:
                warnings.warn("more than one trace in st")
            tr = st.select(station=self.stn, channel=self.chn)[0]
            # at first run, initialize PPSD instance
            if i == 0:
                # "is_rotational_data" is set in order not to differentiate that data
                inst = PPSD(tr.stats, metadata=self.metadata, special_handling=special_handling)
            # add trace
            print("add trace %s ..." % tr)
            inst.add(tr)
        print("number of psd segments:", len(inst.current_times_used))
        inst.plot(show_noise_models=True, xaxis_frequency=True, period_lim=(fmin, fmax), filename=filename)
        if save:
            inst.save_npz("ppsd_%s_%s.npz" % (self.stn, self.chn))
Example #8
def below_noise_model(station, data, inv, save_plot=False):
    tr = df_to_trace(station, data)
    ppsd = PPSD(tr.stats, metadata=inv)
    ppsd.add(tr)

    fig = ppsd.plot(show=False)

    if save_plot:
        julday = format_date_to_str(tr.stats.starttime.julday, 3)
        fig.savefig(
            f"plot_data/psd/{station}/{tr.stats.starttime.year}.{julday}.png",
            dpi=300)

    nlnm_t, nlnm_db = get_nlnm()
    trace_t = ppsd.period_bin_centers.tolist()

    interp_func = interpolate.interp1d(nlnm_t, nlnm_db, bounds_error=False)
    interp_db = interp_func(trace_t)

    traces_db = ppsd.psd_values

    min_t = closest_index_of_list(trace_t, 2.5)
    max_t = closest_index_of_list(trace_t, 10)

    for t, trace_db in enumerate(traces_db):
        diff = np.subtract(trace_db[min_t:max_t + 1],
                           interp_db[min_t:max_t + 1])
        for i, element in enumerate(diff):
            if element < 0:
                time_processed = ppsd.times_processed[t]
                year = format_date_to_str(time_processed.year, 4)
                month = format_date_to_str(time_processed.month, 2)
                day = format_date_to_str(time_processed.day, 2)
                hour = format_date_to_str(time_processed.hour, 2)
                minute = format_date_to_str(time_processed.minute, 2)
                second = format_date_to_str(time_processed.second, 2)
                datetime = f'D{year}{month}{day}T{hour}{minute}{second}'
                _id = station + '.' + datetime + '.1'

                return datetime, f'{str(element)}dB', _id, 1, 'Below Low Noise Model', station

    return None, f'OK. BelowLowNoiseModel of {station}', None, 0, None, None
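
# Standalone sketch of the core check above, independent of the helper functions:
# interpolate the Peterson New Low Noise Model onto the PPSD's period bins and flag
# PSD segments that dip below it between 2.5 s and 10 s. Assumes `ppsd` is an
# already populated obspy.signal.PPSD instance; everything else is standard ObsPy/SciPy.
import numpy as np
from scipy import interpolate
from obspy.signal.spectral_estimation import get_nlnm

def segments_below_nlnm(ppsd, t_min=2.5, t_max=10.0):
    nlnm_t, nlnm_db = get_nlnm()
    periods = np.asarray(ppsd.period_bin_centers)
    nlnm_on_bins = interpolate.interp1d(nlnm_t, nlnm_db, bounds_error=False)(periods)
    mask = (periods >= t_min) & (periods <= t_max)
    flagged = []
    for time, psd in zip(ppsd.times_processed, ppsd.psd_values):
        if np.any(np.asarray(psd)[mask] < nlnm_on_bins[mask]):
            flagged.append(time)
    return flagged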
Example #9
from obspy import read
from obspy.signal import PPSD
from obspy.imaging.cm import pqlx
from obspy.io.xseed import Parser

st = read("IN.ZIRO..SHZ.D.2020.092.000051.SAC")
parser = Parser("ZIRODATALESS.SEED")
ppsd = PPSD(st[0].stats, metadata=parser)
ppsd.add(st)

st = read("IN.ZIRO..SHZ.D.2020.092.000051.SAC")
ppsd.add(st)
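# Note: PPSD.add() skips time segments that are already covered, so re-adding the
# very same file (as above) leaves the histogram unchanged.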

ppsd.plot(cmap=pqlx)
Example #10
# Make the PDF
# Note: this snippet targets an older ObsPy API (paz=..., ppsd.times, hist_cum=...).
ppsd = PPSD(st[0].stats, paz=pazval, ppsd_length=parserval.len, overlap=parserval.overlap)
for tr in st:
    ppsd.add(tr)
    if debug:
        for pdftime in ppsd.times:
            print('Here is what is in the PDF: ' + str(pdftime))
try:
    pdfstring = "PDF" + st[0].stats.station + st[0].stats.channel + str(st[0].stats.starttime.year) + \
        str(st[0].stats.starttime.julday).zfill(3) + ".jpg"
    medianstring = "MEDIAN" + st[0].stats.station + st[0].stats.channel + str(st[0].stats.starttime.year) + \
        str(st[0].stats.starttime.julday).zfill(3)
    if debug:
        print('Saving the PDF to: ' + pdfstring)
        print('Saving the median to: ' + medianstring)

    ppsd.plot(show_percentiles=True, percentiles=[50], filename=pdfstring,
              show=True, show_histogram=True, grid=False, show_coverage=False,
              period_lim=(parserval.minper, parserval.maxper))
    per, perval = ppsd.get_percentile(percentile=50, hist_cum=None)
    perFile = open(medianstring, 'w')
    for index, val in enumerate(per):
        perFile.write(str("%.2f" % val) + ',' + str(perval[index]) + '\n')
    perFile.close()

except Exception:
    print('No PPSD saved')
Example #11
from obspy.core import read
from obspy.io.xseed import Parser
from obspy.signal import PPSD

st = read("http://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
tr = st.select(id="BW.KW1..EHZ")[0]
parser = Parser("http://examples.obspy.org/dataless.seed.BW_KW1")
paz = parser.get_paz(tr.id)
ppsd = PPSD(tr.stats, metadata=paz)
ppsd.add(st)

st = read("http://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
ppsd.add(st)

ppsd.plot()
Example #12
from obspy import read
from obspy.signal import PPSD
from obspy.imaging.cm import pqlx
from obspy.io.xseed import Parser


st = read("http://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
parser = Parser("http://examples.obspy.org/dataless.seed.BW_KW1")
ppsd = PPSD(st[0].stats, metadata=parser)
ppsd.add(st)

st = read("http://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
ppsd.add(st)

ppsd.plot(cmap=pqlx)
Example #13
def addNetDemo(fSrcDir,nNetMode=1,sensortype='TMA-33'):
    # Create the top-level root directory
    STATIC_PATH = os.path.join(os.path.dirname(__file__), 'static')
    sDenDir = 'networks'
    fDenDir = os.path.join(STATIC_PATH, sDenDir)
    mkfile(fDenDir, 0)

    file_list = []
    path_list = []
    (file_list,path_list) = show_path (fSrcDir,file_list,path_list)
    for i in range(len(file_list)):
        file = file_list[i]
        path = path_list[i]

        dayCount = countDay_1OfYear(datetime.date.today())

        num = file.count('.')
        if (num >= 6):
            (NetCode,StaCode,LocCode,ChCode,DataCode,nYear,nDay) = file.split('.')
            # print(NetCode, StaCode, LocCode, ChCode, DataCode, nYear, nDay)
            # print(len(NetCode), len(StaCode), len(LocCode), len(ChCode), len(DataCode), len(nYear), len(nDay))
            if (len(NetCode)<=2 and len(StaCode)<=5 and len(LocCode)<=2 and len(ChCode)<=3 and DataCode=='D'
                    and len(nYear)<=4 and len(nDay)<=3 and int(nDay) == dayCount):
                net = get_or_create_Network(NetCode,NetCode,fSrcDir,sDenDir,nNetMode)
                sta = get_or_create_Station(net,StaCode,StaCode)
                (bRet,AD,gain,rate,filter) = get_DigitizerInfo('TDE-324','10Vpp','100Hz','Linear')
                if bRet==False:
                    continue
                (bRet,sensor,sensorinfo) = get_SensorInfo(sensortype, '0-200Hz', '1.0225V/M/S**2')
                if bRet==False:
                    continue
                ADSensor = get_or_create_ADSensor(filter,sensorinfo)
                StaADSensor = get_or_create_Sta_ADSensor(sta,ADSensor)
                ch = get_or_create_CH(StaADSensor,LocCode, ChCode)
                # The logic above for adding a single station really is quite involved

                sDenDir2 = sDenDir + '/' + NetCode
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + StaCode
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + nYear
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)
                sDenDir2 = sDenDir2 + '/' + nDay
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                from obspy import read
                #from obspy.io.xseed import Parser
                from obspy.signal import PPSD
                from obspy.imaging.cm import pqlx

                try:
                    st = read(path)
                except Exception as ex:
                    print('Error reading data\n', ex)
                    continue
                ChName = NetCode + '.' + StaCode + '.' + LocCode + '.' + ChCode + '.' + nYear + '.' + nDay
                outfile1 = fDenDir + '/' + ChName + '.day_wave.png'
                outfile2 = fDenDir + '/' + ChName + '.day_wave.low_pass_0.2Hz.png'
                outfile3 = fDenDir + '/' + ChName + '.day_wave.high_pass_0.2Hz.png'
                outfile4 = fDenDir + '/' + ChName + '.ppsd.png'
                outfile5 = fDenDir + '/' + ChName + '.spectrogram.png'

                print(NetCode, StaCode, LocCode, ChCode, DataCode, nYear, nDay)
                st.plot(size=(1600, 1200), tick_format='%I:%M:%p', type="dayplot", interval=30, right_vertical_labels=True,
                        vertical_scaling_range=st[0].data.std() * 20, one_tick_per_line=True,
                        color=["r", "b", "g"], show_y_UTC_label=True,
                        title=ChName,time_offset=8,
                        outfile=outfile1)
                st2 = st.copy()

                st.filter("lowpass", freq=0.2, corners=2)
                st.plot(size=(1600, 1200), tick_format='%I:%M:%p', type="dayplot", interval=30, right_vertical_labels=True,
                        vertical_scaling_range=st[0].data.std() * 20, one_tick_per_line=True,
                        color=["r", "b", "g"], show_y_UTC_label=True,
                        title=ChName + '.low_pass 0.2Hz',time_offset=8,
                        outfile=outfile2)

                st2.filter("highpass", freq=0.2)
                st2.plot(size=(1600, 1200), tick_format='%I:%M:%p', type="dayplot", interval=30, right_vertical_labels=True,
                         vertical_scaling_range=st2[0].data.std() * 20,one_tick_per_line=True,
                         color=["r", "b", "g"], show_y_UTC_label=True,
                         # events={"min_magnitude": 5},
                         title=ChName+ '.high_pass 0.2Hz', time_offset=8,
                         outfile=outfile3)

                paz = {}
                paz['zeros'] = []
                for zero in Zero.objects.filter(sensor_info=sensorinfo).order_by('id'):
                    paz['zeros'].append(complex(zero.fReal, zero.fImag))
                paz['poles'] = []
                for pole in Pole.objects.filter(sensor_info=sensorinfo).order_by('id'):
                    paz['poles'].append(complex(pole.fReal, pole.fImag))
                if 2000 <= sensor.IMainType < 3000:  # accelerometer mode
                    paz['zeros'].append(complex(0., 0))
                paz['gain'] = sensorinfo.IGainNormalization
                paz['sensitivity'] = sensorinfo.IGain * filter.sensitivity
                #print(paz)
                st = read(path)
                #print(st)
                ppsd = PPSD(st[0].stats, paz)
                ppsd.add(st)
                #print(ppsd.times_data)
                #print('len=',len(ppsd.times_data),ppsd.times_data[0][0],ppsd.times_data[0][1])
                ppsd.plot(outfile4, xaxis_frequency=True, cmap=pqlx)
                ppsd.plot_spectrogram(filename=outfile5, cmap='CMRmap_r')
                if sensor.IMainType < 2000:
                    outfile6 = fDenDir + '/' + ChName + '.1-2s.sp.png'
                    ppsd.plot_temporal(1.414, filename=outfile6)
                elif 2000 <= sensor.IMainType < 3000:  # accelerometer mode
                    outfile6 = fDenDir + '/' + ChName + '.1-2Hz.sp.png'
                    ppsd.plot_temporal(.707, filename=outfile6)

                fBlankTime = 0.
                for i in range(1, len(ppsd.times_data)):  # a single contiguous time span means no data were lost
                    dt = (ppsd.times_data[i][0] - ppsd.times_data[i-1][1])
                    if (dt < 0):
                        print(dt,ppsd.times_data[i][0],ppsd.times_data[i-1][1])
                    else:
                        fBlankTime += dt
                runrate = 1.0 - fBlankTime / 86400.
                date = datetime.date(ppsd.times_data[0][0].year,ppsd.times_data[0][0].month,ppsd.times_data[0][0].day)
                set_or_create_Day_data(ch,date,runrate)

        else:
            print(file, "has an invalid name.")
Example #14
from obspy.signal import PPSD
from obspy.core.utcdatetime import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.imaging.cm import pqlx

client = Client("IRIS")
st = client.get_waveforms(network="IU",
                          station="ANMO",
                          location="00",
                          channel="LHZ",
                          starttime=UTCDateTime("2010-03-25T06:00:00.000"),
                          endtime=UTCDateTime("2010-03-29T14:00:00.000"))
print(st)
inv = client.get_stations(network="IU",
                          station="ANMO",
                          location="00",
                          channel="LHZ",
                          starttime=UTCDateTime("2010-03-25T06:00:00.000"),
                          endtime=UTCDateTime("2010-03-29T14:00:00.000"),
                          level="response")
tr = st[0]
ppsd = PPSD(tr.stats, inv, time_of_weekday=[(-1, 0, 2), (-1, 22, 24)])
ppsd.add(st)
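# time_of_weekday=[(-1, 0, 2), (-1, 22, 24)] restricts the stacked spectra to segments
# starting between 22:00-24:00 or 00:00-02:00 (the -1 means any day of the week),
# i.e. night-time noise only.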
ppsd.calculate_histogram(time_of_weekday=[(-1, 0, 2), (-1, 22, 24)])
# print("acabe")
# ppsd.plot()
# print(ppsd.times_processed)
ppsd.plot("prove.jpg", cmap=pqlx)

# ppsd = PPSD.load_npz("/home/ecastillo/SANL_results/CM.BAR2.10.HNZ/MassPPSD/CM.BAR2.10.HNZ__20190101T000000Z__20190104T000000Z.npz")
# ppsd.plot("prove.jpg",cmap=pqlx)
Example #15
def addNetDemo(fSrcDir, static_path):
    STATIC_PATH = static_path
    sDenDir = 'networks'
    fDenDir = os.path.join(STATIC_PATH, sDenDir)
    mkfile(fDenDir, 0)
    # updateSql()  # delete old data and update the database
    all_files = []
    all_paths = []
    all_files, all_paths = show_path(fSrcDir, all_files, all_paths)
    for i in range(0, len(all_files)):
        file = all_files[i]
        path = all_paths[i]
        if file.count('.') >= 6:
            dayCount = countDay_1OfYear(datetime.date.today())
            (NetCode, StaCode, LocCode, ChCode, DataCode, nYear,
             nDay) = file.split('.')
            if (len(NetCode) <= 2 and len(StaCode) <= 5 and len(LocCode) <= 2
                    and len(ChCode) <= 3 and DataCode == 'D'
                    and len(nYear) <= 4 and len(nDay) <= 3
                    and int(nDay) == dayCount):
                net = Network(NetCode, NetCode, fSrcDir, sDenDir,
                              3).get_or_create_Network()
                sta = Station(net, StaCode, StaCode).get_or_create_Station()
                cDigitizerInfo = DigitizerInfo('TDE-324', '10Vpp', '100Hz',
                                               'Linear')
                (bRet, AD, gain, rate,
                 filter) = cDigitizerInfo.getDigitizerInfo()
                if not bRet:
                    print('Digitizer not found!')
                    continue
                cSensorInfo = SensorInfo('TMA-33')
                (bRet, sensor, sensorinfo) = cSensorInfo.getSensorInfo()
                if not bRet:
                    print('Sensor not found!')
                    continue
                adsensor = ADSensor(filter, sensorinfo).get_ADSensor()
                sta_adsensor = Sta_ADSensor(
                    sta, adsensor).get_or_create_Sta_ADSensor()
                ch = Channel(sta_adsensor, LocCode, ChCode).get_or_create_CH()

                sDenDir2 = sDenDir + '/' + NetCode
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + StaCode
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + nYear
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                sDenDir2 = sDenDir2 + '/' + nDay
                fDenDir = os.path.join(STATIC_PATH, sDenDir2)
                mkfile(fDenDir, 0)

                from obspy import read
                # from obspy.io.xseed import Parser
                from obspy.signal import PPSD
                from obspy.imaging.cm import pqlx

                try:
                    st = read(path)
                except Exception as ex:
                    print('Error reading data for %s\n' % file, ex)
                    continue
                ChName = NetCode + '.' + StaCode + '.' + LocCode + '.' + ChCode + '.' + nYear + '.' + nDay
                outfile1 = fDenDir + '/' + ChName + '.day_wave.png'
                outfile2 = fDenDir + '/' + ChName + '.day_wave.low_pass_0.2Hz.png'
                outfile3 = fDenDir + '/' + ChName + '.day_wave.high_pass_0.2Hz.png'
                outfile4 = fDenDir + '/' + ChName + '.ppsd.png'
                outfile5 = fDenDir + '/' + ChName + '.spectrogram.png'

                print(NetCode, StaCode, LocCode, ChCode, DataCode, nYear, nDay)
                st.plot(size=(1600, 1200),
                        tick_format='%I:%M:%p',
                        type="dayplot",
                        interval=30,
                        right_vertical_labels=True,
                        vertical_scaling_range=st[0].data.std() * 20,
                        one_tick_per_line=True,
                        color=["r", "b", "g"],
                        show_y_UTC_label=True,
                        title=ChName,
                        time_offset=8,
                        outfile=outfile1)
                st2 = st.copy()

                st.filter("lowpass", freq=0.2, corners=2)
                st.plot(size=(1600, 1200),
                        tick_format='%I:%M:%p',
                        type="dayplot",
                        interval=30,
                        right_vertical_labels=True,
                        vertical_scaling_range=st[0].data.std() * 20,
                        one_tick_per_line=True,
                        color=["r", "b", "g"],
                        show_y_UTC_label=True,
                        title=ChName + '.low_pass 0.2Hz',
                        time_offset=8,
                        outfile=outfile2)

                st2.filter("highpass", freq=0.2)
                st2.plot(
                    size=(1600, 1200),
                    tick_format='%I:%M:%p',
                    type="dayplot",
                    interval=30,
                    right_vertical_labels=True,
                    vertical_scaling_range=st2[0].data.std() * 20,
                    one_tick_per_line=True,
                    color=["r", "b", "g"],
                    show_y_UTC_label=True,
                    # events={"min_magnitude": 5},
                    title=ChName + '.high_pass 0.2Hz',
                    time_offset=8,
                    outfile=outfile3)

                paz = {}
                paz['zeros'] = []
                paz['zeros'] = Zeros(sensorinfo).getZero()
                paz['poles'] = []
                paz['poles'] = Poles(sensorinfo).getPole()
                if 2000 <= cSensorInfo.getField('IMainType', sensor) <= 3000:
                    paz['zeros'].append(complex(0., 0))
                paz['gain'] = cSensorInfo.getField('IGainNormalization',
                                                   sensorinfo)
                paz['sensitivity'] = cSensorInfo.getField('IGain', sensorinfo) \
                                     * cDigitizerInfo.getField('sensitivity', filter)
                print(paz)
                st = read(path)
                # print(st)
                ppsd = PPSD(st[0].stats, paz)
                ppsd.add(st)
                # print(ppsd.times_data)
                # print('len=',len(ppsd.times_data),ppsd.times_data[0][0],ppsd.times_data[0][1])
                ppsd.plot(outfile4, xaxis_frequency=True, cmap=pqlx)
                ppsd.plot_spectrogram(filename=outfile5, cmap='CMRmap_r')
                if cSensorInfo.getField('IMainType', sensor) < 2000:
                    outfile6 = fDenDir + '/' + ChName + '.1-2s.sp.png'
                    ppsd.plot_temporal(1.414, filename=outfile6)
                elif 2000 <= cSensorInfo.getField('IMainType',
                                                  sensor) < 3000:  # accelerometer mode
                    outfile6 = fDenDir + '/' + ChName + '.1-2Hz.sp.png'
                    ppsd.plot_temporal(.707, filename=outfile6)
                fBlankTime = 0.
                for i in range(1, len(ppsd.times_data)):  # a single contiguous time span means no data were lost
                    dt = (ppsd.times_data[i][0] - ppsd.times_data[i - 1][1])
                    if dt < 0:
                        print(dt, ppsd.times_data[i][0],
                              ppsd.times_data[i - 1][1])
                    else:
                        fBlankTime += dt
                runrate = 1.0 - fBlankTime / 86400.
                date = datetime.date(ppsd.times_data[0][0].year,
                                     ppsd.times_data[0][0].month,
                                     ppsd.times_data[0][0].day)
                DayData(ch, date, runrate).set_or_create_Day_data()
        else:
            print(file, "has an invalid name.")
Example #16
    else:
        inv += read_inventory(filename)

# Trim individual traces
for tr in st1:
    tr.trim(tr.stats.starttime + 0.1, tr.stats.endtime - 0.1)

# PPSD and spectra stuff
from obspy.signal import PPSD
file_root = '/home/chet/figures/NZ/network_info/'
for tr in st:
    pdf_name = file_root + 'PDFs/' + tr.stats.station + tr.stats.channel + '.png'
    tr_ppsd = PPSD(tr.stats, metadata=inv)
    tr_ppsd.add(tr)
    try:
        tr_ppsd.plot(pdf_name)
    except:
        continue
    del tr_ppsd
    # tr.spectrogram(title=str(tr.stats.station) + str(tr.stats.starttime))

# What's the memory use of an obspy stream?
num_bytes = 0
for tr in st:
    num_bytes += tr.data.nbytes

# Catalog switch for match_filter
picks = [Pick(time=detecttime + (tr.stats.starttime - detecttime))]

### Testing mayavi plotting from stackoverflow
import numpy as np
Example #17
        print(
            'Run as: python3 %s "NN.SSSS.LL.CCC" [-plot=spec,temp,site,ppsd] [http://localhost:8080/] '
            % argv[0])
        print(
            '    or: python3 %s "NN.SSSS.LL.CCC" [-plot=spec,temp,site,ppsd] [data (e.g. mseed)] [metadata (e.g. fdsn.xml)]'
            % argv[0])

    stream._cleanup()
    ids = []
    for trace in stream:
        if trace.id in ids:
            continue
        ids += [trace.id]
        ppsd = PPSD(trace.stats,
                    inventory,
                    db_bins=(-200, -50, .5),
                    period_step_octaves=0.125 / 2.,
                    ppsd_length=min([
                        int(trace.stats.npts / 4 / trace.stats.sampling_rate),
                        800
                    ]))
        ppsd.add(stream)

        if "site" in argv[-2]:
            sitemap(inventory)
        if "spec" in argv[-2]:
            ppsd.plot_spectrogram()
        if "temp" in argv[-2]:
            ppsd.plot_temporal([10, 0.1, 1])
        ppsd.plot(show_mode=True, cmap=pqlx)
Example #18
from obspy import read
from obspy.signal import PPSD
from obspy.io.xseed import Parser

st = read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
parser = Parser("https://examples.obspy.org/dataless.seed.BW_KW1")
ppsd = PPSD(st[0].stats, metadata=parser)
ppsd.add(st)

st = read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
ppsd.add(st)

ppsd.plot(cumulative=True)
Example #19
print(st)
print(inv)
inv.plot(projection="ortho")
# -

#  * compute probabilistic power spectral densities using `PPSD` class from obspy.signal, see http://docs.obspy.org/tutorial/code_snippets/probabilistic_power_spectral_density.html (but use the inventory you read from StationXML as metadata)
#  * plot the processed `PPSD` (`plot()` method attached to `PPSD` object)

# +
from obspy.signal import PPSD

tr = st[0]
ppsd = PPSD(stats=tr.stats, metadata=inv)

ppsd.add(tr)
ppsd.plot()
# -

# Since longer-term stacks would need too much waveform data and take far too long to compute, we prepared one year of preprocessed continuous data for a single channel of station `FUR` to play with.
#
#  * load long term pre-computed PPSD from file `PPSD_FUR_HHN.npz` using `PPSD`'s `load_npz()` staticmethod (i.e. it is called directly from the class, not an instance object of the class)
#  * plot the PPSD (default is the full time range; depending on how much data and spread there is, adjust the `max_percentage` option of `plot()`)  (might take a couple of minutes!)
#  * do a cumulative plot (which is good for judging non-exceedance percentage dB thresholds); a short sketch follows the plot call below

# +
from obspy.signal import PPSD

ppsd = PPSD.load_npz("data/PPSD_FUR_HHN.npz")
# -

ppsd.plot(max_percentage=10)
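
# The bullet list above also asks for a cumulative plot; a minimal sketch, reusing the
# `ppsd` object loaded from the npz file:

# +
# Cumulative histogram: each cell shows the non-exceedance percentage, which is handy
# for judging dB thresholds.
ppsd.plot(cumulative=True)
# -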
Example #20
def main(loglevel="INFO", njobs_per_worker=9999):
    logger = logbook.Logger("msnoise")
    # Reconfigure logger to show the pid number in log records
    logger = get_logger('msnoise.compute_psd_child', loglevel, with_pid=True)
    logger.info('*** Starting: Compute PPSD ***')
    db = connect()
    logger.debug('Preloading all instrument response')
    responses = preload_instrument_responses(db, return_format="inventory")

    params = get_params(db)
    ppsd_components = params.qc_components
    ppsd_length = params.qc_ppsd_length
    ppsd_overlap = params.qc_ppsd_overlap
    ppsd_period_smoothing_width_octaves = params.qc_ppsd_period_smoothing_width_octaves
    ppsd_period_step_octaves = params.qc_ppsd_period_step_octaves
    ppsd_period_limits = params.qc_ppsd_period_limits
    ppsd_db_bins = params.qc_ppsd_db_bins

    while is_next_job(db, jobtype='PSD'):
        logger.info("Getting the next job")
        jobs = get_next_job(db, jobtype='PSD', limit=njobs_per_worker)
        logger.debug("I will process %i jobs" % len(jobs))
        if len(jobs) == 0:
            # edge case, should only occur when is_next returns true, but
            # get_next receives no jobs (heavily parallelised code)
            continue
        for job in jobs:
            net, sta, loc = job.pair.split('.')
            print("Processing %s" % job.pair)
            gd = UTCDateTime(job.day).datetime
            files = get_data_availability(
                db,
                net=net,
                sta=sta,
                loc=loc,
                starttime=(UTCDateTime(job.day) - 1.5 * ppsd_length).datetime,
                endtime=gd)
            if len(files) == 0:
                print("No files found for %s" % job.day)
                continue

            for comp in ppsd_components:
                toprocess = []
                for file in files:
                    if file.chan[-1] != comp:
                        continue
                    tmp = os.path.join(file.path, file.file)
                    toprocess.append(tmp)
                if len(toprocess) == 0:
                    continue
                st = Stream()
                for tmp in np.unique(toprocess):
                    logger.debug("Reading %s" % tmp)
                    try:
                        st += read(
                            tmp,
                            starttime=UTCDateTime(gd) - 1.5 * ppsd_length,
                            endtime=UTCDateTime(gd +
                                                datetime.timedelta(days=1)) -
                            0.001)
                    except:
                        logger.debug("Problem loading %s" % tmp)
                if not len(st):
                    continue

                try:
                    st.merge()
                except:
                    logger.info("Failed merging streams:")
                    traceback.print_exc()
                    continue

                st = st.split()
                for tr in st:
                    tr.stats.network = tr.stats.network.upper()
                    tr.stats.station = tr.stats.station.upper()
                    tr.stats.channel = tr.stats.channel.upper()

                tr = st.select(component=comp)[0]
                out = to_sds(tr.stats, gd.year, int(gd.strftime('%j')))
                npzdout = os.path.join("PSD", "NPZ", out)
                logger.debug("ppsd will be output to: %s" % npzdout)
                ppsd = PPSD(tr.stats,
                            metadata=responses,
                            ppsd_length=ppsd_length,
                            overlap=ppsd_overlap,
                            period_smoothing_width_octaves=
                            ppsd_period_smoothing_width_octaves,
                            period_step_octaves=ppsd_period_step_octaves,
                            period_limits=ppsd_period_limits,
                            db_bins=ppsd_db_bins)
                # TODO handle when the response for this trace is not in the inv
                ppsd.add(st)
                out = to_sds(tr.stats, gd.year, int(gd.strftime('%j')))

                pngout = os.path.join("PSD", "PNG", out)
                os.makedirs(os.path.split(npzdout)[0], exist_ok=True)
                os.makedirs(os.path.split(pngout)[0], exist_ok=True)

                ppsd.save_npz(npzdout + ".npz")
                update_job(db, job.day, job.pair, 'PSD', 'D', ref=job.ref)
                if not params.hpc:
                    for job in jobs:
                        update_job(db, job.day, job.pair, 'PSD2HDF', 'T')
                try:
                    ppsd.plot(pngout + ".png")
                except:
                    logger.debug("Error saving PNG image")
                    traceback.print_exc()

                del ppsd

        logger.debug('Day (job) "D"one')
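
# Hedged follow-up sketch (not part of the msnoise job above): daily .npz files such as
# the ones written by ppsd.save_npz() can later be combined into a longer-term PPSD with
# ObsPy's PPSD.load_npz()/add_npz(); the file names here are placeholders.
from obspy.signal import PPSD

combined = PPSD.load_npz("PSD/NPZ/NET.STA.00.HHZ.D.2021.001.npz")
combined.add_npz("PSD/NPZ/NET.STA.00.HHZ.D.2021.002.npz")
combined.plot(show_noise_models=True)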
Example #21
                channel=chan,
                starttime=UTCDateTime('2004-001T00:00:00.0'),
                endtime=day + secperday,
                filename=respfilename(ch))
            resp = irisclient.evalresp(network,
                                       station,
                                       loc,
                                       chan,
                                       filename="%s%s.png" % (qcfigs, ch),
                                       output='plot')
        except:
            print("No response data for channel %s" % (ch))
    data = {}
    for ch in ids:
        print(respfilename(ch))
        stch = st.select(id=ch)  # Just take the data for a single channel
        calc_daily_stats(stch)
        try:
            ppsd = PPSD(stch[0].stats, metadata=str(respfilename(ch)))
            ppsd.add(stch)
            figname = "%s%d/%03d/%s.png" % (qcfigs, day.year, day.julday, ch)
            path_verify(figname)
            ppsd.plot(figname, cmap=pqlx)
            data = ppsd.get_percentile(percentile=50)
            fname = "%s%d/%03d/PPSDper50_%s.npz" % (qcdata, day.year,
                                                    day.julday, ch)
            path_verify(fname)
            np.savez(fname, data)
        except:
            print("Error with PPSD for %s check for response" % (ch))
Example #22
from obspy import read
from obspy.signal import PPSD
from obspy.io.xseed import Parser


st = read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.037")
parser = Parser("https://examples.obspy.org/dataless.seed.BW_KW1")
ppsd = PPSD(st[0].stats, metadata=parser)
ppsd.add(st)

st = read("https://examples.obspy.org/BW.KW1..EHZ.D.2011.038")
ppsd.add(st)

ppsd.plot(cumulative=True)
Example #23
ppsdEHc.add(stEHc_sel)
(cEHpd, cEHpsd) = ppsdEHc.get_mode()


# channels = ['EHU', 'EHV', 'EHW']
# channels = ['SHU', 'MHV', 'MHW']
channels = ['EHU']

st = read(datafile)
inv = read_inventory(metadata)

for chn in channels:
    tr = st.select(channel=chn)[1]  # the first one may have a metadata problem
    ppsd = PPSD(tr.stats, metadata=inv, ppsd_length=600.0, skip_on_gaps=True,
                period_limits=(0.02, 100.0), db_bins=(-200,-50, 1.))
    st_select = st.select(channel=chn)
    ppsd.add(st_select)
    plotfile = "{}_ppsd_panelA.png".format(chn)
    ppsd.plot(show=False, show_coverage=False, max_percentage=10,
              period_lim=[0.02, 100], cmap=pqlx)
    ax = plt.gca()
    fig = plt.gcf()
    ax.plot(cEHpd, cEHpsd, linewidth=2, color='darkgreen')
    ax.plot(cMHpd[1:], cMHpsd[1:], linewidth=2, color='darkgreen',
            label='Cruise PSD')
    ax.plot(0., 0., linewidth=2, color='darkgrey', label='Earth noise model')
    ax.legend(loc=1)
    plt.savefig(plotfile)


Example #24
def plotpowermagnitudeSpectrum(tr):
    print('plotting magnitude spectrum....')
    # A blank string is not valid response metadata; fall back to a unity sensitivity
    # so the PPSD is at least computed on raw counts.
    ppsd = PPSD(tr.stats, metadata={"sensitivity": 1.0})
    ppsd.add(tr)
    ppsd.plot()
    return
Example #25
import csv
import sys

from obspy.signal import PPSD
import d2fcts_mod as d2f
# reload(sys)  # Python 2-era encoding hack; not needed (and undefined) in Python 3

from loadmat2trace import loadmat2trace
#OPEN INPUT FILE AND READ PARAMETERS
InputFile = str(sys.argv[1]); del sys.argv[1]

##### We read the list of stations we want to process and calculate the PPSD
##### for the given number of days.
###### Input 1: station list
###### Input 2: number of days
data_down_infos = csv.reader(CommentStripper(open(InputFile, "r")),
                             skipinitialspace=True)  # allows for blanks in the input line
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#Read data and select a trace with the desired station/channel combination:

### LOOP 1: input file line by line
for line in data_down_infos:  # line is the list of fields in one input line
    [torigin, event_lat, event_lon, depth, mag, path_data, dist_class, duration_class, toll_class,
     rotation, correction, Network, Station, Location, Channel, sub, dataformat] = d2f.read_input_file(line)
    ###### search for the data in the database
    available, wrongfmt, rotateme = d2f.check_data_avail(tstart, Network, Stationr[inds], reqchan,
                                                         laufzeit, correction, path_data, dataformat)
    ppsd = PPSD(tr.stats, metadata=parser)

    ppsd.add(st)
    print("number of psd segments:", len(ppsd.current_times_used))
    ppsd.plot()
    ppsd.plot("/tmp/ppsd.png")
    ppsd.plot("/tmp/ppsd.pdf")