Example #1
def data_download(stations, starttime, endtime, event_name):

    print "\n========================================"
    print "event:", event_name
    print "time:", starttime, endtime
    waveforms_folder = "waveforms/" + event_name
    stationxml_folder = "stationxml/" + event_name
    c = Client("IRIS")

    if not os.path.exists(waveforms_folder):
        os.makedirs(waveforms_folder)

    if not os.path.exists(stationxml_folder):
        os.makedirs(stationxml_folder)

    for network, station in stations:
        ### First download waveforms.
        filename = os.path.join(waveforms_folder,
                                "%s.%s.mseed" % (network, station))
        if os.path.exists(filename):
            continue

        try:
            c.get_waveforms(network=network,
                            station=station,
                            location="*",
                            channel="BH?",
                            starttime=starttime,
                            endtime=endtime,
                            filename=filename)
        except Exception as e:
            print("Failed to download %s.%s due to %s" %
                  (network, station, str(e)))
            continue

        print("Successfully downloaded %s." % filename)
        ###

        ### Then download stationxml file
        stationxml_filename = os.path.join(stationxml_folder,
                                           "%s.%s.xml" % (network, station))

        if os.path.exists(stationxml_filename):
            continue

        try:
            c.get_stations(network=network,
                           station=station,
                           location="*",
                           channel="BH?",
                           starttime=starttime,
                           endtime=endtime,
                           filename=stationxml_filename,
                           level="response")
        except Exception as e:
            print("Failed to download %s.%s StationXML due to %s" %
                  (network, station, str(e)))
            continue

        print("Successfully downloaded %s." % stationxml_filename)
Example #2
def data_download(stations, starttime, endtime, event_name):

    print "\n========================================"
    print "event:", event_name
    print "time:", starttime, endtime
    waveforms_folder = "waveforms/" + event_name
    stationxml_folder = "stationxml/" + event_name
    c = Client("IRIS")

    if not os.path.exists(waveforms_folder):
        os.makedirs(waveforms_folder)

    if not os.path.exists(stationxml_folder):
        os.makedirs(stationxml_folder)

    for network, station in stations:
        ### First download waveforms.
        filename = os.path.join(waveforms_folder,
                            "%s.%s.mseed" % (network, station))
        if os.path.exists(filename):
            continue

        try:
            c.get_waveforms(network=network, station=station, location="*",
                            channel="BH?", starttime=starttime, endtime=endtime,
                            filename=filename)
        except Exception as e:
            print("Failed to download %s.%s due to %s" %
                (network, station, str(e)))
            continue

        print("Successfully downloaded %s." % filename)
        ###

        ### Then download stationxml file
        stationxml_filename = os.path.join(stationxml_folder,
                                       "%s.%s.xml" % (network, station))

        if os.path.exists(stationxml_filename):
            continue

        try:
            c.get_stations(network=network, station=station, location="*",
                            channel="BH?", starttime=starttime, endtime=endtime,
                            filename=stationxml_filename, level="response")
        except Exception as e:
            print("Failed to download %s.%s StationXML due to %s" % (
                network, station, str(e)))
            continue

        print("Successfully downloaded %s." % stationxml_filename)
Example #3
def getCatData(date, opt):

    """
    Download data from IRIS or an Earthworm waveserver with padding and filter it. This is
    a specialized version of getData() for catalog events, pulling a smaller window of time
    around a known event.

    date: UTCDateTime of known catalog event
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream object
    """    
    
    # Choose where data are downloaded automatically via options
    # Download data with padding to account for triggering algorithm
    # Make overlap symmetric
    
    if opt.server == "IRIS":
        client = Client("IRIS")
        st = client.get_waveforms(opt.network, opt.station, opt.location, opt.channel,
            date - opt.atrig, date + 3*opt.atrig)
    else:
        client = EWClient(opt.server, opt.port)
        st = client.getWaveform(opt.network, opt.station, opt.location, opt.channel,
            date - opt.atrig, date + 3*opt.atrig)

    st = st.detrend() # can create noise artifacts??
    st = st.merge(method=1, fill_value='interpolate')
    st = st.filter("highpass", freq=opt.fhigh, corners=2,
            zerophase=True)

    return st
Example #4
def getIRIS(date, opt, nsec=86400):

    """
    Download data from IRIS with padding and filter it.

    date: UTCDateTime of beginning of period of interest
    opt: Options object describing station/run parameters
    nsec: Number of seconds to download without padding
        (default 86400 s, or 1 day)
    
    Returns ObsPy stream object
    """    

    client = Client("IRIS")

    # Download data with padding to account for triggering algorithm
    st = client.get_waveforms(opt.network, opt.station, opt.location, opt.channel,
        date - opt.ptrig, date + nsec + opt.atrig)

    st = st.detrend() # can create noise artifacts??
    st = st.merge(method=1, fill_value='interpolate')
    st = st.filter("highpass", freq=opt.fhigh, corners=2,
            zerophase=True)

    return st
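Examples #3 and #4 read their settings from an opt object; a minimal stand-in with the attributes those two functions actually access (all values below are illustrative, not from the original) could be:

from collections import namedtuple
from obspy import UTCDateTime

Options = namedtuple("Options", ["server", "port", "network", "station",
                                 "location", "channel", "ptrig", "atrig", "fhigh"])
opt = Options(server="IRIS", port=16017,   # port is only used on the Earthworm branch
              network="UW", station="HSR", location="--", channel="EHZ",
              ptrig=10.0, atrig=20.0,      # padding (s) kept before/after the trigger
              fhigh=1.0)                   # highpass corner frequency (Hz)

st = getIRIS(UTCDateTime("2004-11-24T00:00:00"), opt, nsec=3600)
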
Example #5
def getIRIS(
    date, sta, chan, net, loc="--", nsec=86400, ptrig=10.0, atrig=20.0,
    fmin=1.0, fmax=10.0):

    """
    Download data from IRIS with padding and filter it.

    date: UTCDateTime of beginning of period of interest
    sta: String of station
    chan: String of channel
    net: String of network
    loc: String of location (default "--")
    nsec: Number of seconds to download without padding
        (default 86400 s, or 1 day)
    ptrig: Length of window to keep prior to trigger (default 10.0 s)
    atrig: Length of window to keep after trigger (default 20.0 s)
    fmin: Lower bound of bandpass filter (default 1.0 Hz)
    fmax: Upper bound of bandpass filter (default 10.0 Hz)

    Returns ObsPy stream object
    """    

    client = Client("IRIS")

    # Download data with padding to account for triggering algorithm
    st = client.get_waveforms(
        net, sta, loc, chan, date - ptrig, date + nsec + atrig)

    st = st.detrend()
    st = st.merge(method=1, fill_value=0)
    st = st.filter("bandpass", freqmin=fmin, freqmax=fmax,
                   corners=2, zerophase=True)

    return st
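A hedged call sketch for this fully parameterised variant; the network, station, and channel codes are placeholders.

from obspy import UTCDateTime

t0 = UTCDateTime("2004-11-24T00:00:00")
st = getIRIS(t0, "HSR", "EHZ", "UW", loc="--", nsec=3600, fmin=1.0, fmax=10.0)
print(st)
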
Example #6
def getdata(network,
            station,
            location,
            channel,
            t1,
            t2,
            attach_response=True,
            savedat=False,
            folderdat='data',
            filenamepref='Data_',
            clientname='IRIS',
            loadfromfile=False,
            reloadfile=False):
    """
    Get data from IRIS (or NCEDC) if it exists, save it
    USAGE
    st = getdata(network, station, location, channel, t1, t2, attach_response=True,
            savedat=False, folderdat='data', filenamepref='Data_', clientname='IRIS',
            loadfromfile=False)

    INPUTS
    network - seismic network codes, comma separated and no spaces Example: 'NF,IW,RE,TA,UU'
    station - station names, comma separated and no spaces Example: 'BFR,WOY,TCR,WTM'
    location - location codes, comma separated and no spaces Example: '01,00' or more commonly, just use '*' for all
    channel - channels to use. Example: 'BHZ,BHE,BHN,EHZ'
    t1 - UTCDateTime(starttime)
    t2 - UTCDateTime(endtime)
    attach_response - attach station response info?
    savedat - True or False, save data locally so it doesn't need to be redownloaded to look at it again
    folderdat - folder in which to save data, if you save it
    filenamepref - prefix for filename, if you are saving data
    clientname - source of data from FDSN webservices: 'IRIS','NCEDC', 'GEONET' etc. - see list here http://docs.obspy.org/archive/0.10.2/packages/obspy.fdsn.html
    loadfromfile - True or False - if a file from this time period is already on the computer, if you say True, it will automatically use that file without asking if you want to use it

    OUTPUTS
    st_ordered - ObsPy stream object that is in the same order as input station list
    """
    #create directory if need be
    if not os.path.exists(folderdat) and savedat is True:
        os.makedirs(folderdat)
    #create file name
    #filename = filenamepref+str(t1)+str(t2)
    filename = filenamepref + t1.strftime('%Y-%m-%dT%H%M') + '_' + t2.strftime(
        '%Y-%m-%dT%H%M')
    #see if it exists already
    if os.path.exists(folderdat + '/' + filename):
        if loadfromfile is True:
            choice = 'Y'
        else:
            if reloadfile is False:
                choice = raw_input(
                    'file already exists for this time period, enter Y to load from file, N to reload\n'
                )
            else:
                choice = 'N'
    else:
        choice = 'N'
    if choice.upper() == 'Y':
        st_ordered = read(folderdat + '/' + filename, format='PICKLE')
    else:
        try:
            client = FDSN_Client(clientname)
            st = client.get_waveforms(network,
                                      station,
                                      location,
                                      channel,
                                      t1,
                                      t2,
                                      attach_response=True)
            try:
                st.merge(fill_value='interpolate')
            except:
                print('bulk merge failed, trying station by station')
                st_new = Stream()
                stationlist = unique_list(
                    [trace.stats.station for trace in st])
                for sta in stationlist:
                    temp = st.select(station=sta)
                    try:
                        temp.merge(fill_value='interpolate')
                        st_new += temp
                    except Exception as e:
                        print(e)
                        print('%s would not merge - deleting it' % (sta,))
                st = st_new
            st.detrend('linear')
            #find min start time
            mint = min([trace.stats.starttime for trace in st])
            st.trim(starttime=mint, pad=True, fill_value=0)
        except Exception as e:
            print(e)
            return
        #make sure it's in the same order as it was originally input
        order = [trace.stats.station for trace in st]
        st_ordered = Stream()
        temp = station.split(',')
        for sta in temp:
            while sta in order:
                indx = order.index(sta)
                st_ordered.append(st[indx])
                st.pop(indx)
                try:
                    order = [trace.stats.station for trace in st]
                except:
                    order = ['', '']
        #save files
        if savedat:
            st_ordered.write(folderdat + '/' + filename, format="PICKLE")
    return st_ordered
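Note that getdata relies on a unique_list helper that is not shown here; a plausible order-preserving implementation (an assumption, not the original code) is:

def unique_list(seq):
    """Return the unique items of seq, keeping their first-seen order."""
    seen = set()
    return [x for x in seq if not (x in seen or seen.add(x))]
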
Example #7
from obspy.fdsn import Client as FDSN_Client
from obspy.iris import Client as OldIris_Client
from obspy.core import UTCDateTime
from obspy.core.util import NamedTemporaryFile
import matplotlib.pyplot as plt
import numpy as np

# MW 7.1 Darfield earthquake, New Zealand
t1 = UTCDateTime("2010-09-3T16:30:00.000")
t2 = UTCDateTime("2010-09-3T17:00:00.000")

# Fetch waveform from IRIS FDSN web service into a ObsPy stream object
fdsn_client = FDSN_Client("IRIS")
st = fdsn_client.get_waveforms('NZ', 'BFZ', '10', 'HHZ', t1, t2)

# Download and save instrument response file into a temporary file
with NamedTemporaryFile() as tf:
    respf = tf.name
    old_iris_client = OldIris_Client()
    # fetch RESP information from "old" IRIS web service, see obspy.fdsn
    # for accessing the new IRIS FDSN web services
    old_iris_client.resp('NZ', 'BFZ', '10', 'HHZ', t1, t2, filename=respf)

    # make a copy to keep our original data
    st_orig = st.copy()

    # define a filter band to prevent amplifying noise during the deconvolution
    pre_filt = (0.005, 0.006, 30.0, 35.0)

    # this can be the date of your raw data or any date for which the
    # SEED RESP-file is valid
Example #8
# -*- coding: utf-8 -*-

from obspy.fdsn import Client
from obspy import UTCDateTime

rede, estacao, b_jday, e_jday = raw_input('Ex: BL AQDB 2015-001 2015-002:\n').split()

fdsn = Client(base_url="http://moho.iag.usp.br")
start = UTCDateTime("%s" %b_jday)
end = UTCDateTime("%s" %e_jday)
   
st = fdsn.get_waveforms("%s" %rede, "%s" %estacao, "", "HHZ", start, end)
st = fdsn.get_waveforms("%s" %rede, "%s" %estacao, "", "HHN", start, end)
st = fdsn.get_waveforms("%s" %rede, "%s" %estacao, "", "HHE", start, end)
st.write("%s.%s..HHZ.D.%s.%s" %(rede, estacao, b_jday[0:4], b_jday[5:] ), "MSEED")
st.write("%s.%s..HHN.D.%s.%s" %(rede, estacao, b_jday[0:4], b_jday[5:] ), "MSEED")
st.write("%s.%s..HHE.D.%s.%s" %(rede, estacao, b_jday[0:4], b_jday[5:] ), "MSEED")
Example #9
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulating service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_event.wadl")) as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_station.wadl")) as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_dataselect.wadl")) as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://other_url.com/beta/event_service/11"
        base_url_station = "http://some_url.com/beta2/station/7"
        base_url_ds = "http://new.com/beta3/dataselect/8"
        # An exception will be raised if no actual WADLs are returned.
        c = Client(base_url=base_url,
                   service_mappings={
                       "event": base_url_event,
                       "station": base_url_station,
                       "dataselect": base_url_ds,
                   })

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D",
                            UTCDateTime() - 100, UTCDateTime())
        except:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
Example #10
def getdata(network, station, location, channel, t1, t2, attach_response=True,
            savedat=False, folderdat='data', filenamepref='Data_', clientname='IRIS',
            loadfromfile=False, reloadfile=False):
    """
    Get data from IRIS (or NCEDC) if it exists, save it
    USAGE
    st = getdata(network, station, location, channel, t1, t2, attach_response=True,
            savedat=False, folderdat='data', filenamepref='Data_', clientname='IRIS',
            loadfromfile=False)

    INPUTS
    network - seismic network codes, comma separated and no spaces Example: 'NF,IW,RE,TA,UU'
    station - station names, comma separated and no spaces Example: 'BFR,WOY,TCR,WTM'
    location - location codes, comma separated and no spaces Example: '01,00' or more commonly, just use '*' for all
    channel - channels to use. Example: 'BHZ,BHE,BHN,EHZ'
    t1 - UTCDateTime(starttime)
    t2 - UTCDateTime(endtime)
    attach_response - attach station response info?
    savedat - True or False, save data locally so it doesn't need to be redownloaded to look at it again
    folderdat - folder in which to save data, if you save it
    filenamepref - prefix for filename, if you are saving data
    clientname - source of data from FDSN webservices: 'IRIS','NCEDC', 'GEONET' etc. - see list here http://docs.obspy.org/archive/0.10.2/packages/obspy.fdsn.html
    loadfromfile - True or False - if a file from this time period is already on the computer, if you say True, it will automatically use that file without asking if you want to use it

    OUTPUTS
    st_ordered - ObsPy stream object that is in the same order as input station list
    """
    #create directory if need be
    if not os.path.exists(folderdat) and savedat is True:
        os.makedirs(folderdat)
    #create file name
    #filename = filenamepref+str(t1)+str(t2)
    filename = filenamepref+t1.strftime('%Y-%m-%dT%H%M')+'_'+t2.strftime('%Y-%m-%dT%H%M')
    #see if it exists already
    if os.path.exists(folderdat+'/'+filename):
        if loadfromfile is True:
            choice = 'Y'
        else:
            if reloadfile is False:
                choice = raw_input('file already exists for this time period, enter Y to load from file, N to reload\n')
            else:
                choice = 'N'
    else:
        choice = 'N'
    if choice.upper() == 'Y':
        st_ordered = read(folderdat+'/'+filename, format='PICKLE')
    else:
        try:
            client = FDSN_Client(clientname)
            st = client.get_waveforms(network, station, location, channel,
                                      t1, t2, attach_response=True)
            try:
                st.merge(fill_value='interpolate')
            except:
                print('bulk merge failed, trying station by station')
                st_new = Stream()
                stationlist = unique_list([trace.stats.station for trace in st])
                for sta in stationlist:
                    temp = st.select(station=sta)
                    try:
                        temp.merge(fill_value='interpolate')
                        st_new += temp
                    except Exception as e:
                        print(e)
                        print('%s would not merge - deleting it' % (sta,))
                st = st_new
            st.detrend('linear')
            #find min start time
            mint = min([trace.stats.starttime for trace in st])
            st.trim(starttime=mint, pad=True, fill_value=0)
        except Exception as e:
            print(e)
            return
        #make sure it's in the same order as it was originally input
        order = [trace.stats.station for trace in st]
        st_ordered = Stream()
        temp = station.split(',')
        for sta in temp:
            while sta in order:
                indx = order.index(sta)
                st_ordered.append(st[indx])
                st.pop(indx)
                try:
                    order = [trace.stats.station for trace in st]
                except:
                    order = ['', '']
        #save files
        if savedat:
            st_ordered.write(folderdat+'/'+filename, format="PICKLE")
    return st_ordered
Example #11
"""
20140417: Moved PCA and clustering tests to different file, save triggers
"""

from obspy.fdsn import Client
from obspy import UTCDateTime
from obspy.signal.trigger import classicSTALTA, triggerOnset
import numpy as np

# Grab a day from IRIS (HSR is at Mount St. Helens)
client = Client("IRIS")
# This is a time period where I know lots of repeaters are happening
t = UTCDateTime("2004-11-24T00:00:00.000")
savename = 'trigdata4.npy'

print('Grabbing waveforms...')
st = client.get_waveforms("UW", "HSR", "--", "EHZ", t - 10, t + 86420)
print('Done!')

# Detrend and merge, fill gaps with zeros, bandpass
st = st.detrend()
st = st.merge(method=1, fill_value=0)
st = st.filter('bandpass', freqmin=1.0, freqmax=10.0, corners=2, zerophase=True)

# print("Close the window to continue...")
# Helicorder plot
# st.plot(type='dayplot')

# STA/LTA trigger
print('Triggering')
tr = st[0]
cft = classicSTALTA(tr.data, 80, 700) # 0.8 s short, 7 s long
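The script imports triggerOnset and defines savename but is cut off before using them; a plausible continuation (the thresholds are illustrative, not from the original) is:

# Pick trigger on/off sample indices from the characteristic function and save them.
on_off = triggerOnset(cft, 3.0, 2.0)  # illustrative on/off thresholds
np.save(savename, on_off)
print('Found %d triggers' % len(on_off))
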
Example #12
from obspy.fdsn import Client as FDSN_Client
from obspy.iris import Client as OldIris_Client
from obspy.core import UTCDateTime
from obspy.core.util import NamedTemporaryFile
import matplotlib.pyplot as plt

# MW 7.1 Darfield earthquake, New Zealand
t1 = UTCDateTime("2010-09-3T16:30:00.000")
t2 = UTCDateTime("2010-09-3T17:00:00.000")

# Fetch waveform from IRIS FDSN web service into a ObsPy stream object
fdsn_client = FDSN_Client("IRIS")
st = fdsn_client.get_waveforms('NZ', 'BFZ', '10', 'HHZ', t1, t2)

# Download and save instrument response file into a temporary file
with NamedTemporaryFile() as tf:
    respf = tf.name
    old_iris_client = OldIris_Client()
    # fetch RESP information from "old" IRIS web service, see obspy.fdsn
    # for accessing the new IRIS FDSN web services
    old_iris_client.resp('NZ', 'BFZ', '10', 'HHZ', t1, t2, filename=respf)

    # make a copy to keep our original data
    st_orig = st.copy()

    # define a filter band to prevent amplifying noise during the deconvolution
    pre_filt = (0.005, 0.006, 30.0, 35.0)

    # this can be the date of your raw data or any date for which the
    # SEED RESP-file is valid
    date = t1
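    # Hedged continuation, not part of the original snippet: the standard ObsPy
    # recipe then removes the instrument response with the downloaded RESP file.
    seedresp = {'filename': respf, 'date': date, 'units': 'DIS'}
    st.simulate(paz_remove=None, pre_filt=pre_filt, seedresp=seedresp)
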
Example #13
minlon = 143.5 #minlongitude 
maxlon = 147.0 #maxlongitude 

cat = client.get_events(starttime=date_start, 
                        endtime=date_end, 
                        minlatitude=minlat,
                        maxlatitude=maxlat,
                        minlongitude=minlon,
                        maxlongitude=maxlon,
                        minmagnitude=min_mag)

#print(cat)

cat.plot()
print(cat.__str__(print_all=True))

net = 'AU' 

stat = 'TOO'

date_start = UTCDateTime("2003-10-18T10:29:26.580000Z")

date_end = date_start + 3600

st = client.get_waveforms(net, stat, "00", "*Z", 
                          date_start, date_end,
                          attach_response=True)

st.plot()
#st.write('Gippsland_low.MSEED', format='MSEED') 
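This fragment presupposes a client plus time-window and search-box variables defined further up; a plausible setup (all values are illustrative) would be:

from obspy import UTCDateTime
from obspy.fdsn import Client  # newer ObsPy: from obspy.clients.fdsn import Client

client = Client("IRIS")
date_start = UTCDateTime("2003-10-01T00:00:00")
date_end = UTCDateTime("2003-11-01T00:00:00")
minlat, maxlat = -39.5, -37.0   # latitude bounds (illustrative)
min_mag = 4.0
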
Example #14
                        endtime=date_end,
                        minlatitude=minlat,
                        maxlatitude=maxlat,
                        minlongitude=minlon,
                        maxlongitude=maxlon,
                        minmagnitude=min_mag)

#print(cat)

cat.plot()
print(cat.__str__(print_all=True))

net = 'AU'

stat = 'TOO'

date_start = UTCDateTime("2003-10-18T10:29:26.580000Z")

date_end = date_start + 3600

st = client.get_waveforms(net,
                          stat,
                          "00",
                          "*Z",
                          date_start,
                          date_end,
                          attach_response=True)

st.plot()
#st.write('Gippsland_low.MSEED', format='MSEED')
Example #15
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulating service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(os.path.join(
                        self.datapath, "2014-01-07_iris_event.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_station.wadl"), "rb") as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_dataselect.wadl"), "rb") as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://other_url.com/beta/event_service/11"
        base_url_station = "http://some_url.com/beta2/station/7"
        base_url_ds = "http://new.com/beta3/dataselect/8"

        # An exception will be raised if no actual WADLs are returned.
        # Catch warnings to avoid them being raised for the tests.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            c = Client(base_url=base_url, service_mappings={
                "event": base_url_event,
                "station": base_url_station,
                "dataselect": base_url_ds,
            })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning) or
                            "cannot deal with" in str(warning))

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D", UTCDateTime() - 100,
                            UTCDateTime())
        except:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
Example #16
from obspy import UTCDateTime
from obspy.fdsn import Client

# connect to an FDSN webservice
client = Client("http://erde.geophysik.uni-muenchen.de:8080")

# use origin time of devastating Japan earthquake
start = UTCDateTime("2011-03-11 05:46:23") + 10 * 60
end = start + 70 * 60

# download waveform and station metadata of station FUR
stream = client.get_waveforms(network="GR",
                              station="FUR",
                              location="",
                              channel="BH*",
                              starttime=start,
                              endtime=end,
                              attach_response=True)

# do basic signal processing and plot the data! ---->
stream.remove_response()
stream.filter("bandpass", freqmin=0.01, freqmax=1)
stream.plot()
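As a hedged follow-up on the correction step above: remove_response also accepts an output unit and a stabilising pre-filter, for example:

# Alternative form of the call above; the pre_filt corners are illustrative.
stream.remove_response(output="VEL", pre_filt=(0.005, 0.006, 30.0, 35.0))
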
Example #17
                    source_depth_in_km=event_depth / 1000.0,
                    distance_in_degree=distance_in_degree,
                    phase_list=["P","Pms"])  ## unexcept in taup maybe the program is wrong.   

                if arrivals == []:
                    print "No P!!"
                    break

                # phase_list=["P", "Pms"])
                P_time = event_time + arrivals[0].time
                Pms_time = event_time + arrivals[0].time
                waveform_begin = P_time - 1 * 60
                waveform_end = P_time + 2 * 60
                try:
                    waveform_BHN = client.get_waveforms(
                        network, station_code, "*", "BHN",
                        waveform_begin, waveform_end)[0]
                    # , attach_response=True)
                    waveform_BHE = client.get_waveforms(
                        network, station_code, "*", "BHE",
                        waveform_begin, waveform_end)[0]
                    # , attach_response=True)
                    waveform_BHZ = client.get_waveforms(
                        network, station_code, "*", "BHZ",
                        waveform_begin, waveform_end)[0]
                    # , attach_response=True)
                    # waveform_BHN.stats.sac = AttribDict()
                    # waveform_BHE.stats.sac = AttribDict()
                    # waveform_BHZ.stats.sac = AttribDict()
                    # waveform_BHN.stats.sac['stla'] = station_latitude
                    # waveform_BHN.stats.sac['stlo'] = station_longitude
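The fragment above opens mid-call; the arrivals it consumes presumably come from a TauP travel-time computation along these lines (the model choice and the placeholder values are assumptions):

from obspy.taup import TauPyModel

event_depth = 35000.0          # placeholder depth in metres (the fragment divides by 1000)
distance_in_degree = 60.0      # placeholder epicentral distance
model = TauPyModel(model="iasp91")
arrivals = model.get_travel_times(
    source_depth_in_km=event_depth / 1000.0,
    distance_in_degree=distance_in_degree,
    phase_list=["P", "Pms"])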