Code example #1
def generateStationTestData(sta):

    time_range = (UTCDateTime(TIME_RANGE[0]), UTCDateTime(TIME_RANGE[1]))

    client = Client("IRIS")
    inv = client.get_stations(network=NETWORK,
                              station=sta,
                              channel=CHANNEL,
                              starttime=time_range[0],
                              endtime=time_range[1],
                              level='channel')
    print(inv)

    traces = client.get_waveforms(network=NETWORK,
                                  station=sta,
                                  channel=CHANNEL,
                                  location='*',
                                  starttime=time_range[0],
                                  endtime=time_range[1])
    print(traces)

    outfile = 'test_data_' + sta + '.h5'
    asdf_out = pyasdf.ASDFDataSet(outfile, mode='w')
    asdf_out.add_stationxml(inv)
    asdf_out.add_waveforms(traces, TAG)

    print("Saved data to " + outfile)
Code example #2
File: ClientUtils.py  Project: MarcusHaynes/hiperseis
    def queryByBBoxInterval(self, outputFileName, bbox, timeinterval, chan='*Z', bbpadding=2,
                            event_id=None, verbose=False):
        """ Time interval is a tuple (starttime,endtime)
        """
        assert len(timeinterval) == 2, "timeinterval must be a tuple of ascending timestamps. len=" + str(
            len(timeinterval)) + " " + str(timeinterval)

        query_ds = pyasdf.ASDFDataSet(outputFileName)

        client = Client(self._client)
        ref_inv = client.get_stations(network=self._network,
                                      starttime=UTCDateTime(timeinterval[0]),
                                      endtime=UTCDateTime(timeinterval[1]),
                                      minlongitude=bbox[0] - bbpadding,
                                      maxlongitude=bbox[1] + bbpadding,
                                      minlatitude=bbox[2] - bbpadding,
                                      maxlatitude=bbox[3] + bbpadding,
                                      level='channel')

        if verbose:
            print(ref_inv)

        ref_st = Stream()

        # go through inventory and request timeseries data
        for net in ref_inv:
            for stn in net:
                stime = UTCDateTime(timeinterval[0])
                etime = UTCDateTime(timeinterval[1])
                step = 3600*24*10
                while stime + step < etime:
                    try:
                        ref_st = client.get_waveforms(network=net.code, station=stn.code,
                                                      channel=chan, location='*',
                                                      starttime=stime,
                                                      endtime=stime+step)
                        print(ref_st)
                        self.ref_stations.append(net.code + '.' + stn.code)
                        st_inv = ref_inv.select(station=stn.code, channel=chan)
                        
                        query_ds.add_stationxml(st_inv)
                        for tr in ref_st:
                            query_ds.add_waveforms(tr, "reference_station")
                    except FDSNException:
                        print('Data not available from Reference Station: ' + stn.code)
                    # end try
                    stime += step
                #wend
        # end for

        #tr.write(os.path.join(os.path.dirname(outputFileName), tr.id + ".MSEED"),
        #         format="MSEED") # Don't write miniseed
        if verbose:
            print("Wrote Reference Waveforms to ASDF file: " + outputFileName)
            print('\nWaveform data query completed.')

        metaOutputFileName = os.path.join(os.path.dirname(outputFileName),
                                          'meta.%s.xml'%(os.path.basename(outputFileName)))
        ref_inv.write(metaOutputFileName, format="STATIONXML")
        del query_ds
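Note the bounding-box ordering implied by the keyword arguments above: bbox = (min_lon, max_lon, min_lat, max_lat). A hedged usage sketch, assuming qu is an already-constructed instance of the class with self._client and self._network set (e.g. "IRIS" and "AU"):

# assumes `qu` is an instance of the class above with _client/_network set
bbox = (130.0, 140.0, -30.0, -20.0)  # (min_lon, max_lon, min_lat, max_lat)
timeinterval = ('2019-01-01T00:00:00', '2019-02-01T00:00:00')
qu.queryByBBoxInterval('query_data.h5', bbox, timeinterval,
                       chan='*Z', bbpadding=2, verbose=True)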
Code example #3
def get_arclink_event_data(bulk, fname, dataless, event):
    from obspy.core.utcdatetime import UTCDateTime
    try:
        from obspy.arclink.client import Client
    except:
        from obspy.clients.arclink.client import Client
        #from obspy.clients.fdsn.client import Client
    from os import path
    from numpy import nan, isnan
    from mapping_tools import distance
    '''
    Code to extract IRIS data, one station at a time.  Exports mseed file to 
    working directory
    
    datetime tuple fmt = (Y,m,d,H,M)
    sta = station
    '''
    st = None
    b = bulk  # request tuple: (net, sta, loc, chan, starttime, endtime)
    try:
        # first, check if the site is in distance and azimuthal range
        for channel in ['SHZ', 'EHZ', 'BHZ', 'HHZ', 'BNZ', 'HNZ']:
            seedid = '.'.join((b[0], b[1], '00', channel))  # e.g., 'AU.DPH.00.BNZ'

            try:
                staloc = dataless.get_coordinates(seedid, b[4])
            except Exception:
                pass  # seed id not in the dataless volume; try the next format
            # try another seed id fmt
            seedid = '.'.join((b[0], b[1], '', channel))  # e.g., 'AU.DPH..BNZ'
            try:
                staloc = dataless.get_coordinates(seedid, b[4])
            except Exception:
                pass  # seed id not in the dataless volume

        # now get distance and azimuth
        rngkm, az, baz = distance(event['lat'], event['lon'],
                                  staloc['latitude'], staloc['longitude'])
        #print(rngkm, az, baz)
        print('arclink', seedid)
        getRecord = False
        if rngkm <= 2000. and az > 110. and az < 250.:
            getRecord = True
        elif rngkm <= 50.:
            getRecord = True

        # check if file already exists
        if not path.isfile(fname) and getRecord:
            print('Getting:', fname)
            client = Client(user='******')
            st = client.get_waveforms(bulk[0], bulk[1], bulk[2], bulk[3],
                                      bulk[4], bulk[5])
            st = st.merge(method=0, fill_value='interpolate')

            # only write when data was actually fetched
            print('Writing file:', fname)
            st.write(fname, format="MSEED")
    except Exception:
        print('No data for:', fname)

    return st
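From the way b[0]..b[5] and dataless are used, the function expects a single request tuple and a dataless SEED volume (distance() comes from the author's mapping_tools module; note that the ArcLink service has since been retired and newer ObsPy releases no longer ship this client). A sketch of plausible inputs, with hypothetical values:

from obspy import UTCDateTime
from obspy.io.xseed import Parser

bulk = ('AU', 'DPH', '00', 'BHZ',
        UTCDateTime(2010, 1, 1, 0, 0), UTCDateTime(2010, 1, 1, 0, 30))
dataless = Parser('AU.dataless')     # hypothetical dataless SEED volume
event = {'lat': -5.0, 'lon': 152.0}  # hypothetical epicentre
st = get_arclink_event_data(bulk, 'AU.DPH.mseed', dataless, event)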
Code example #4
                # compute back-azimuth
                stalong = float(pick[4].strip())
                stalat = float(pick[5].strip())

                baz = distaz(evlat, evlong, stalat, stalong)[2]

                # get appropriate N/E waveforms to rotate

                # TODO convert this try/except block to use the IRIS client
                try:
                    # search for N/1 channel with high-frequency response
                    inv = irisclient.get_stations(starttime=starttime, endtime=endtime,
                                                  station=st, level="channel",
                                                  channel="BHN,BH1,HHN,HH1,SHN,SH1")
                    # find appropriate channels in the inventory
                    # (ASDF returns a list, not an Inventory)
                    chan = inv.get_contents()['channels'][0].split('.')

                    stream = irisclient.get_waveforms(chan[0], chan[1], chan[2], chan[3],
                                                      starttime, endtime)
                    wf1 = stream[0]
                    if wf1.stats['channel'][-1] == '1':
                        # '1'/'2' channels are not aligned N/E: fetch the 2 and Z
                        # components and rotate to ZNE using station metadata
                        wf2 = irisclient.get_waveforms(chan[0], chan[1], chan[2],
                                                       chan[3][0:-1] + '2',
                                                       starttime, endtime)[0]
                        wfz = irisclient.get_waveforms(chan[0], chan[1], chan[2],
                                                       chan[3][0:-1] + 'Z',
                                                       starttime, endtime)[0]

                        stream = Stream(traces=[wfz, wf1, wf2])
                        # get an inventory including the Z and E/2 channels
                        IRISinv = irisclient.get_stations(starttime=starttime,
                                                          endtime=endtime,
                                                          station=st, level="channel")
                        stream = stream.rotate(method="->ZNE", inventory=IRISinv)

                    else:
                        wf2 = irisclient.get_waveforms(chan[0], chan[1], chan[2],
                                                       chan[3][0:-1] + 'E',
                                                       starttime, endtime)[0]
                        stream = Stream(traces=[wf1, wf2])
                    stream = stream.rotate(method="NE->RT", back_azimuth=baz)
                    wf = None
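The fragment's core idea is the two-stage rotation: '1'/'2' channels are not aligned north/east, so they must first be rotated to ZNE with station metadata before the NE->RT rotation. A self-contained sketch of that workflow (station and event choices are hypothetical; requires network access):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.geodetics import gps2dist_azimuth

irisclient = Client('IRIS')
t1 = UTCDateTime('2014-05-01T00:00:00')
t2 = t1 + 600

# IU.ANMO borehole channels are named BH1/BH2 rather than BHN/BHE
inv = irisclient.get_stations(network='IU', station='ANMO', channel='BH?',
                              starttime=t1, endtime=t2, level='channel')
stream = irisclient.get_waveforms('IU', 'ANMO', '00', 'BH1,BH2,BHZ', t1, t2)

# first rotate to ZNE using the channel orientations in the metadata ...
stream.rotate(method='->ZNE', inventory=inv)
# ... then to radial/transverse using the event back-azimuth
_, _, baz = gps2dist_azimuth(-5.0, 152.0, 34.946, -106.457)  # hypothetical event vs. ANMO
stream.rotate(method='NE->RT', back_azimuth=baz)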
Code example #5
def get_iris_event_data(bulk, folder, timestr, dataless, event):
    from obspy import UTCDateTime
    from obspy.clients.fdsn.client import Client
    #from obspy.fdsn import Client
    from os import path
    from numpy import nan, isnan
    from mapping_tools import distance
    '''
    Code to extract IRIS data, one station at a time.  Exports mseed file to 
    working directory
    
    datetime tuple fmt = (Y,m,d,H,M)
    sta = station
    '''

    fdsn_client = Client("IRIS")
    #client = Client("IRIS")
    sta = []
    #st = client.get_waveforms_bulk(bulk)
    for b in bulk:
        try:
            fname = '.'.join((timestr, b[0], b[1], 'mseed'))
            fpath = path.join(folder, fname.replace(':', '.'))

            staloc = nan
            # first, check if the site is in distance and azimuthal range
            for channel in ['SHZ', 'EHZ', 'BHZ', 'HHZ', 'BNZ', 'HNZ']:
                if b[0] == 'WRAB':
                    locCode = '10'
                else:
                    locCode = '00'
                seedid = '.'.join(
                    (b[0], b[1], locCode, channel))  # e.g., 'AU.DPH.00.BNZ'
                try:
                    staloc = dataless.get_coordinates(seedid, b[4])
                except Exception:
                    pass  # seed id not in the dataless volume; try the next format
                seedid = '.'.join(
                    (b[0], b[1], '', channel))  # e.g., 'AU.DPH..BNZ'
                try:
                    staloc = dataless.get_coordinates(seedid, b[4])
                except Exception:
                    pass  # seed id not in the dataless volume

            # now get distance and azimuth
            rngkm, az, baz = distance(event['lat'], event['lon'],
                                      staloc['latitude'], staloc['longitude'])
            print(rngkm, az, baz)

            getRecord = False
            if rngkm <= 2000. and az > 130. and az < 230.:
                getRecord = True
            elif rngkm <= 2000. and az > 120. and az < 240. and b[1] == 'RABL':
                getRecord = True
            elif rngkm <= 2000. and az > 120. and az < 240. and b[1] == 'PMG':
                getRecord = True

            # second, check if file exists
            #print(path.isfile(fpath), getRecord)
            if not path.isfile(fpath) and getRecord:
                bulk2 = [(b[0], b[1], b[2], "*", b[4], b[5])]  #,
                print('B2', bulk2)
                #                         ("AU", "AFI", "1?", "BHE",  b[4], b[5])]
                client = Client("IRIS")
                #st = client.get_waveforms_bulk(bulk2)
                st = client.get_waveforms(b[0], b[1], b[2], "*", b[4], b[5])
                '''
                 st = fdsn_client.get_waveforms(network=b[0], station=b[1], location=b[2],
                                                channel=b[3], starttime=b[4], endtime=b[5],
                                                attach_response=True)
                 '''
                #print(st[0].stats.location)
                st = st.merge(method=0, fill_value='interpolate')
                sta += st

                print('Writing file: ' + fpath)
                st.write(fpath, format="MSEED")
            else:
                print('File exists:', fpath)
            #return st
        except Exception:
            print('No data for', b[0], b[1])

    return sta
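The commented-out get_waveforms_bulk calls hint at the bulk interface, which avoids one round-trip per station. A sketch of that pattern (station list hypothetical):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client('IRIS')
t1 = UTCDateTime('2010-01-01T00:00:00')
t2 = t1 + 600
bulk = [('AU', 'ARMA', '*', 'BHZ', t1, t2),  # hypothetical stations
        ('AU', 'CMSA', '*', 'BHZ', t1, t2)]
st = client.get_waveforms_bulk(bulk)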
Code example #6
print(ref_inv)

temp_inv = client.get_stations(network="TA",
                               station=temp_sta,
                               channel="BHZ",
                               starttime=starttime,
                               endtime=endtime,
                               level='channel')

print(temp_inv)

# get waveforms
temp_st = client.get_waveforms(network="TA",
                               station=temp_sta,
                               channel="BHZ",
                               location="*",
                               starttime=starttime,
                               endtime=endtime)

ref_st = client.get_waveforms(network="TA",
                              station=perm_sta,
                              channel="BHZ",
                              location="*",
                              starttime=starttime,
                              endtime=endtime)

print(temp_st)

ref_st[0].stats.network = "XX"
ref_inv[0].code = "XX"
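Relabelling the reference data under the dummy network code "XX" keeps the two datasets distinct when they are stored together. A sketch of how both might be written to an ASDF file, consistent with examples #1 and #2 (filename and tags hypothetical):

import pyasdf

ds = pyasdf.ASDFDataSet('comparison_data.h5', mode='w')  # hypothetical file
ds.add_stationxml(temp_inv)
ds.add_stationxml(ref_inv)
ds.add_waveforms(temp_st, tag='temporary_station')
ds.add_waveforms(ref_st, tag='reference_station')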
Code example #7
# assumed imports for this fragment
import numpy as np
import matplotlib.pyplot as plt
from obspy import Stream, UTCDateTime
from obspy.clients.fdsn import Client

# `sta` (the station code) is assumed to be defined earlier in the original script
loc = '00'
net = 'N4'
chan = 'HHZ'

stime = UTCDateTime('2019-08-16 12:59:10')
etime = stime + 120

client = Client()
inv = client.get_stations(network=net,
                          station=sta,
                          starttime=stime,
                          endtime=etime,
                          channel=chan,
                          level="response")
st = Stream()
st += client.get_waveforms(net, sta, loc, chan, stime, etime)

st.detrend('constant')
st.merge(fill_value=0)
st.attach_response(inv)
st.remove_response(output="DISP")
#st.rotate(method="->ZNE",inventory=inv)
st.filter("bandpass", freqmin=.5, freqmax=5)
tr = st[0]
t = np.linspace(0, (tr.stats.npts - 1) / tr.stats.sampling_rate,
                num=tr.stats.npts)

fig = plt.figure(1, figsize=(12, 12))
plt.ylabel('Displacement (mm)', fontsize=14)
plt.xlim([0, 120])
plt.ylim([-.02, .02])
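The snippet sets up the figure but the plot call itself is missing from the excerpt. A plausible completion, assuming the mm axis label means the DISP output (metres) is scaled by 1e3:

plt.plot(t, tr.data * 1e3, 'k')  # assumed: convert metres to millimetres
plt.xlabel('Time (s)', fontsize=14)
plt.show()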
Code example #8
File: rfbase.py  Project: marscfeng/surfpy
 def download_body_waveforms(self, outdir, fskip=False, client_name='IRIS', minDelta=30, maxDelta=150, channel_rank=['BH', 'HH'],\
         phase='P', startoffset=-30., endoffset=60.0, verbose=False, rotation=True, startdate=None, enddate=None):
     """Download body wave data from IRIS server
     ====================================================================================================================
     ::: input parameters :::
     outdir          - output directory
     fskip           - flag for download/overwrite
                         False   - overwrite
                         True    - skip upon existence
     min/maxDelta    - minimum/maximum epicentral distance, in degree
     channel_rank    - rank of channel types
     phase           - body wave phase to be downloaded, arrival time will be computed using taup
     start/endoffset - start and end offset for downloaded data
     rotation        - rotate the seismogram to RT or not
     =====================================================================================================================
     """
     if not os.path.isdir(outdir):
         os.makedirs(outdir)
     client          = Client(client_name)
     ievent          = 0
     Ntrace          = 0
     try:
         stime4down  = obspy.core.utcdatetime.UTCDateTime(startdate)
     except:
         stime4down  = obspy.UTCDateTime(0)
     try:
         etime4down  = obspy.core.utcdatetime.UTCDateTime(enddate)
     except:
         etime4down  = obspy.UTCDateTime()
     print('[%s] [DOWNLOAD BODY WAVE] Start downloading body wave data' %datetime.now().isoformat().split('.')[0])
     try:
         print(self.cat)
     except AttributeError:
         self.copy_catalog()
     for event in self.cat:
         event_id        = event.resource_id.id.split('=')[-1]
         pmag            = event.preferred_magnitude()
         magnitude       = pmag.mag
         Mtype           = pmag.magnitude_type
         event_descrip   = event.event_descriptions[0].text+', '+event.event_descriptions[0].type
         porigin         = event.preferred_origin()
         otime           = porigin.time
         if otime < stime4down or otime > etime4down:
             continue
         ievent          += 1
         try:
             print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0] + \
                         'Event ' + str(ievent)+': '+ str(otime)+' '+ event_descrip+', '+Mtype+' = '+str(magnitude))
         except:
             print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0] + \
                 'Event ' + str(ievent)+': '+ str(otime)+' '+ event_descrip+', M = '+str(magnitude))
         evlo            = porigin.longitude
         evla            = porigin.latitude
         try:
             evdp        = porigin.depth/1000.
         except:
             continue
         evstr           = '%s' %otime.isoformat()
         outfname        = outdir + '/' + evstr+'.mseed'
         logfname        = outdir + '/' + evstr+'.log'
         # check file existence
         if os.path.isfile(outfname):
             if fskip:
                 if os.path.isfile(logfname):
                     os.remove(logfname)
                     os.remove(outfname)
                 else:
                     continue
             else:
                 os.remove(outfname)
                 if os.path.isfile(logfname):
                     os.remove(logfname)
         elif os.path.isfile(logfname):
             try:
                 with open(logfname, 'r') as fid:
                     logflag     = fid.readline().split()[0][:4]
                 if logflag == 'DONE' and fskip:
                     continue
             except:
                 pass 
         # initialize log file
         with open(logfname, 'w') as fid:
             fid.writelines('DOWNLOADING\n')
         out_stream      = obspy.Stream()
         itrace          = 0
         for staid in self.waveforms.list():
             netcode, stacode    = staid.split('.')
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 tmppos          = self.waveforms[staid].coordinates
             stla                = tmppos['latitude']
             stlo                = tmppos['longitude']
             elev                = tmppos['elevation_in_m']
             elev                = elev/1000.
             az, baz, dist       = geodist.inv(evlo, evla, stlo, stla)
             dist                = dist/1000.
             if baz<0.:
                 baz             += 360.
             Delta               = obspy.geodetics.kilometer2degrees(dist)
             if Delta<minDelta:
                 continue
             if Delta>maxDelta:
                 continue
             arrivals            = taupmodel.get_travel_times(source_depth_in_km=evdp, distance_in_degree=Delta, phase_list=[phase])#, receiver_depth_in_km=0)
             try:
                 arr             = arrivals[0]
                 arrival_time    = arr.time
                 rayparam        = arr.ray_param_sec_degree
             except IndexError:
                 continue
             starttime           = otime + arrival_time + startoffset
             endtime             = otime + arrival_time + endoffset
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 location        = self.waveforms[staid].StationXML[0].stations[0].channels[0].location_code
             # determine type of channel
             channel_type        = None
             for tmpch_type in channel_rank:
                 channel         = '%sE,%sN,%sZ' %(tmpch_type, tmpch_type, tmpch_type)
                 try:
                     st          = client.get_waveforms(network=netcode, station=stacode, location=location, channel=channel,
                                         starttime=starttime, endtime=endtime, attach_response=True)
                     if len(st) >= 3:
                         channel_type= tmpch_type
                         break
                 except:
                     pass
             if channel_type is None:
                 if verbose:
                     print ('--- No data for:', staid)
                 continue
             pre_filt            = (0.04, 0.05, 20., 25.)
             st.detrend()
             try:
                 st.remove_response(pre_filt=pre_filt, taper_fraction=0.1)
             except ValueError:
                 print ('!!! ERROR with response removal for:', staid)
                 continue 
             if rotation:
                 try:
                     st.rotate('NE->RT', back_azimuth=baz)
                 except:
                     continue
             if verbose:
                 print ('--- Getting data for:', staid)
             # append stream
             out_stream  += st
             itrace      += 1
             Ntrace      += 1
         # save data to miniseed
         if itrace != 0:
             out_stream.write(outfname, format = 'mseed', encoding = 'FLOAT64')
             os.remove(logfname) # delete log file
         else:
             with open(logfname, 'w') as fid:
                 fid.writelines('DONE\n')
         print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0]+\
            'Event ' + str(ievent) + ': downloaded %d traces' % itrace)
     print('[%s] [DOWNLOAD BODY WAVE] All done' %datetime.now().isoformat().split('.')[0] + ' %d events, %d traces' %(ievent, Ntrace))
     return
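A hypothetical call, assuming ds is an instance of the containing class with a catalog and station inventory already attached:

ds.download_body_waveforms('body_waveforms', phase='P',
                           startdate='2010-01-01', enddate='2011-01-01',
                           minDelta=30, maxDelta=150, rotation=True)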
Code example #9
def fdsnws2geomag():
    '''Convert fdsnws query to geomagnetic data file'''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['internet', 'iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument('--output',
                        default=sys.stdout,
                        help='Output file (default: stdout).')
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station', required=True, help='Station code')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s \
            %(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    logging.info("Writing information to %s", str(args.output))
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream.merge_by_location().trim(starttime,
                                    endtime).write(args.output,
                                                   format=args.format,
                                                   inventory=inventory)
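Since the parser reads sys.argv, the script can also be exercised programmatically. A hypothetical invocation (station code and file names assumed):

import sys

sys.argv = ['fdsnws2geomag', '--station', 'OTT', '--date', '2020-01-01',
            '--format', 'iaga2002', '--output', 'OTT_20200101.txt', '--verbose']
fdsnws2geomag()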
Code example #10
def fdsnws2directory():
    '''
    Much like fdsnws2geomag, but designed purely to get the data from the FDSN-WS
    and lay it out according to the structure found on geomagnetic daqs servers.

    These structures vary depending on the source but can be customized by input argument.

    Filenames cannot be customized, since they follow a strict naming convention.

    The convention can be found in the pygeomag/data/formats directory.
    '''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument(
        '--directory',
        default=DEFAULT_DIRECTORY,
        help=
        'Output directory with optional datetime parameters as accepted by Python datetime (default: %s).'
        % DEFAULT_DIRECTORY)
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station',
                        default='*',
                        help='Station code (default: *)')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s \
            %(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream = stream.merge_by_location().trim(starttime, endtime)

    # Loop through the list of streams and generate the unique list of station
    # codes.  We know the network code is constant and it's a single
    # sampling-rate request.
    stations = set([trace.stats.station for trace in stream])

    # Convert the directory format string to a full path
    directory = starttime.strftime(args.directory)
    logging.info("Creating directory %s if does not exist", directory)
    pathlib.Path(directory).mkdir(parents=True, exist_ok=True)

    for station in stations:
        # Extract the station I need
        extract = stream.select(station=station)
        # Generate its filename (depends on the format)
        if args.format in ['iaga2002']:
            filename = pygeomag.data.formats.iaga2002.get_filename(
                extract[0].stats)
        elif args.format in ['imfv122']:
            filename = pygeomag.data.formats.imfv122.get_filename(
                extract[0].stats)
        else:
            raise ValueError(
                "Unable to generate filename for unhandled format %s" %
                args.format)
        filename = os.path.join(directory, filename)
        logging.info("Writing magnetic data to %s", filename)
        extract.write(filename, format=args.format, inventory=inventory)
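A hypothetical invocation mirroring the previous example; the strftime pattern in --directory is expanded by starttime.strftime above:

import sys

sys.argv = ['fdsnws2directory', '--station', '*', '--date', '2020-01-01',
            '--directory', '/data/geomag/%Y/%m/%d', '--verbose']
fdsnws2directory()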
Code example #11
cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=6.5,
                        minlatitude=25., maxlatitude=40., minlongitude=65., maxlongitude=75., catalog='ISC', magnitudetype='MS')


i = 1
evlo = cat[i].origins[0].longitude
evla = cat[i].origins[0].latitude
otime = cat[i].origins[0].time
stla = 34.945910
stlo = -106.457200
dist, az, baz = obspy.geodetics.gps2dist_azimuth(evla, evlo, stla, stlo)  # distance is in m
t0 = 3600. * 2.

print(evlo, evla)
print(cat[i].event_descriptions[0])
print(otime)
print('Mw = ', cat[i].magnitudes[0].mag)
st = client.get_waveforms(network='IU', station='ANMO', location='00', channel='LHZ',
                            starttime=otime, endtime=otime+t0, attach_response=True)
pre_filt = (0.001, 0.005, 1, 100.0)
st.detrend()
st.remove_response(pre_filt=pre_filt, taper_fraction=0.1)
st.filter(type='bandpass', freqmin=0.01, freqmax=0.05, corners=4)

tr1 = st[0].copy()
tr2 = st[0].copy()
# one-bit normalization
data1 = tr1.data
data1[data1 > 0.] = 1.
data1[data1 < 0.] = -1.
# running average (N = half-window length in samples, 80 s window)
data2 = tr2.data
N = int(80. / tr1.stats.delta / 2)
data3 = np.zeros(data2.size)
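The excerpt pre-allocates data3 but never fills it. A standard completion for running-average temporal normalization (common in ambient-noise processing) divides the trace by a moving average of its absolute amplitude over the 2N+1-sample window; this is an assumed reconstruction, not the author's original code:

# assumed completion: moving average of |data| as normalization weights
weights = np.convolve(np.abs(data2), np.ones(2 * N + 1) / (2 * N + 1), mode='same')
weights[weights == 0.] = 1.0  # avoid division by zero in flat gaps
data3 = data2 / weights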
Code example #12
File: main.py  Project: outdoorpet/QC_events
    def create_SG2K_initiate(self, event, quake_df):

        # Launch the custom station/component selection dialog
        sel_dlg = selectionDialog(parent=self,
                                  sta_list=self.station_list,
                                  chan_list=self.channel_codes)
        if sel_dlg.exec_():
            select_sta, select_comp = sel_dlg.getSelected()

            # specify output directory for miniSEED files
            temp_seed_out = os.path.join(os.path.dirname(self.cat_filename),
                                         event)

            # create directory
            if os.path.exists(temp_seed_out):
                shutil.rmtree(temp_seed_out)
            os.mkdir(temp_seed_out)

            query_time = UTCDateTime(quake_df['qtime'] - (10 * 60)).timestamp

            trace_starttime = UTCDateTime(quake_df['qtime'] - (5 * 60))
            trace_endtime = UTCDateTime(quake_df['qtime'] + (15 * 60))

            # Create a Stream object to put data into
            # st = Stream()
            # Create a dictionary to put traces into (keys are tr_ids)
            st_dict = defaultdict(list)

            print('---------------------------------------')
            print('Finding Data for Earthquake: ' + event)

            if os.path.splitext(self.db_filename)[1] == ".db":
                # run SQL query
                for matched_entry in self.session.query(Waveforms). \
                        filter(or_(and_(Waveforms.starttime <= query_time, query_time < Waveforms.endtime),
                                   and_(query_time <= Waveforms.starttime, Waveforms.starttime < query_time + 30 * 60)),
                               Waveforms.station.in_(select_sta),
                               Waveforms.component.in_(select_comp)):
                    print(matched_entry.ASDF_tag)

                    # read in the data to obspy
                    temp_st = read(
                        os.path.join(matched_entry.path,
                                     matched_entry.waveform_basename))

                    # modify network header
                    temp_tr = temp_st[0]
                    temp_tr.stats.network = matched_entry.new_network

                    # st.append(temp_tr)
                    st_dict[temp_tr.get_id()].append(temp_tr)

            if os.path.splitext(self.db_filename)[1] == ".json":
                # run python dictionary query
                for key, matched_entry in self.network_dict.items():
                    in_window = (matched_entry['starttime'] <= query_time < matched_entry['endtime']) \
                        or (query_time <= matched_entry['starttime'] < query_time + (30 * 60))
                    if in_window and (matched_entry['station'] in select_sta) \
                            and (matched_entry['component'] in select_comp):
                        print(matched_entry['ASDF_tag'])  # , os.path.join(matched_entry['path'], key)

                        # read in the data to obspy
                        temp_st = read(os.path.join(matched_entry['path'],
                                                    key))

                        # modify network header
                        temp_tr = temp_st[0]
                        temp_tr.stats.network = matched_entry['new_network']

                        # trim trace to start and endtime
                        temp_tr.trim(starttime=trace_starttime,
                                     endtime=trace_endtime)

                        # st.append(temp_tr)
                        st_dict[temp_tr.get_id()].append(temp_tr)

            # free memory
            temp_st = None
            temp_tr = None

            if len(st_dict) != 0:

                print('')
                print('Merging Traces from %s Stations....' % len(st_dict))
                # Attempt to merge all traces with matching ID'S (same keys in dict) in place
                # st.merge()

                for key in list(st_dict.keys()):  # list() so keys can be deleted while iterating
                    if len(st_dict[key]) > 1:
                        temp_st = Stream(traces=st_dict[key])
                        # merge in place
                        # print('\tMerging %s in Stream:' % temp_st.count())
                        temp_st.merge()
                        # assign trace back to dictionary key if there is data
                        if temp_st:
                            print("Station {0} has {1} Seconds of data".format(
                                key, temp_st[0].stats.endtime -
                                temp_st[0].stats.starttime))
                            st_dict[key] = temp_st[0]
                        else:
                            print("No Data for: %s" % key)
                            # no data for station; delete key
                            del st_dict[key]
                            continue
                    elif len(st_dict[key]) == 1:
                        print("Station {0} has {1} Seconds of data".format(
                            key, st_dict[key][0].stats.endtime -
                            st_dict[key][0].stats.starttime))
                        st_dict[key] = st_dict[key][0]
                    elif len(st_dict[key]) == 0:
                        # no data for station delete key
                        print("No Data for: %s" % key)
                        del st_dict[key]

                print(
                    '\nTrimming Traces to 20 mins around earthquake time....')

                # now trim the st object to 5 mins
                # before query time and 15 minutes afterwards

                for key in st_dict.keys():

                    st_dict[key] = st_dict[key].trim(starttime=trace_starttime,
                                                     endtime=trace_endtime,
                                                     pad=True,
                                                     fill_value=0)

                # st.trim(starttime=trace_starttime, endtime=trace_endtime, pad=True, fill_value=0)

                try:
                    # write traces into temporary directory
                    # for tr in st:
                    for key in st_dict.keys():
                        if isinstance(st_dict[key], Stream):
                            #there is a problem with network codes (two stations named the same)
                            #ignore it for now
                            continue
                        st_dict[key].write(os.path.join(
                            temp_seed_out, st_dict[key].get_id() + ".MSEED"),
                                           format="MSEED")
                    print("\nWrote Temporary MiniSEED data to: " +
                          temp_seed_out)
                    print('')
                except Exception:
                    print("Something Went Wrong!")

            else:
                print("No Data for Earthquake!")

            # free memory
            st_dict = None

            # Now requesting reference station data from IRIS if desired
            if self.ref_radioButton.isChecked():
                ref_dir = os.path.join(temp_seed_out, 'ref_data')

                # create ref directory
                if os.path.exists(ref_dir):
                    shutil.rmtree(ref_dir)
                os.mkdir(ref_dir)

                # request stations that are close to the selected stations

                # first use the coords lists to get a bounding box for array
                def calc_bounding_box(x, y):
                    min_x, max_x = (min(x), max(x))
                    min_y, max_y = (min(y), max(y))

                    return (min_x, max_x, min_y, max_y)

                bb = calc_bounding_box(self.station_coords[0],
                                       self.station_coords[1])

                # request data for near earthquake time up to 5 degrees from bounding box of array
                print(
                    '\nRequesting Waveform Data from Nearby Permanent Network Stations....'
                )

                client = Client("IRIS")
                self.ref_inv = client.get_stations(
                    network="AU",
                    starttime=UTCDateTime(quake_df['qtime'] - (5 * 60)),
                    endtime=UTCDateTime(quake_df['qtime'] + (15 * 60)),
                    minlongitude=bb[0] - 2,
                    maxlongitude=bb[1] + 2,
                    minlatitude=bb[2] - 2,
                    maxlatitude=bb[3] + 2,
                    level='channel')

                print(self.ref_inv)

                ref_st = Stream()

                # go through inventory and request timeseries data
                for net in self.ref_inv:
                    for stn in net:
                        try:
                            ref_st += client.get_waveforms(
                                network=net.code,
                                station=stn.code,
                                channel='*',
                                location='*',
                                starttime=UTCDateTime(quake_df['qtime'] -
                                                      (5 * 60)),
                                endtime=UTCDateTime(quake_df['qtime'] +
                                                    (15 * 60)))
                        except FDSNException:
                            print(
                                'No Data for Earthquake from Reference Station: '
                                + stn.code)

                        else:
                            # plot the reference stations
                            js_call = "addRefStation('{station_id}', {latitude}, {longitude});" \
                                .format(station_id=stn.code, latitude=stn.latitude,
                                        longitude=stn.longitude)
                            self.web_view.page().mainFrame(
                            ).evaluateJavaScript(js_call)

                try:
                    # write ref traces into temporary directory
                    for tr in ref_st:
                        tr.write(os.path.join(ref_dir, tr.id + ".MSEED"),
                                 format="MSEED")
                    print("Wrote Reference MiniSEED data to: " + ref_dir)
                    print('\nEarthquake Data Query Done!!!')
                except Exception:
                    print("Something Went Wrong Writing Reference Data!")

                self.ref_inv.write(os.path.join(ref_dir, "ref_metadata.xml"),
                                   format="STATIONXML")