Example #1
    def ReadStnPara(self):
        """
        Read station parameters from Redis; station info is stored in the 1-D list station_list, channel info in the 2-D list channel_list.
        """
        stn_para_key = 'StnPara'
        self.station_number = int(self.r.hget(stn_para_key, "station_number"))
        self.trace_list = [[Trace() for col in range(3)]
                           for row in range(self.station_number)]
        print('++++')
        print(type(self.trace_list[0][0]))
        self.channel_list = [[0 for col in range(3)]
                             for row in range(self.station_number)]
        self.station_list = [0 for col in range(self.station_number)]
        # multilist = [[0 for col in range(5)] for row in range(3)]   # 2-D array
        coordinates = AttribDict()
        for key in ['latitude', 'longitude', 'elevation']:
            coordinates[key] = 0
        channel_name = ['Z', 'E', 'N']

        for staNo in range(0, self.station_number):
            stn_para_field = '{0:0>4}'.format(staNo)
            stn_para_res = self.r.hget(stn_para_key, stn_para_field)
            stn_par = stn_para_res.decode('utf-8').split()
            # print(str(staNo) + str(stn_par))

            stn_para_defaults = AttribDict()
            stn_para_defaults['coordinates'] = coordinates
            stn_para_defaults['network'] = stn_par[1]
            stn_para_defaults['station'] = stn_par[2]
            stn_para_defaults['channel'] = stn_par[4]
            stn_para_defaults['location'] = stn_par[5]
            stn_para_defaults['latitude'] = float(stn_par[6])
            stn_para_defaults['longitude'] = float(stn_par[7])
            stn_para_defaults['elevation'] = float(stn_par[8])
            stn_para_defaults['channelNum'] = int(stn_par[9])
            stn_para_defaults['sampling_rate'] = int(stn_par[10])
            self.station_list[staNo] = stn_para_defaults

            for chnNo in range(0, self.station_list[staNo]['channelNum']):
                chn_para_defaults = Stats(AttribDict())
                chn_para_defaults['sampling_rate'] = self.station_list[staNo][
                    'sampling_rate']
                chn_para_defaults['delta'] = 1.0
                chn_para_defaults['calib'] = 1.0
                chn_para_defaults['starttime'] = UTCDateTime(0)
                chn_para_defaults['npts'] = 0
                chn_para_defaults['network'] = stn_para_defaults['network']
                chn_para_defaults['station'] = stn_para_defaults['station']
                chn_para_defaults['channel'] = stn_para_defaults[
                    'channel'] + channel_name[chnNo]
                chn_para_defaults['location'] = stn_para_defaults['location']
                chn_para_defaults['response'] = float(stn_par[11 + chnNo])
                self.channel_list[staNo][chnNo] = chn_para_defaults
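For reference, ReadStnPara expects a Redis hash named StnPara: a station_number field plus one field per station, keyed by the zero-padded station index and holding a whitespace-separated parameter string. A minimal sketch of writing such a hash (the token layout is inferred from the indices read above; tokens 0 and 3 are never consumed and are placeholders):

import redis

r = redis.Redis(host='localhost', port=6379)
r.hset('StnPara', 'station_number', 1)
# assumed tokens: 0=index, 1=network, 2=station, 3=unused, 4=channel prefix,
# 5=location, 6=lat, 7=lon, 8=elevation, 9=channel count, 10=sampling rate,
# 11..=per-channel response factors
r.hset('StnPara', '0000',
       '0000 BW FUR x HH 00 48.16 11.28 565.0 3 100 1.0 1.0 1.0')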
Example #2
def readstructtag(fid):
    y = AttribDict()
    data = np.fromfile(fid, structtag_dtypes, 1)
    for (key, (fmt, size)) in structtag_dtypes.fields.items():
        if str(fmt).count("S") != 0:
            y[key] = data[key][0].decode('UTF-8')
        else:
            y[key] = data[key][0]
    return y
Example #3
File: core.py Project: zurgeg/obspy
def readstructtag(fid):
    y = AttribDict()
    # avoid passing np.intXX down to SpooledTemporaryFile.read() since it
    # errors out on numpy integer types on at least Python 3.6, seems fixed in
    # Python 3.7
    # see https://ci.appveyor.com/project/obspy/obspy/
    #                  builds/29252080/job/9gr8bqkgr005523n#L742
    data = fid.read(int(structtag_dtypes.itemsize))
    data = from_buffer(data, structtag_dtypes)
    for (key, (fmt, size)) in structtag_dtypes.fields.items():
        if str(fmt).count("S") != 0:
            y[key] = data[key][0].decode('UTF-8')
        else:
            y[key] = data[key][0]
    return y
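Both variants assume a module-level structtag_dtypes, a structured numpy dtype describing the tag header. A plausible sketch, where the field names match the attributes accessed in _read_dmx below (id_struct, len_struct, len_data) and the widths are assumptions chosen to add up to the 12-byte read used there:

import numpy as np

structtag_dtypes = np.dtype([
    ('sync', 'S1'),         # assumed 1-byte sync marker
    ('machine', 'S1'),      # assumed 1-byte machine/byte-order flag
    ('id_struct', '<i2'),   # structure id (compared against 7 in _read_dmx)
    ('len_struct', '<i4'),  # length of the structure body in bytes
    ('len_data', '<i4'),    # length of the trailing data block in bytes
])
assert structtag_dtypes.itemsize == 12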
Example #4
    def test_issue217(self):
        """
        Tests issue #217.

        Reading a MiniSEED file without sequence numbers and a record length of
        1024.
        """
        file = os.path.join(self.path, 'data',
                            'reclen_1024_without_sequence_numbers.mseed')
        tr = read(file)[0]
        ms = AttribDict({'record_length': 1024, 'encoding': 'STEIM1',
                         'filesize': 2048, 'dataquality': 'D',
                         'number_of_records': 2, 'byteorder': '>'})
        self.assertEqual('XX.STF1..HHN', tr.id)
        self.assertEqual(ms, tr.stats.mseed)
        self.assertEqual(932, tr.stats.npts)
        self.assertEqual(UTCDateTime(2007, 5, 31, 22, 45, 46, 720000),
                         tr.stats.endtime)
Example #5
def write_stream_to_sac(str1, write_dir='data', ext='', verbose=False):

    if ext != '':
        ext = '.' + ext
    if not os.path.isdir(write_dir):
        sys.exit('No such dir to write sac: ' + write_dir)

    for tr in str1:
        sac = AttribDict()
        (sac.kstnm, sac.knetwk, sac.kcmpnm,
         sac.khole) = (str(tr.stats.station), str(tr.stats.network),
                       str(tr.stats.channel), str(tr.stats.location))
        (sac.stla, sac.stlo,
         sac.stel) = (tr.stats.station_coordinates.latitude,
                      tr.stats.station_coordinates.longitude,
                      tr.stats.station_coordinates.elevation)

        ev = tr.stats.event_origin
        time = ev.time
        # sac depth is in km
        sac.evla, sac.evlo, sac.evdp, sac.mag = ev.latitude, ev.longitude, ev.depth / 1000., tr.stats.event_mag.mag
        # sac uses millisec while obspy uses microsec.
        sac.nzyear, sac.nzjday, sac.nzhour, sac.nzmin, sac.nzsec, sac.nzmsec = time.year, time.julday, time.hour, time.minute, time.second, time.microsecond / 1000
        sac.o = 0.
        sac.b = tr.stats.starttime - time  # this is very important!!
        sac.kevnm = str(time)
        # dip is from horizontal downward; inc is from vertical downward
        # in SAC component "incidence angle" relative to the vertical
        sac.cmpaz, sac.cmpinc = tr.stats.cmpaz, tr.stats.dip + 90
        sac.gcarc, sac.dist, sac.az, sac.baz = tr.stats.gcarc, tr.stats.distance / 1000, tr.stats.azimuth, tr.stats.back_azimuth
        # traveltimes
        sac.a = tr.stats.Parr.arrival_time
        sac.ka = 'P'  # cannot add S time because user1 is assigned to ray parameter
        # the ray parameter required by hk code is in sin(th)/v
        (sac.user0, sac.user1) = (tr.stats.Parr.rayp / radiusOfEarth,
                                  tr.stats.Sarr.rayp / radiusOfEarth)
        # add sac header to tr.stats
        tr.stats.sac = sac
        # set sac file name
        tr_name = write_dir + '/' + tr.stats.station + '.' + tr.stats.network + '.' + tr.stats.location + '.' + tr.stats.channel + ext + '.sac'
        tr.write(tr_name, format='SAC')
        if verbose:
            print('Writing sac file ...' + tr_name)
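write_stream_to_sac assumes each trace already carries custom stats filled in upstream (station_coordinates, event_origin, event_mag, cmpaz, dip, gcarc, distance, azimuth, back_azimuth, Parr, Sarr) as well as a module-level radiusOfEarth constant. A hypothetical call against such a preprocessed stream:

from obspy import read

st = read('preprocessed.mseed')  # hypothetical file; traces must carry the custom stats
write_stream_to_sac(st, write_dir='data', ext='rf', verbose=True)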
Example #6
    def test_issue160(self):
        """
        Tests issue #160.

        Reading the header of SEED file.
        """
        file = os.path.join(self.path, 'data',
                            'BW.BGLD.__.EHE.D.2008.001.first_10_records')
        tr_one = read(file)[0]
        tr_two = read(file, headonly=True)[0]
        ms = AttribDict({'record_length': 512, 'encoding': 'STEIM1',
                         'filesize': 5120, 'dataquality': 'D',
                         'number_of_records': 10, 'byteorder': '>'})
        for tr in tr_one, tr_two:
            self.assertEqual('BW.BGLD..EHE', tr.id)
            self.assertEqual(ms, tr.stats.mseed)
            self.assertEqual(4120, tr.stats.npts)
            self.assertEqual(UTCDateTime(2008, 1, 1, 0, 0, 20, 510000),
                             tr.stats.endtime)
Example #7
def stream_add_stats(data_stream, inv, evt, write_sac=False, rotate_in_obspy=False):
    for net in inv:
        for sta in net:
            str1 = data_stream.select(network=net.code, station=sta.code)
            print(str(net.code), str(sta.code), len(str1))
            if len(str1) == 0:
                continue
            # update in the future to deal with multiple channels per component
            if len(str1) % 3 != 0:
                print('Problem: missing components', str1)
                exit()

            for tr in str1:
                for chan in sta:
                    if (tr.stats.channel == chan.code and
                            tr.stats.location == chan.location_code):
                        break
                else:
                    print('Problem finding channel in inventory', tr)
                    exit()
                tr.stats.coordinates = {'latitude': chan.latitude,
                                        'longitude': chan.longitude}
                (tr.stats.distance, tr.stats.azimuth,
                 tr.stats.back_azimuth) = gps2dist_azimuth(
                    chan.latitude, chan.longitude,
                    evt.origins[0].latitude, evt.origins[0].longitude)
                if write_sac:
                    sac = AttribDict()
                    sac.kstnm = str(sta.code)
                    sac.knetwk = str(net.code)
                    sac.kcmpnm = str(chan.code)
                    sac.khole = str(chan.location_code)
                    sac.stla = chan.latitude
                    sac.stlo = chan.longitude
                    sac.stel = chan.elevation
                    sac.evla = evt.origins[0].latitude
                    sac.evlo = evt.origins[0].longitude
                    sac.evdp = evt.origins[0].depth / 1000.  # in km
                    sac.mag = evt.magnitudes[0].mag
                    time = evt.origins[0].time
                    (sac.nzyear, sac.nzjday, sac.nzhour, sac.nzmin,
                     sac.nzsec, sac.nzmsec) = (time.year, time.julday,
                                               time.hour, time.minute,
                                               time.second,
                                               time.microsecond / 1000)
                    sac.o = 0.
                    sac.b = tr.stats.starttime - time  # this is very important!!
                    sac.kevnm = str(time)
                    sac.cmpaz = chan.azimuth
                    # dip is from horizontal downward; inc is from vertical downward
                    sac.cmpinc = chan.dip + 90
                    sac.gcarc = locations2degrees(evt.origins[0].latitude,
                                                  evt.origins[0].longitude,
                                                  chan.latitude, chan.longitude)
                    sac.dist = tr.stats.distance / 1000
                    sac.az = tr.stats.azimuth
                    sac.baz = tr.stats.back_azimuth
                    tr.stats.sac = sac
                    tr_name = (sta.code + '.' + net.code + '.' +
                               chan.location_code + '.' + chan.code + '.sac')
                    tr.write(tr_name, format='SAC')
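A hypothetical driver for stream_add_stats, assuming waveforms, a StationXML inventory, and a QuakeML event on disk:

from obspy import read, read_inventory, read_events

st = read('waveforms.mseed')           # hypothetical input files
inv = read_inventory('stations.xml')
evt = read_events('events.xml')[0]
stream_add_stats(st, inv, evt, write_sac=True)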
Example #8
def _read_dmx(filename, head_only=None, **kwargs):
    station = None
    if "station" in kwargs:
        station = kwargs["station"]

    traces = []
    with open(filename, "rb") as fid:
        content = fid.read()

    with SpooledTemporaryFile(mode='w+b') as fid:
        fid.write(content)
        fid.seek(0)

        while fid.read(12):  # we require at least 1 full structtag
            fid.seek(-12, 1)
            structtag = readstructtag(fid)
            if structtag.id_struct == 7:
                descripttrace = readdescripttrace(fid)
                if station is None or descripttrace.st_name.strip() == station:
                    data = readdata(fid, descripttrace.length,
                                    descripttrace.datatype)
                    tr = Trace(data=np.asarray(data))
                    tr.stats.network = descripttrace.network.strip()
                    tr.stats.station = descripttrace.st_name.strip()
                    tr.stats.channel = descripttrace.component
                    tr.stats.sampling_rate = descripttrace.rate
                    tr.stats.starttime = UTCDateTime(descripttrace.begintime)
                    tr.stats.dmx = AttribDict({
                        "descripttrace": descripttrace,
                        "structtag": structtag
                    })
                    traces.append(tr)
                else:
                    fid.seek(int(structtag.len_data), 1)
            else:
                fid.seek(
                    int(structtag.len_struct) + int(structtag.len_data), 1)

    st = Stream(traces=traces)
    # print(st)
    return st
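Note that the head_only argument is accepted but never used in this variant. A hypothetical call, filtering for a single station:

st = _read_dmx('example.dmx', station='ETNA')  # hypothetical file and station code
print(st)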
Example #9
def keep_longest(stream):
    """
    keeps the longest record of each channel
    """

    st_tmp = Stream()
    # sort the input so the longest record of each channel comes first;
    # the original sorted the still-empty st_tmp, which had no effect
    stream.sort(['npts'], reverse=True)
    channels = AttribDict()

    for i, tr in enumerate(stream):

        if tr.stats.channel in channels:
            continue
        else:
            # Remember the channel name, number of samples and trace index
            channels[tr.stats.channel] = [tr.stats.npts, i]
            st_tmp.append(stream[i])

    return st_tmp
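A minimal usage sketch, assuming a file with several records per channel:

from obspy import read

st = keep_longest(read('gappy_records.mseed'))  # hypothetical file name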
Example #10
def cut_events(in_, out):
    print('read events...')
    catalog = readEvents(in_, 'QUAKEML')
    print('cut events...')
    for event in ProgressBar()(catalog):
        oid = get_event_id(event.origins[0].resource_id.getQuakeMLURI())
        ori = event.origins[0]
        etime = ori.time
        # print('Select', event)
        st = Stream()
        for arrival in ori.arrivals:
            arrival.pick_id.convertIDToQuakeMLURI()
            pick = arrival.pick_id.getReferredObject()
            if not pick:
                print('FAIL to get pick from arrival')
                continue
            ptime = pick.time
            seed_id = pick.waveform_id.getSEEDString()
            try:
                st1 = Stream(
                    data.client.getWaveform(*(seed_id.split('.') +
                                              [ptime - 50, ptime + 250])))
            except Exception as ex:
                print('%s for %s' % (ex, seed_id))
                continue
            st1.merge()
            # print('load %s %s %.1f' % (seed_id, pick.phase_hint, ptime - etime))
            st1[0].stats['event'] = AttribDict(
                id=event.resource_id.resource_id,
                origin_id=oid,
                etime=etime,
                ptime=ptime,
                lat=ori.latitude,
                lon=ori.longitude,
                depth=ori.depth,
                rms=ori.quality.standard_error,
                mag=event.magnitudes[0].mag)
            st += st1
        st.write(out % oid, 'Q')
Example #11
def gauge2sac(gauge_file, dictionary, xyfile, outdir, time_epi, dt):
    '''
    Convert output from the fort.gauge file into individual SAC files
    '''
    from numpy import genfromtxt,unique,where,arange,interp
    from obspy import Stream,Trace
    from obspy.core.util.attribdict import AttribDict

    
    #Read gauge file
    gauges=genfromtxt(gauge_file)
    #Read names
    plume_name=genfromtxt(dictionary,usecols=0,dtype='S')
    claw_name=genfromtxt(dictionary,usecols=1)
    lat=genfromtxt(xyfile,usecols=2)
    lon=genfromtxt(xyfile,usecols=3)
    #Find unique stations
    gauge_list=unique(gauges[:,0])
    for k in range(len(gauge_list)):
        print(k)
        st=Stream(Trace())
        i=where(gauges[:,0]==gauge_list[k])[0]
        data=gauges[i,6]
        time=gauges[i,2]
        ti=arange(0,time.max(),dt)
        tsunami=interp(ti,time,data)
        st[0].data=tsunami
        st[0].stats.starttime=time_epi
        st[0].stats.delta=dt
        iname=where(claw_name==gauge_list[k])[0][0]
        st[0].stats.station=plume_name[iname]
        sac=AttribDict()
        sac.stla=lat[iname]
        sac.stlo=lon[iname]
        sac.evla=46.607
        sac.evlo=153.230
        #sac.iztype='IO'
        st[0].stats['sac']=sac
        st.write(outdir+'/'+plume_name[iname]+'.tsun.sac',format='SAC')
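A hypothetical call (file names and origin time are placeholders; dt is the resampling interval in seconds):

from obspy import UTCDateTime

gauge2sac(gauge_file='fort.gauge', dictionary='gauge_names.txt',
          xyfile='gauge_xy.txt', outdir='sac_out',
          time_epi=UTCDateTime(2011, 3, 11, 5, 46, 24), dt=1.0)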
Example #12
    def __init__(self):
        self.config = AttribDict()
        self.events = events.ses3dCatalog()
        self.stalst = stations.StaLst()
        # self.vmodel
        return
Example #13
    def _unpack_trace(data):
        ah_stats = AttribDict({
            'version': '2.0',
            'event': AttribDict(),
            'station': AttribDict(),
            'record': AttribDict(),
            'extras': []
        })

        # station info
        data.unpack_int()  # undocumented extra int?
        ah_stats.station.code = _unpack_string(data)
        data.unpack_int()  # here too?
        ah_stats.station.channel = _unpack_string(data)
        data.unpack_int()  # and again?
        ah_stats.station.type = _unpack_string(data)
        ah_stats.station.recorder = _unpack_string(data)
        ah_stats.station.sensor = _unpack_string(data)
        ah_stats.station.azimuth = data.unpack_float()  # degrees E from N
        ah_stats.station.dip = data.unpack_float()  # up = -90, down = +90
        ah_stats.station.latitude = data.unpack_double()
        ah_stats.station.longitude = data.unpack_double()
        ah_stats.station.elevation = data.unpack_float()
        ah_stats.station.gain = data.unpack_float()
        ah_stats.station.normalization = data.unpack_float()  # A0

        npoles = data.unpack_int()
        ah_stats.station.poles = []
        for _i in range(npoles):
            r = data.unpack_float()
            i = data.unpack_float()
            ah_stats.station.poles.append(complex(r, i))

        nzeros = data.unpack_int()
        ah_stats.station.zeros = []
        for _i in range(nzeros):
            r = data.unpack_float()
            i = data.unpack_float()
            ah_stats.station.zeros.append(complex(r, i))
        ah_stats.station.comment = _unpack_string(data)

        # event info
        ah_stats.event.latitude = data.unpack_double()
        ah_stats.event.longitude = data.unpack_double()
        ah_stats.event.depth = data.unpack_float()
        ot_year = data.unpack_int()
        ot_mon = data.unpack_int()
        ot_day = data.unpack_int()
        ot_hour = data.unpack_int()
        ot_min = data.unpack_int()
        ot_sec = data.unpack_float()
        try:
            ot = UTCDateTime(ot_year, ot_mon, ot_day, ot_hour, ot_min, ot_sec)
        except Exception:
            ot = None
        ah_stats.event.origin_time = ot
        data.unpack_int()  # and again?
        ah_stats.event.comment = _unpack_string(data)

        # record info
        ah_stats.record.type = dtype = data.unpack_int()  # data type
        ah_stats.record.ndata = ndata = data.unpack_uint()  # number of samples
        ah_stats.record.delta = data.unpack_float()  # sampling interval
        ah_stats.record.max_amplitude = data.unpack_float()
        at_year = data.unpack_int()
        at_mon = data.unpack_int()
        at_day = data.unpack_int()
        at_hour = data.unpack_int()
        at_min = data.unpack_int()
        at_sec = data.unpack_float()
        at = UTCDateTime(at_year, at_mon, at_day, at_hour, at_min, at_sec)
        ah_stats.record.start_time = at
        ah_stats.record.units = _unpack_string(data)
        ah_stats.record.inunits = _unpack_string(data)
        ah_stats.record.outunits = _unpack_string(data)
        data.unpack_int()  # and again?
        ah_stats.record.comment = _unpack_string(data)
        data.unpack_int()  # and again?
        ah_stats.record.log = _unpack_string(data)

        # user attributes
        nusrattr = data.unpack_int()
        ah_stats.usrattr = {}
        for _i in range(nusrattr):
            key = _unpack_string(data)
            value = _unpack_string(data)
            ah_stats.usrattr[key] = value

        # unpack data using dtype from record info
        if dtype == 1:
            # float
            temp = data.unpack_farray(ndata, data.unpack_float)
        elif dtype == 6:
            # double
            temp = data.unpack_farray(ndata, data.unpack_double)
        else:
            # e.g. 3 (vector), 2 (complex), 4 (tensor)
            msg = 'Unsupported AH v2 record type %d'
            raise NotImplementedError(msg % (dtype))

        tr = Trace(np.array(temp))
        tr.stats.ah = ah_stats
        tr.stats.delta = ah_stats.record.delta
        tr.stats.starttime = ah_stats.record.start_time
        tr.stats.station = ah_stats.station.code
        tr.stats.channel = ah_stats.station.channel
        return tr
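Both AH readers depend on an _unpack_string helper that is not shown; data is an XDR unpacker, so a plausible sketch (the real obspy helper may differ in details) is:

def _unpack_string(data):
    # AH stores strings as an XDR int length followed by the raw bytes
    length = data.unpack_int()
    return data.unpack_fstring(length).rstrip(b'\x00 ').decode('latin-1')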
Example #14
    def _unpack_trace(data):
        ah_stats = AttribDict({
            'version': '1.0',
            'event': AttribDict(),
            'station': AttribDict(),
            'record': AttribDict(),
            'extras': []
        })

        # station info
        ah_stats.station.code = _unpack_string(data)
        ah_stats.station.channel = _unpack_string(data)
        ah_stats.station.type = _unpack_string(data)
        ah_stats.station.latitude = data.unpack_float()
        ah_stats.station.longitude = data.unpack_float()
        ah_stats.station.elevation = data.unpack_float()
        ah_stats.station.gain = data.unpack_float()
        ah_stats.station.normalization = data.unpack_float()  # A0
        poles = []
        zeros = []
        for _i in range(0, 30):
            r = data.unpack_float()
            i = data.unpack_float()
            poles.append(complex(r, i))
            r = data.unpack_float()
            i = data.unpack_float()
            zeros.append(complex(r, i))
        # first value describes number of poles/zeros
        npoles = int(poles[0].real) + 1
        nzeros = int(zeros[0].real) + 1
        ah_stats.station.poles = poles[1:npoles]
        ah_stats.station.zeros = zeros[1:nzeros]

        # event info
        ah_stats.event.latitude = data.unpack_float()
        ah_stats.event.longitude = data.unpack_float()
        ah_stats.event.depth = data.unpack_float()
        ot_year = data.unpack_int()
        ot_mon = data.unpack_int()
        ot_day = data.unpack_int()
        ot_hour = data.unpack_int()
        ot_min = data.unpack_int()
        ot_sec = data.unpack_float()
        try:
            ot = UTCDateTime(ot_year, ot_mon, ot_day, ot_hour, ot_min, ot_sec)
        except Exception:
            ot = None
        ah_stats.event.origin_time = ot
        ah_stats.event.comment = _unpack_string(data)

        # record info
        ah_stats.record.type = dtype = data.unpack_int()  # data type
        ah_stats.record.ndata = ndata = data.unpack_uint()  # number of samples
        ah_stats.record.delta = data.unpack_float()  # sampling interval
        ah_stats.record.max_amplitude = data.unpack_float()
        at_year = data.unpack_int()
        at_mon = data.unpack_int()
        at_day = data.unpack_int()
        at_hour = data.unpack_int()
        at_min = data.unpack_int()
        at_sec = data.unpack_float()
        at = UTCDateTime(at_year, at_mon, at_day, at_hour, at_min, at_sec)
        ah_stats.record.start_time = at
        ah_stats.record.abscissa_min = data.unpack_float()
        ah_stats.record.comment = _unpack_string(data)
        ah_stats.record.log = _unpack_string(data)

        # extras
        ah_stats.extras = data.unpack_array(data.unpack_float)

        # unpack data using dtype from record info
        if dtype == 1:
            # float
            temp = data.unpack_farray(ndata, data.unpack_float)
        elif dtype == 6:
            # double
            temp = data.unpack_farray(ndata, data.unpack_double)
        else:
            # e.g. 3 (vector), 2 (complex), 4 (tensor)
            msg = 'Unsupported AH v1 record type %d'
            raise NotImplementedError(msg % (dtype))
        tr = Trace(np.array(temp))
        tr.stats.ah = ah_stats
        tr.stats.delta = ah_stats.record.delta
        tr.stats.starttime = ah_stats.record.start_time
        tr.stats.station = ah_stats.station.code
        tr.stats.channel = ah_stats.station.channel
        return tr
Example #15
def txt2sac(txt, output_dir=os.getcwd()):
  '''This function converts a txt record file to SAC files'''
  with open(txt,encoding='iso-8859-9') as eqfile:
    if os.stat(txt).st_size == 0:
      print(txt + ' is empty')
      return
    head = [next(eqfile) for x in range(14)]
      
  head = [line.rstrip('\n') for line in head]
  hd =  AttribDict()
  hd['sac'] = AttribDict()
  # Retrieve Event Information
  # Retrieve EventTime
  s = ''.join(i for i in head[2] if i.isdigit())
  evtime = datetime.strptime(s, '%Y%m%d%H%M%S%f')
  # Retrieve EVLA, EVLO
  _,coors = head[3].split(':')
  # Remove space, N and E 
  coors = coors.replace(' ','')
  coors = coors.replace('N','')
  coors = coors.replace('E','')
  evla,evlo = coors.split('-')
  hd['sac'].evla = float(evla.replace(',', '.'))
  hd['sac'].evlo = float(evlo.replace(',', '.'))
  # Retrieve EVDP
  _,depth = head[4].split(':')
  evdp = depth.replace(' ','')
  hd['sac'].evdp = float(evdp)
  # Retrieve MAG
  _,mags = head[5].split(':')
  # Check if multiple Magnitude types are associated with the earthquake
  if ',' in mags:
    mag,imagtyp = mag_seperator(mags)
    hd['sac'].imagtyp = imagtyp
  else:
    _, mag,imagtyp = mags.split(' ')
    hd['sac'].imagtyp = mag_type(imagtyp)
  hd['sac'].mag = float(mag.replace(',','.'))
  # Retrieve Station Information
  # Assign Network
  hd['network'] = 'TK'
  # Assign location
  hd['location'] = '00'
  # Retrieve KSTNM
  _,stnm = head[6].split(':')
  kstnm = stnm.replace(' ','')
  hd['station'] = kstnm
  # Retrieve STLA, STLO
  _,coors = head[7].split(':')
  # Remove space, N and E 
  coors = coors.replace(' ','')
  coors = coors.replace('N','')
  coors = coors.replace('E','')
  stla,stlo = coors.split('-')
  hd['sac'].stla = float(stla.replace(',', '.'))
  hd['sac'].stlo = float(stlo.replace(',', '.'))
  # Retrieve STEL
  _,el = head[8].split(':')
  stel = el.replace(' ','')
  hd['stel'] = float(stel.replace(',','.'))
  # Retrieve Record Information
  # Retrieve Recordtime
  s = ''.join(i for i in head[11] if i.isdigit())
  starttime = datetime.strptime(s, '%d%m%Y%H%M%S%f')
  hd['starttime'] = UTCDateTime(starttime)
  hd['sac'].o = UTCDateTime(starttime) - UTCDateTime(evtime) 
  # Retrieve NPTS
  _,nptss = head[12].split(':')
  npts = nptss.replace(' ','')
  hd['npts'] = int(npts)
  # Retrieve DELTA
  _,dt = head[13].split(':')
  delta = dt.replace(' ','')
  hd['delta'] = float(delta.replace(',','.'))
  hd['sampling_rate'] = 1/hd['delta']
  hd['endtime'] = hd['starttime'] + hd['npts']*hd['delta']
  hd['sac'].lcalda = 1
  hd['sac'].lovrok = 1
  # Read Waveform
  with open(txt,encoding='iso-8859-9') as eqfile:
    wfs = eqfile.readlines()[18:]
  wfs = [line.rstrip('\n') for line in wfs]
  wfs = [line.split(' ') for line in wfs]
  wfs = [list(filter(None, line)) for line in wfs]
  e, n, z = [], [], []
  for line in wfs:
    n.append(line[0])
    e.append(line[1])
    z.append(line[2])
  #East
  tracee = Trace(np.asarray(e, dtype=np.float64))  # samples must be numeric to write SAC
  hd['channel'] = 'HGE'
  tracee.stats = hd
  st = Stream(traces=[tracee])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  #North
  tracen = Trace(np.asarray(n, dtype=np.float64))
  hd['channel'] = 'HGN'
  tracen.stats = hd
  st = Stream(traces=[tracen])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  #Vertical
  tracez = Trace(np.asarray(z, dtype=np.float64))
  hd['channel'] = 'HGZ'
  tracez.stats = hd
  st = Stream(traces=[tracez])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  return
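A hypothetical call; the input is a fixed-layout text record (14 header lines, waveform samples from line 19 on) and three SAC files (HGE, HGN, HGZ) are written:

txt2sac('20230206011732_4902.txt', output_dir='sac_out')  # hypothetical record file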
Example #16
def get_syngine_data(model,
                     client=None,
                     reclat=None,
                     reclon=None,
                     inv=None,
                     eventid=None,
                     origins=None,
                     m_tensor=None,
                     source_dc=None):
    """
    param reclat:
    type reclat: list of floats
    param reclon:
    type reclon: list of floats
    """
    if client:
        client = fdsnClient(client)
    synclient = synClient()

    if inv:
        streams = AttribDict()
        for network in inv:
            stream = obspy.Stream()

            for station in network:
                print(station)
                if eventid:
                    stream_tmp = synclient.get_waveforms(model=model,
                                                         network=network.code,
                                                         station=station.code,
                                                         eventid=eventid)
                else:
                    stream_tmp = synclient.get_waveforms(
                        model=model,
                        network=network.code,
                        station=station.code,
                        origintime=origins.time,
                        sourcelatitude=origins.latitude,
                        sourcelongitude=origins.longitude,
                        sourcedepthinmeters=origins.depth,
                        sourcemomenttensor=m_tensor,
                        sourcedoublecouple=source_dc)
                stream.append(stream_tmp[0])
            streams[network.code] = stream

    if reclat and reclon:
        stream = obspy.Stream()
        for rlat, rlon in zip(reclat, reclon):
            if eventid:
                stream_tmp = synclient.get_waveforms(model=model,
                                                     receiverlatitude=rlat,
                                                     receiverlongitude=rlon,
                                                     eventid=eventid)
            else:
                stream_tmp = synclient.get_waveforms(
                    model=model,
                    receiverlatitude=rlat,
                    receiverlongitude=rlon,
                    origintime=origins.time,
                    sourcelatitude=origins.latitude,
                    sourcelongitude=origins.longitude,
                    sourcedepthinmeters=origins.depth,
                    sourcemomenttensor=m_tensor,
                    sourcedoublecouple=source_dc)
            stream.append(stream_tmp[0])
        streams = stream

    if origins:
        starttime = origins.time - 120
        endtime = starttime + 120
        if client:
            cat = client.get_events(starttime,
                                    endtime,
                                    minlatitude=origins.latitude - .5,
                                    maxlatitude=origins.latitude + .5)
        else:
            cat = None
    else:
        cat = None

    return streams, cat
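A hypothetical call requesting synthetics for explicit receiver coordinates by event id (the model name, coordinates and event id are placeholders):

streams, cat = get_syngine_data('ak135f_5s',
                                reclat=[46.9, 47.3],
                                reclon=[8.4, 8.6],
                                eventid='GCMT:C201002270634A')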
Example #17
def RGF_from_SW4(path_to_green=".",
                 t0=0,
                 file_name=None,
                 origin_time=None,
                 event_lat=None,
                 event_lon=None,
                 depth=None,
                 station_name=None,
                 station_lat=None,
                 station_lon=None,
                 output_directory="sw4out"):
    """
    Function to convert reciprocal Green's functions from SW4 to tensor format
    
    Reads the reciprocal Green's functions (displacement/unit force) from SW4 and
    performs the summation to get the Green's function tensor.
    RGFs from SW4 are oriented north, east and positive down by setting az=0.
    
    Assumes the following file structure:
    f[x,y,z]/station_name/event_name.[x,y,z]
    """

    import os
    from obspy.core import read, Stream
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.core.util.attribdict import AttribDict

    # Defined variables (do not change)
    dirs = ["fz", "fx", "fy"]  # directory to displacement per unit force
    du = [
        "duxdx", "duydy", "duzdz", "duydx", "duxdy", "duzdx", "duxdz", "duzdy",
        "duydz"
    ]
    orientation = ["Z", "N", "E"]  # set az=0 in SW4 so x=north, y=east
    cmpaz = [0, 0, 90]
    cmpinc = [0, 90, 90]

    # Create a new output directory under path_to_green
    dirout = "%s/%s" % (path_to_green, output_directory)
    if os.path.exists(dirout):
        print("Warning: output directory '%s' already exists." % dirout)
    else:
        print("Creating output directory '%s'." % dirout)
        os.mkdir(dirout)

    # Loop over each directory fx, fy, fz
    nsta = len(station_name)
    for i in range(3):
        # Set headers according to the orientation
        if dirs[i][-1].upper() == "Z":
            scale = -1  # change to positive up
        else:
            scale = 1

        # Loop over each station
        for j in range(nsta):
            station = station_name[j]
            stlo = station_lon[j]
            stla = station_lat[j]
            dirin = "%s/%s/%s" % (path_to_green, dirs[i], station)
            print("Reading RGFs from %s:" % (dirin))
            st = Stream()
            for gradient in du:
                fname = "%s/%s.%s" % (dirin, file_name, gradient)
                st += read(fname, format="SAC")

            # Set station headers
            starttime = origin_time - t0
            dist, az, baz = gps2dist_azimuth(event_lat, event_lon, stla, stlo)

            # SAC headers
            sacd = AttribDict()
            sacd.stla = stla
            sacd.stlo = stlo
            sacd.evla = event_lat
            sacd.evlo = event_lon
            sacd.az = az
            sacd.baz = baz
            sacd.dist = dist / 1000  # convert to kilometers
            sacd.o = 0
            sacd.b = -1 * t0
            sacd.cmpaz = cmpaz[i]
            sacd.cmpinc = cmpinc[i]
            sacd.kstnm = station

            # Update start time
            for tr in st:
                tr.stats.starttime = starttime
                tr.stats.distance = dist
                tr.stats.back_azimuth = baz

            # Sum displacement gradients to get reciprocal Green's functions
            tensor = Stream()
            for gradient, element in zip(["duxdx", "duydy", "duzdz"],
                                         ["XX", "YY", "ZZ"]):
                trace = st.select(channel=gradient)[0].copy()
                trace.stats.channel = "%s%s" % (orientation[i], element)
                tensor += trace

            trace = st.select(channel="duydx")[0].copy()
            trace.data += st.select(channel="duxdy")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XY")
            tensor += trace

            trace = st.select(channel="duzdx")[0].copy()
            trace.data += st.select(channel="duxdz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XZ")
            tensor += trace

            trace = st.select(channel="duzdy")[0].copy()
            trace.data += st.select(channel="duydz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "YZ")
            tensor += trace

            # Set sac headers before saving
            print("    Saving GFs to %s" % dirout)
            for tr in tensor:
                tr.trim(origin_time, tr.stats.endtime)
                tr.data = scale * tr.data
                tr.stats.sac = sacd
                sacout = "%s/%s.%.4f.%s" % (dirout, station, depth,
                                            tr.stats.channel)
                #print("Writing %s to file."%sacout)
                tr.write(sacout, format="SAC")
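A hypothetical call (paths, names and coordinates are placeholders; origin_time must be a UTCDateTime):

from obspy import UTCDateTime

RGF_from_SW4(path_to_green='green', t0=10, file_name='event',
             origin_time=UTCDateTime(2014, 8, 24, 10, 20, 44),
             event_lat=38.215, event_lon=-122.312, depth=11.1,
             station_name=['STA1'], station_lat=[38.45], station_lon=[-122.30])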