Example #1
from obspy.core.util.attribdict import AttribDict
from obspy.geodetics.base import gps2dist_azimuth, locations2degrees


def stream_add_stats(data_stream, inv, evt, write_sac=False, rotate_in_obspy=False):
    for net in inv:
        for sta in net:
            str1=data_stream.select(network=net.code,station=sta.code)
            print(str(net.code),str(sta.code),len(str1))
            if len(str1) == 0:
                continue
            # update in future to deal with multiple channel (total_number_of channels)
            if len(str1) % 3 !=0:
                print('Problem: missing components', str1); exit()
                
            for tr in str1:
                for chan in sta:
                    if tr.stats.channel == chan.code and tr.stats.location == chan.location_code:
                        break
                else:
                    print('Problem finding channel in inventory',tr); exit()
                tr.stats.coordinates={'latitude':chan.latitude,'longitude':chan.longitude}
                (tr.stats.distance,tr.stats.azimuth,tr.stats.back_azimuth)=gps2dist_azimuth(
                    chan.latitude, chan.longitude, evt.origins[0].latitude, evt.origins[0].longitude)
                if write_sac:
                    sac = AttribDict()
                    sac.kstnm = str(sta.code)
                    sac.knetwk = str(net.code)
                    sac.kcmpnm = str(chan.code)
                    sac.khole = str(chan.location_code)
                    sac.stla = chan.latitude
                    sac.stlo = chan.longitude
                    sac.stel = chan.elevation
                    sac.evla = evt.origins[0].latitude
                    sac.evlo = evt.origins[0].longitude
                    sac.evdp = evt.origins[0].depth / 1000.  # in km
                    sac.mag = evt.magnitudes[0].mag
                    time = evt.origins[0].time
                    sac.nzyear, sac.nzjday, sac.nzhour = time.year, time.julday, time.hour
                    sac.nzmin, sac.nzsec, sac.nzmsec = time.minute, time.second, time.microsecond // 1000
                    sac.o=0.
                    sac.b=tr.stats.starttime-time # this is very important!!
                    sac.kevnm=str(time)
                    sac.cmpaz=chan.azimuth
                    # dip is from horizontal downward; inc is from vertical downward
                    sac.cmpinc=chan.dip+90
                    sac.gcarc = locations2degrees(evt.origins[0].latitude, evt.origins[0].longitude, chan.latitude, chan.longitude)
                    sac.dist,sac.az,sac.baz= tr.stats.distance/1000,tr.stats.azimuth,tr.stats.back_azimuth
                    tr.stats.sac=sac
                    tr_name=sta.code+'.'+net.code+'.'+chan.location_code+'.'+chan.code+'.sac'
                    tr.write(tr_name,format='SAC')
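
A minimal usage sketch for stream_add_stats, assuming standard ObsPy readers for the waveform, StationXML, and QuakeML inputs; all file names below are hypothetical placeholders.

from obspy import read, read_inventory, read_events

st = read("waveforms.mseed")            # hypothetical three-component waveforms
inv = read_inventory("stations.xml")    # hypothetical StationXML with channel coordinates
evt = read_events("event.xml")[0]       # hypothetical QuakeML with origin and magnitude
stream_add_stats(st, inv, evt, write_sac=True)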
Example #2
def gauge2sac(gauge_file,dictionary,xyfile,outdir,time_epi,dt):
    '''
    Convert output from a fort.gauge file into individual SAC files
    ''' 
    from numpy import genfromtxt,unique,where,arange,interp
    from obspy import Stream,Trace
    from obspy.core.util.attribdict import AttribDict

    
    #Read gauge file
    gauges=genfromtxt(gauge_file)
    #Read names
    plume_name=genfromtxt(dictionary,usecols=0,dtype=str)  # read names as text, not bytes
    claw_name=genfromtxt(dictionary,usecols=1)
    lat=genfromtxt(xyfile,usecols=2)
    lon=genfromtxt(xyfile,usecols=3)
    #Find unique stations
    gauge_list=unique(gauges[:,0])
    for k in range(len(gauge_list)):
        print(k)
        st=Stream(Trace())
        i=where(gauges[:,0]==gauge_list[k])[0]
        data=gauges[i,6]
        time=gauges[i,2]
        ti=arange(0,time.max(),dt)
        tsunami=interp(ti,time,data)
        st[0].data=tsunami
        st[0].stats.starttime=time_epi
        st[0].stats.delta=dt
        iname=where(claw_name==gauge_list[k])[0][0]
        st[0].stats.station=plume_name[iname]
        sac=AttribDict()
        sac.stla=lat[iname]
        sac.stlo=lon[iname]
        sac.evla=46.607
        sac.evlo=153.230
        #sac.iztype='IO'
        st[0].stats['sac']=sac
        st.write(outdir+'/'+plume_name[iname]+'.tsun.sac',format='SAC')
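
A minimal usage sketch for gauge2sac, assuming the column layout the function reads (dictionary file: station name then gauge number; xyfile: latitude and longitude in the third and fourth columns); all paths and the epicentral time are hypothetical placeholders.

from obspy import UTCDateTime

gauge2sac(gauge_file='fort.gauge',          # hypothetical gauge output file
          dictionary='gauge_names.txt',     # hypothetical: station name, gauge number
          xyfile='gauge_xy.txt',            # hypothetical: lat/lon in columns 3 and 4
          outdir='sac_out',
          time_epi=UTCDateTime('2010-01-01T00:00:00'),  # hypothetical epicentral time
          dt=1.0)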
Example #3
import os
from datetime import datetime

import numpy as np
from obspy import Stream, Trace, UTCDateTime
from obspy.core.util.attribdict import AttribDict


def txt2sac(txt, output_dir=os.getcwd()):
  '''Convert a fixed-format text record into three single-component SAC files.'''
  with open(txt,encoding='iso-8859-9') as eqfile:
    if os.stat(txt).st_size == 0:
      print(txt + ' is empty')
      return
    head = [next(eqfile) for x in range(14)]
      
  head = [line.rstrip('\n') for line in head]
  hd =  AttribDict()
  hd['sac'] = AttribDict()
  # Retrieve Event Information
  # Retrieve EventTime
  s = ''.join(i for i in head[2] if i.isdigit())
  evtime = datetime.strptime(s, '%Y%m%d%H%M%S%f')
  # Retrieve EVLA, EVLO
  _,coors = head[3].split(':')
  # Remove space, N and E 
  coors = coors.replace(' ','')
  coors = coors.replace('N','')
  coors = coors.replace('E','')
  evla,evlo = coors.split('-')
  hd['sac'].evla = float(evla.replace(',','.')); hd['sac'].evlo = float(evlo.replace(',','.'))
  # Retrieve EVDP
  _,depth = head[4].split(':')
  evdp = depth.replace(' ','')
  hd['sac'].evdp = float(evdp)
  # Retrieve MAG
  _,mags = head[5].split(':')
  # Check if multiple Magnitude types are associated with the earthquake
  if ',' in mags:
    mag,imagtyp = mag_seperator(mags)
    hd['sac'].imagtyp = imagtyp
  else:
    _, mag,imagtyp = mags.split(' ')
    hd['sac'].imagtyp = mag_type(imagtyp)
  hd['sac'].mag = float(mag.replace(',','.'))
  # Retrieve Station Information
  # Assign Network
  hd['network'] = 'TK'
  # Assing Location
  hd['location'] = '00'
  # Retrieve KSTNM
  _,stnm = head[6].split(':')
  kstnm = stnm.replace(' ','')
  hd['station'] = kstnm
  # Retrieve STLA, STLO
  _,coors = head[7].split(':')
  # Remove space, N and E 
  coors = coors.replace(' ','')
  coors = coors.replace('N','')
  coors = coors.replace('E','')
  stla,stlo = coors.split('-')
  hd['sac'].stla = float(stla.replace(',','.')); hd['sac'].stlo = float(stlo.replace(',','.'))
  # Retrieve STEL
  _,el = head[8].split(':')
  stel = el.replace(' ','')
  hd['sac'].stel = float(stel.replace(',','.'))
  # Retrieve Record Information
  # Retrieve Recordtime
  s = ''.join(i for i in head[11] if i.isdigit())
  starttime = datetime.strptime(s, '%d%m%Y%H%M%S%f')
  hd['starttime'] = UTCDateTime(starttime)
  hd['sac'].o = UTCDateTime(starttime) - UTCDateTime(evtime) 
  # Retrieve NPTS
  _,nptss = head[12].split(':')
  npts = nptss.replace(' ','')
  hd['npts'] = int(npts)
  # Retrieve DELTA
  _,dt = head[13].split(':')
  delta = dt.replace(' ','')
  hd['delta'] = float(delta.replace(',','.'))
  hd['sampling_rate'] = 1/hd['delta']
  hd['endtime'] = hd['starttime'] + hd['npts']*hd['delta']
  hd['sac'].lcalda = 1; hd['sac'].lovrok = 1
  # Read Waveform
  with open(txt,encoding='iso-8859-9') as eqfile:
    wfs = eqfile.readlines()[18:]
  wfs = [line.rstrip('\n') for line in wfs]
  wfs = [line.split(' ') for line in wfs]
  wfs = [list(filter(None, line)) for line in wfs]
  e = []; n = []; z = [];
  for line in wfs:
    n.append(line[0])
    e.append(line[1])
    z.append(line[2])
  #East
  tracee = Trace(np.asarray(e, dtype=float))
  hd['channel'] = 'HGE'
  tracee.stats = hd
  st = Stream(traces=[tracee])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  #North
  tracen = Trace(np.asarray(n, dtype=float))
  hd['channel'] = 'HGN'
  tracen.stats = hd
  st = Stream(traces=[tracen])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  #Vertical
  tracez = Trace(np.asarray(z, dtype=float))
  hd['channel'] = 'HGZ'
  tracez.stats = hd
  st = Stream(traces=[tracez])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  return
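
A minimal usage sketch for txt2sac; it assumes the fixed 14-line header layout parsed above and that the helpers mag_seperator and mag_type are defined elsewhere in the same module. The input file names and output directory are hypothetical placeholders.

import glob
import os

out = os.path.join(os.getcwd(), 'sac_out')
os.makedirs(out, exist_ok=True)
for txt in glob.glob('*.txt'):    # hypothetical per-record text files
    txt2sac(txt, output_dir=out)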
Example #4
def RGF_from_SW4(path_to_green=".",
                 t0=0,
                 file_name=None,
                 origin_time=None,
                 event_lat=None,
                 event_lon=None,
                 depth=None,
                 station_name=None,
                 station_lat=None,
                 station_lon=None,
                 output_directory="sw4out"):
    """
    Function to convert reciprocal Green's functions from SW4 to tensor format
    
    Reads the reciprocal Green's functions (displacement/unit force) from SW4 and
    performs the summation to get the Green's function tensor.
    RGFs from SW4 are oriented north, east and positive down by setting az=0.
    
    Assumes the following file structure:
    f[x,y,z]/station_name/event_name.[x,y,z]
    """

    import os
    from obspy.core import read, Stream
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.core.util.attribdict import AttribDict

    # Defined variables (do not change)
    dirs = ["fz", "fx", "fy"]  # directory to displacement per unit force
    du = [
        "duxdx", "duydy", "duzdz", "duydx", "duxdy", "duzdx", "duxdz", "duzdy",
        "duydz"
    ]
    orientation = ["Z", "N", "E"]  # set az=0 in SW4 so x=north, y=east
    cmpaz = [0, 0, 90]
    cmpinc = [0, 90, 90]

    # Create a new output directory under path_to_green
    dirout = "%s/%s" % (path_to_green, output_directory)
    if os.path.exists(dirout):
        print("Warning: output directory '%s' already exists." % dirout)
    else:
        print("Creating output directory '%s'." % dirout)
        os.mkdir(dirout)

    # Loop over each directory fx, fy, fz
    nsta = len(station_name)
    for i in range(3):
        # Set headers according to the orientation
        if dirs[i][-1].upper() == "Z":
            scale = -1  # change to positive up
        else:
            scale = 1

        # Loop over each station
        for j in range(nsta):
            station = station_name[j]
            stlo = station_lon[j]
            stla = station_lat[j]
            dirin = "%s/%s/%s" % (path_to_green, dirs[i], station)
            print("Reading RGFs from %s:" % (dirin))
            st = Stream()
            for gradient in du:
                fname = "%s/%s.%s" % (dirin, file_name, gradient)
                st += read(fname, format="SAC")

            # Set station headers
            starttime = origin_time - t0
            dist, az, baz = gps2dist_azimuth(event_lat, event_lon, stla, stlo)

            # SAC headers
            sacd = AttribDict()
            sacd.stla = stla
            sacd.stlo = stlo
            sacd.evla = event_lat
            sacd.evlo = event_lon
            sacd.az = az
            sacd.baz = baz
            sacd.dist = dist / 1000  # convert to kilometers
            sacd.o = 0
            sacd.b = -1 * t0
            sacd.cmpaz = cmpaz[i]
            sacd.cmpinc = cmpinc[i]
            sacd.kstnm = station

            # Update start time
            for tr in st:
                tr.stats.starttime = starttime
                tr.stats.distance = dist
                tr.stats.back_azimuth = baz

            # Sum displacement gradients to get reciprocal Green's functions
            tensor = Stream()
            for gradient, element in zip(["duxdx", "duydy", "duzdz"],
                                         ["XX", "YY", "ZZ"]):
                trace = st.select(channel=gradient)[0].copy()
                trace.stats.channel = "%s%s" % (orientation[i], element)
                tensor += trace

            trace = st.select(channel="duydx")[0].copy()
            trace.data += st.select(channel="duxdy")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XY")
            tensor += trace

            trace = st.select(channel="duzdx")[0].copy()
            trace.data += st.select(channel="duxdz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XZ")
            tensor += trace

            trace = st.select(channel="duzdy")[0].copy()
            trace.data += st.select(channel="duydz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "YZ")
            tensor += trace

            # Set sac headers before saving
            print("    Saving GFs to %s" % dirout)
            for tr in tensor:
                tr.trim(origin_time, tr.stats.endtime)
                tr.data = scale * tr.data
                tr.stats.sac = sacd
                sacout = "%s/%s.%.4f.%s" % (dirout, station, depth,
                                            tr.stats.channel)
                #print("Writing %s to file."%sacout)
                tr.write(sacout, format="SAC")