Example #1
def get_event_info(starttime, endtime, streams):
    events = []
    arrivals = {}
    try:
        client = FDSNClient("NERIES")
        events = client.get_events(starttime=starttime - 20 * 60,
                                   endtime=endtime)
        for ev in events[::-1]:
            has_arrivals = False
            origin = ev.origins[0]
            origin_time = origin.time
            lon1 = origin.longitude
            lat1 = origin.latitude
            depth = abs(origin.depth / 1e3)
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                # events[:-1] always clips the last entry, not necessarily
                # the current one; drop this event from the Catalog's list
                events.events.remove(ev)
    except Exception as e:
        msg = ("Problem while fetching events or determining theoretical "
               "phases: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
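
The getTravelTimes call above predates ObsPy 1.0, where that function was removed. A minimal sketch of the same lookup with the current TauPyModel API (iasp91 is an assumption; any bundled model name works):

from obspy.geodetics import locations2degrees
from obspy.taup import TauPyModel

model = TauPyModel(model="iasp91")
# example coordinates: Munich to Tokyo
dist = locations2degrees(48.1, 11.6, 35.7, 139.7)
arrivals = model.get_travel_times(source_depth_in_km=10.0,
                                  distance_in_degree=dist)
for arr in arrivals:
    print(arr.name, arr.time)  # phase name, travel time in seconds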
Example #2
def calculate_time_phase(event, sta):
    """
    calculate arrival time of the requested phase to use in retrieving
    waveforms.
    :param event:
    :param sta:
    :return:
    """

    ev_lat = event['latitude']
    ev_lon = event['longitude']
    ev_dp = abs(float(event['depth']))
    sta_lat = float(sta[4])
    sta_lon = float(sta[5])
    delta = locations2degrees(ev_lat, ev_lon, sta_lat, sta_lon)
    tt = taup.getTravelTimes(delta, ev_dp)
    phase_list = ['P', 'Pdiff', 'PKIKP']

    time_ph = 0
    flag = False
    for ph in phase_list:
        for arrival in tt:
            if arrival['phase_name'] == ph:
                flag = True
                time_ph = arrival['time']
                break
        if flag:
            print 'Phase: %s' % ph
            break
    t_start = event['t1'] + time_ph
    t_end = event['t2'] + time_ph
    return t_start, t_end
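
With the current TauPyModel API the same phase preference can be written against Arrival objects; a sketch under the P/Pdiff/PKIKP preference used above (depth and distance are placeholders):

from obspy.taup import TauPyModel

model = TauPyModel(model="iasp91")  # assumption: model choice is free
tt = model.get_travel_times(source_depth_in_km=30.0,
                            distance_in_degree=95.0,
                            phase_list=['P', 'Pdiff', 'PKIKP'])
time_ph = 0
for ph in ['P', 'Pdiff', 'PKIKP']:
    matches = [arr.time for arr in tt if arr.name == ph]
    if matches:
        time_ph = matches[0]  # first arrival of the preferred phase
        break
print(time_ph)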
Example #3
def get_neries_info(starttime, endtime, streams):
    events = []
    arrivals = {}
    try:
        client = neries.Client()
        events = client.getEvents(min_datetime=starttime - 20 * 60,
                                  max_datetime=endtime,
                                  format="list")
        for ev in events[::-1]:
            has_arrivals = False
            origin_time = ev['datetime']
            lon1 = ev['longitude']
            lat1 = ev['latitude']
            depth = abs(ev['depth'])
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                events.remove(ev)
    except Exception as e:
        msg = ("Problem while determining theoretical phases using "
               "neries/taup: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
Example #4
def loc2degrees(a, b):

    a1 = Basic.dictToLocation(a) if isinstance(a, dict) else a
    b1 = Basic.dictToLocation(b) if isinstance(b, dict) else b

    delta = locations2degrees(float(a1.lat), float(a1.lon), float(b1.lat),
                              float(b1.lon))
    return delta
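
A usage sketch; Location here is a hypothetical stand-in for the project's own class that Basic.dictToLocation produces, so only the normalisation idea carries over:

from collections import namedtuple

from obspy.geodetics import locations2degrees

# hypothetical stand-in for the project's Location class
Location = namedtuple('Location', ['lat', 'lon'])

def to_location(x):
    return Location(**x) if isinstance(x, dict) else x

a1 = to_location({'lat': 48.1, 'lon': 11.6})     # dict input
b1 = to_location(Location(lat=35.7, lon=139.7))  # object input
print(locations2degrees(float(a1.lat), float(a1.lon),
                        float(b1.lat), float(b1.lon)))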
Example #5
def get_time_correction(traces, source_dict):
    from obspy.taup import TauPyModel
    model = TauPyModel(model="iasp91")
    from obspy.core.util import locations2degrees
    time_correction = []
    for trace in traces:
        name = trace.stats.station
        lat_inline = source_dict[name][4]
        lon_inline = source_dict[name][3]
        lat = trace.stats.sac.stla
        lon = trace.stats.sac.stlo
        evla = trace.stats.sac.evla
        evlo = trace.stats.sac.evlo
        evdp = trace.stats.sac.evdp
        dist_inline = locations2degrees(lat_inline, lon_inline, evla, evlo)
        dist = locations2degrees(lat, lon, evla, evlo)

        time_inline = model.get_travel_times(
            source_depth_in_km=evdp / 1000.0,
            distance_in_degree=dist_inline, phase_list=["P"])[0].time
        time = model.get_travel_times(
            source_depth_in_km=evdp / 1000.0,
            distance_in_degree=dist, phase_list=["P"])[0].time
        time_correction.append(time - time_inline)
        # print time_inline - time
    return time_correction
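
Indexing [0] into the result of get_travel_times raises IndexError when the requested phase never arrives at that distance (direct P fades out in the core shadow zone); a guarded sketch of the same lookup:

from obspy.taup import TauPyModel

def first_arrival_time(model, depth_km, dist_deg, phase="P"):
    """Return the first arrival of `phase`, or None if it never arrives."""
    arrivals = model.get_travel_times(source_depth_in_km=depth_km,
                                      distance_in_degree=dist_deg,
                                      phase_list=[phase])
    return arrivals[0].time if arrivals else None

model = TauPyModel(model="iasp91")
print(first_arrival_time(model, 10.0, 60.0))   # a P travel time in seconds
print(first_arrival_time(model, 10.0, 110.0))  # typically None: P shadow zone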
Example #6
def check_collision(lats, lons, radius, dist_bt, angle_step):
    '''
    Check whether the moment tensors collide on the plot; if there are
    collisions, place the colliding MTs on a circle tied back to the
    location of the event.

    input:
    lats - list of latitudes of the events
    lons - list of longitudes of the events
    radius - radius of the circle on which to place the MTs
    dist_bt - distance threshold (km): if two MTs are closer than
        dist_bt, they are placed on the circle
    angle_step - angular increment (degrees) between MTs on the
        circle

    returns:
    lats_m - list of latitudes of the modified locations
    lons_m - list of longitudes of the modified locations
    indicator - list of flags showing which events were modified
    '''

    lats_m = np.zeros(len(lats))
    lons_m = np.zeros(len(lats))
    indicator = np.zeros(len(lats))

    angles = range(0, 360, angle_step)
    j = 0
    for i in range(len(lats) - 1):
        for k in range(i + 1, len(lats)):
            dist = locations2degrees(lats[i], lons[i], lats[k], lons[k]) * 111
            if dist < dist_bt:
                indicator[i] = 1
                indicator[k] = 1

    for i in range(len(lats)):
        if indicator[i] == 1:

            ix = j % len(angles)
            a = radius * np.cos(angles[ix] * math.pi / 180)
            b = radius * np.sin(angles[ix] * math.pi / 180)
            lats_m[i] = lats[i] + a
            lons_m[i] = lons[i] + b
            j += 1
        else:
            lats_m[i] = lats[i]
            lons_m[i] = lons[i]

    return lats_m, lons_m, indicator
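
A quick usage sketch, assuming check_collision and its numpy/math/obspy imports are in scope; the first two events are roughly 8 km apart, under the 50 km threshold, so both get pushed onto the circle:

lats = [10.0, 10.05, 40.0]
lons = [20.0, 20.05, 60.0]
lats_m, lons_m, indicator = check_collision(lats, lons, radius=2.0,
                                            dist_bt=50, angle_step=90)
print(indicator)  # [1. 1. 0.]: events 0 and 1 were moved, event 2 was not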
Example #7
def ray_density(lat1, lon1, lat2, lon2, dt=1, gr_x=360, gr_y=180, npts=180, projection='robin', ray_coverage=False):
    """
    Create the DATA array which contains the info for ray density

    Procedure:
    1. make a grid based on the inputs (grd)
       grd: lon, lat, x, y
    2. find the great-circle points
       (note that lon, lat are actually x and y!)
    3. calculate the distance and find the middle point
    4. subtract 97 degrees from the distance and take all the points on
       that section
    5. data ---> zero array with x*y elements
    """

    global long_0

    mymap = Basemap(projection=projection, lon_0=long_0, lat_0=0)
    #npts=max(gr_x, gr_y)
    # grd[2]: longitude
    # grd[3]: latitude
    grd = mymap.makegrid(gr_x, gr_y, returnxy=True)

    lons, lats = mymap.gcpoints(lon1, lat1, lon2, lat2, npts)
    dist = locations2degrees(lat1, lon1, lat2, lon2)

    # npts points cover dist degrees, so (dist-97)*npts/dist points cover
    # (dist-97) degrees; halve it to window symmetrically around the middle
    bap = int((dist - 97.0) * npts / dist) // 2
    midlon = len(lons) // 2
    midlat = len(lats) // 2
    lons = lons[midlon-bap:midlon+1+bap]
    lats = lats[midlat-bap:midlat+1+bap]

    data = np.zeros([len(grd[2]), len(grd[3])])
    if len(lons) != len(lats):
        sys.exit('ERROR: Lengths of longitudes and latitudes are not the '
                 'same! %s and %s' % (len(lons), len(lats)))

    for i in range(len(lons)):
        xi, yi = point_finder(lons[i], lats[i], grd)
        # first one is latitude and second longitude
        try:
            #data[yi][xi] = dt/float(dist-97.0)
            data[yi][xi] += dt/len(lons)
        except Exception, e:
            print '\nException: %s' % e
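
point_finder is not shown in the snippet; a plausible minimal implementation (an assumption, not the project's own code) maps a projected x/y pair onto the nearest node of the Basemap grid:

import numpy as np

def point_finder(x, y, grd):
    """Hypothetical helper: indices of the grid node nearest to (x, y).

    grd is the 4-tuple from Basemap.makegrid(..., returnxy=True);
    grd[2] and grd[3] hold the projected x and y node coordinates.
    """
    xi = int(np.abs(grd[2][0, :] - x).argmin())
    yi = int(np.abs(grd[3][:, 0] - y).argmin())
    return xi, yi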
Example #8
def filterStationsDistance(stationList, parameter):
    '''
    Filters station list via distance attribute

    :type stationList: list
    :param stationList: list of station objects
    :type parameter: dict
    :param parameter: dict of parameters used by the filter routines
    '''

    D = []

    for stationobject in stationList:
        sdelta = locations2degrees(parameter['elat'], parameter['elon'],
                                   float(stationobject.lat),
                                   float(stationobject.lon))

        if parameter['minDist'] < sdelta < parameter['maxDist']:
            D.append(stationobject)

    return D
Example #9
def getSlowestStation(lat,lon,depth,calc):
    client = Client("IRIS")
    inventory = client.get_stations(latitude=lat, longitude=lon,maxradius=1.5)
    lats = []
    lons = []
    codes = []
    for network in inventory.networks:
        for station in network.stations:
            lats.append(station.latitude)
            lons.append(station.longitude)
            codes.append(station.code)
    lats = np.array(lats)
    lons = np.array(lons)
    codes = np.array(codes)
    distances = []
    times = []
    for i in range(0,len(lats)):
        slat = lats[i]
        slon = lons[i]
        distance = locations2degrees(lat,lon,slat,slon)
        distances.append(distance)
        ptime,stime = calc.getTravelTimes(distance,depth)
        times.append(ptime)
    times = np.array(times)
    distances = np.array(distances)
    sortidx = np.argsort(distances)
    distances = distances[sortidx]
    times = times[sortidx]
    lats = lats[sortidx]
    lons = lons[sortidx]
    codes = codes[sortidx]
    distances = distances[0:4]
    times = times[0:4] + TELEMETRY_DELAY + PROCESSING_DELAY
    lats = lats[0:4]
    lons = lons[0:4]
    codes = codes[0:4]
    idx = times.argmax()
    sdict = {'lat':lats[idx],'lon':lons[idx],'time':times[idx],'code':codes[idx]}
    return sdict
Example #10
def ProcessLoopP(filepath):
    '''The file processing loop associated with P wave tomography: Just deal with the BHZ files to save time'''

    sacfiles = glob.glob('*.BHZ')

    #Check to see if there is more than one location for a given station
    stationnames = []
    for sacfile in sorted(sacfiles):
      sacfilenameparts = sacfile.split('.')
      stationname = sacfilenameparts[2]

      if stationname not in stationnames:
        stationnames.append(stationname)

        trace = read(sacfile)

        evlat = trace[0].stats.sac.evla
        evlon = trace[0].stats.sac.evlo
        evdep = trace[0].stats.sac.evdp
        stlat = trace[0].stats.sac.stla
        stlon = trace[0].stats.sac.stlo

        dist = locations2degrees(evlat,evlon,stlat,stlon) #find distance from the quake to the station
        arcs = IRISclient.distaz(stalat=stlat,stalon=stlon,evtlat=evlat,evtlon=evlon)
        az = arcs['backazimuth']
        baz = arcs['azimuth']

        #If we're running this code twice in a row, we need to correct evdep accordingly. The evdep that comes
        #from obspy will be in km, but needs to be in the SAC header in meters. If the depth is already in meters
        #in the SAC file, it will almost certainly be >1000. This if statement checks for that and converts to km
        #if necessary

        if evdep > 1e3:
          evdep = evdep/1000.0

        traveltimes = getTravelTimes(dist,evdep, model='iasp91')
        
        #Initialise to zero so that a phase missing from this event (or a
        #stale Ptime/Stime left over from a previous file in this loop)
        #cannot leak into the headers
        P = 0
        S = 0
        Ptime = 0
        Stime = 0

        for element in traveltimes:
            phaseinfo = element['phase_name']
            if phaseinfo == 'P':
               Ptime = element['time']
               P = 1
            if phaseinfo == 'S':
               Stime = element['time']
               S = 1
            if (P==1 and S==1):
               break

        #Set the P and S times
        trace[0].stats.sac.az = float(az)
        trace[0].stats.sac.baz = float(baz)
        trace[0].stats.sac.o = 0.0 #add origin time
        
        if Ptime > 0:
          trace[0].stats.sac.t1 = Ptime
        if Stime > 0:
          trace[0].stats.sac.t2 = Stime

        trace[0].stats.sac.evdp = evdep*1000 #dbpick wants depth to be in meters

        #other operations
        trace[0].detrend('demean')

        #THE CROSS CORRELATION CODE ASSUMES A SAMPLING RATE OF 0.05 (20 SAMPLES/SECOND). IT WILL NOT WORK
        #OTHERWISE!!!

        trace[0].resample(20)

        if results.autop:

          tracestreamP = trace.copy()

          df = tracestreamP[0].stats.sampling_rate
          filter1 = 0.02
          filter2 = 0.1
          tracestreamP.filter("bandpass",freqmin=filter1,freqmax=filter2,corners=2)
          tracestreamP.taper(max_percentage=0.05, type='cosine')

          p_pick, phase_info = pkBaer(tracestreamP[0].data,df,10,2,2,10,20,6) #output from this is in samples.

          autoPtime = p_pick/df

          #Append the autopicker's time to the SAC header (t3)
          if abs(Ptime-autoPtime) < 20:
            print 'Autopick accepted!'
            trace[0].stats.sac.t3 = autoPtime
           
        #Important - must write to the SAC file!
        trace.write(sacfile,format='SAC')
           
        print 'Appended arrivals to %s' %sacfile

      else:
        print 'Found multiple instruments at station %s. Removing all but 1' %(stationname)
        os.system('rm %s' %sacfile)


    #create antelope database for P arrivals
    os.system('sac2db *.BHZ Z')
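
The metres-vs-kilometres guard above is worth keeping as a small helper; a sketch (the >1000 threshold is the snippet's own heuristic, not a SAC rule):

def depth_in_km(evdp):
    """Normalise a SAC evdp value to km, treating values > 1000 as metres."""
    return evdp / 1000.0 if evdp > 1e3 else evdp

print(depth_in_km(35.0))     # already km -> 35.0
print(depth_in_km(35000.0))  # metres -> 35.0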
Example #11
def naloc3(csspath, nalocfile, locmd='./loc3d_aug', ttgridpath='../ttgrid',filter='*'):
    
    # if ttgridpath != "../austtg":
    #     os.system('ln -s %s ../austtg' % (ttgridpath) )

    if ttgridpath != "../ttgrid":
        os.system('ln -s %s ../ttgrid' % (ttgridpath) )

    # get CSS3 arrival file
    cssfilelst = glob.glob(csspath+"/"+filter+"*.arrival")
    cssfilelst.sort()
    
    logFile = open('naloc.log','w')
    locfp = open(nalocfile, 'a+')
    
    
    evid = 0
    
    for cssfile in cssfilelst:
        print cssfile 
        
        os.system('cp ' + cssfile + ' arrival_list')
        
        fp = open('arrival_list','r')
        lineTemp = fp.readlines()
        fp.close()
        numberPhase = len(lineTemp)
        
        if numberPhase < 3:
            continue    
        
        row = lineTemp[0].split()
        originOld = UTCDateTime(float(row[14]))
        evlaOld = float(row[15])
        evloOld = float(row[16])
        evdpOld = float(row[17])
            
        p = subprocess.Popen([locmd],stdout = logFile).communicate()[0]
    #     os.system('./loc3d_aug')
        
        if not os.path.exists('fort.13'):
            logFile.write('Location failed.')
    #         print 'Loc fail'
        else:
            fp = open('fort.13', 'r')
            line = fp.readline()
            fp.close()
            row = line.split()
            evla = float(row[1])
            evlo = float(row[2])
            evdp = float(row[3])
            origin = UTCDateTime(float(row[10]))+float(row[4])
                    
            eorigin = origin - originOld        
            edist = locations2degrees(evla, evlo, evlaOld, evloOld)
            edepth = evdp - evdpOld
            
            [date, time] = datetimefmt(origin)
            [dateold, timeold] = datetimefmt(originOld)
            
    #         sstring = '%s %12.4f %12.4f %8.3f %s %12.4f %12.4f %8.3f %8.3f %8.3f %8.3f %4d \n' % (str(origin), evla, evlo, evdp, str(originOld), evlaOld, evloOld, evdpOld, eorigin, edist, edepth, numberPhase)
    #         locfp.write(sstring)
    #         evid += 1
            sstring = '%12s %14s %12.4f %12.4f %8.3f %12s %14s %12.4f %12.4f %8.3f %8.3f %8.3f %8.3f %4d \n' % (date, time, evla, evlo, evdp, dateold, timeold, evlaOld, evloOld, evdpOld, eorigin, edist, edepth, numberPhase)
            locfp.write(sstring)
            
            try:
                #os.remove('fort.13')
                pass
            except:
                pass
            
            swCleanupLoc()
            
    locfp.close()
    logFile.close()
    
    if ttgridpath != "../austtg":
        os.system('rm -rf ../austtg')
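
The shell round-trips (cp, ln -s, rm -rf) can be done in-process; a sketch with the standard library, using placeholder paths:

import os
import shutil

ttgridpath = "/data/ttgrid"           # placeholder
cssfile = "/data/css/ev0001.arrival"  # placeholder

if ttgridpath != "../ttgrid":
    if os.path.lexists("../ttgrid"):
        os.remove("../ttgrid")           # clear a stale link first
    os.symlink(ttgridpath, "../ttgrid")  # ln -s ttgridpath ../ttgrid
shutil.copy(cssfile, "arrival_list")     # cp cssfile arrival_list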
Example #12
def naloc3v2(csspath, nalocfile, locmd='./loc3d_aug', ttgridpath='../austtg',filter='*'):
    
    if ttgridpath != "../austtg":
        os.system('ln -s %s ../austtg' % (ttgridpath) )
        
    resdir = "./residuals"
    if not os.path.exists(resdir):
        os.makedirs(resdir)
    
    # get CSS3 arrival file
    cssfilelst = glob.glob(csspath+"/"+filter+"*.arrival")
    cssfilelst.sort()
    
    logFile = open('naloc.log','w')
    locfp = open(nalocfile, 'w')
    
    
    evid = 0
    
    for cssfile in cssfilelst:
        print cssfile 
        
        os.system('cp ' + cssfile + ' arrival_list')
        
#         try:
#             os.system("rm fort.13")
#         except:
#             pass
        
        fp = open('arrival_list','r')
        lineTemp = fp.readlines()
        fp.close()
        numberPhase = len(lineTemp)
        
        if numberPhase < 3:
            continue    
        
        row = lineTemp[0].split()
        originOld = UTCDateTime(float(row[14]))
        evlaOld = float(row[15])
        evloOld = float(row[16])
        evdpOld = float(row[17])
            
        p = subprocess.Popen([locmd],stdout = logFile).communicate()[0]
    #     os.system('./loc3d_aug')
        
        if not os.path.exists('fort.13'):
            logFile.write('Location failed.')
    #         print 'Loc fail'
        else:
            fp = open('fort.13', 'r')
            line = fp.readline()
            fp.close()
            row = line.split()
            evla = float(row[1])
            evlo = float(row[2])
            evdp = float(row[3])
            origin = UTCDateTime(float(row[10]))+float(row[4])
                    
            eorigin = origin - originOld        
            edist = locations2degrees(evla, evlo, evlaOld, evloOld)
            edepth = evdp - evdpOld
            
            [date, time] = datetimefmt(origin)
            [dateold, timeold] = datetimefmt(originOld)
            
            # open residuals.dat
            with open("residuals.dat", "r") as fp:
                lst = fp.readlines()
                
            pfres = float(lst[-2].split()[-1])
            sfres = float(lst[-1].split()[-1])
            
    #         sstring = '%s %12.4f %12.4f %8.3f %s %12.4f %12.4f %8.3f %8.3f %8.3f %8.3f %4d \n' % (str(origin), evla, evlo, evdp, str(originOld), evlaOld, evloOld, evdpOld, eorigin, edist, edepth, numberPhase)
    #         locfp.write(sstring)
    #         evid += 1
            sstring = '%12s %14s %12.4f %12.4f %8.3f %12s %14s %12.4f %12.4f %8.3f %8.3f %8.3f %8.3f %4d %8.3f %8.3f\n' % (date, time, evla, evlo, evdp, dateold, timeold, evlaOld, evloOld, evdpOld, eorigin, edist, edepth, numberPhase, pfres, sfres)
            locfp.write(sstring)
            
            
            resfn = resdir+"/"+dateold.replace("-","")+timeold.replace(":", "")[0:6]+".res"
            os.system("cp residuals.dat %s" % resfn)
            
            swCleanupLoc()
            
    locfp.close()
    logFile.close()
    
    if ttgridpath != "../austtg":
        os.system('rm -rf ../austtg')
Example #13
def main(args):
    globaldict = getGlobalConfig()
    shakehome = globaldict['shakehome']
    popfile = globaldict['popfile']
    if shakehome is None:
        print 'Cannot find ShakeMap home folder on this system.'
        sys.exit(1)
    datadir = os.path.join(shakehome,'data',args.event)
    if not os.path.isdir(datadir):
        print 'Cannot find event %s on the system' % args.event
        sys.exit(1)

    #Make sure the timeoutput folder is there (can't put our time grids in output - that gets
    #wiped out every time shakemap runs)
    outfolder = os.path.join(datadir,'timeoutput')
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)
        
    #now look for config file in top-level folder
    configfile = os.path.join(datadir,'alert.conf')
    if not os.path.isfile(configfile):
        print 'Cannot find alert config file for %s in the data directory' % args.event
        sys.exit(1)
    config = ConfigParser.ConfigParser()
    config.readfp(open(configfile))

    #get the bounds of the map so we can find cities
    xmin = float(config.get('MAP','xmin'))
    xmax = float(config.get('MAP','xmax'))
    ymin = float(config.get('MAP','ymin'))
    ymax = float(config.get('MAP','ymax'))
    
    citylist = getCityList(xmin,xmax,ymin,ymax,globaldict['cityfile'])
    
    #Get the MMI threshold below which alert times will NOT be saved
    mmithresh = float(config.get('MAP','mmithresh'))

    #get the array of epicenters
    lats = [float(p) for p in config.get('FAULT','lats').split()]
    lons = [float(p) for p in config.get('FAULT','lons').split()]

    #write out a new grind.conf file
    writeGrind(config,datadir)

    #instantiate our p/s travel time calculator
    calc = TravelTimeCalculator()

    #where is the grind binary?
    grindbin = os.path.join(shakehome,'bin','grind')

    #specify the event.xml file, get the depth of the event
    eventfile = os.path.join(datadir,'input','event.xml')
    root = parse(eventfile)
    eq = root.getElementsByTagName('earthquake')[0]
    depth = float(eq.getAttribute('depth'))
    root.unlink()

    #get the dimensionality of the grid file and of the pop grid we'll interpolate to
    gridfile = os.path.join(datadir,'output','grid.xml')
    if not os.path.isfile(gridfile):
        grindcmd = '%s -event %s' % (grindbin,args.event)
        res,stdout,stderr = getCommandOutput(grindcmd)
    mmigrid = ShakeGrid(gridfile,variable='MMI')
    popgrid = EsriGrid(popfile)
    popgrid.load(bounds=mmigrid.getRange())
    m,n = popgrid.griddata.shape
    
    #loop over all the event realizations
    timefiles = []
    timestack = np.zeros((m,n,len(lats)),dtype=np.float32)
    for i in range(0,len(lats)):
        print 'Calculating arrival times for scenario %i of %i' % (i+1,len(lats))
        lat = lats[i]
        lon = lons[i]
        if i == 0:
            lonoff = 0
            latoff = 0
        else:
            lonoff = -1* (lons[i] - lons[i-1])
            latoff = lats[i] - lats[i-1]
        #modify the event.xml file to have the new lat/lon epicenter
        sourcetext = getEventText(eventfile,lat,lon)
        f = open(eventfile,'wt')
        f.write(sourcetext)
        f.close()

        sdict = getSlowestStation(lat,lon,depth,calc)
        ptime = sdict['time']
        stationlat = sdict['lat']
        stationlon = sdict['lon']
        
        grindcmd = '%s -latoff %f -lonoff %f -event %s' % (grindbin,latoff,lonoff,args.event)
        res,stdout,stderr = getCommandOutput(grindcmd)
        if not res:
            print 'Grind command failed: "%s", "%s"' % (stdout,stderr)
            sys.exit(1)
            
        #Get the grid.xml output, do some time calculations
        mmigrid = ShakeGrid(gridfile,variable='MMI')
        timegrid = np.zeros((m,n),dtype=np.float32)
        
        for row in range(0,m):
            for col in range(0,n):
                mmilat,mmilon = mmigrid.getLatLon(row,col)
                distance = locations2degrees(lat,lon,mmilat,mmilon)
                tmp,stime = calc.getTravelTimes(distance,depth)
                timegrid[row,col] = stime - ptime

        #debugging
        f = plt.figure()
        plt.subplot(2,1,1)
        plt.imshow(mmigrid.griddata)
        plt.colorbar()
        plt.subplot(2,1,2)
        plt.imshow(timegrid)
        plt.colorbar()
        plt.savefig(os.path.join(outfolder,'timegrid.png'))
        plt.close(f)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            exposure,timegrid = getTimeExposure(timegrid,mmigrid,popfile,mmithresh)
        print 'Population Warning Times for epicenter %.4f,%.4f' % (lat,lon)
        printExposure(exposure)
        expofile = os.path.join(outfolder,'expo%03i.json' % (i+1))
        f = open(expofile,'wt')
        f.write(json.dumps(exposure))
        f.close()
        timefile = os.path.join(outfolder,'timegrid%03i.flt' % (i+1))
        timefiles.append(timefile)
        metadict = {'epilat':lat,'epilon':lon,'eventid':args.event}
        saveTimeGrid(timefile,timegrid,mmigrid.geodict,metadict)
        timestack[:,:,i] = timegrid
        alertgrid = popgrid
        alertgrid.griddata = timegrid
        makeMap(alertgrid,'alertmap_%i' % i,outfolder,popfile,globaldict['popcolormap'],sdict,citylist,[lat],[lon])
        
        
    methods = config.get('MAP','output').split(',')
    for method in methods:
        if method == 'median':
            statgrid = np.median(timestack,axis=2)
        if method == 'mean':
            statgrid = np.nanmean(timestack,axis=2)
        if method == 'min':
            statgrid = np.nanmin(timestack,axis=2)
        if method == 'max':
            statgrid = np.nanmax(timestack,axis=2)
        timegrid = popgrid
        timegrid.griddata = statgrid
        makeMap(timegrid,method,outfolder,popfile,globaldict['popcolormap'],sdict,citylist,lats,lons)
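The chain of if method == ... blocks at the end can be table-driven, which also surfaces typos in the config value; a sketch:

import numpy as np

STATS = {
    'median': lambda s: np.median(s, axis=2),
    'mean': lambda s: np.nanmean(s, axis=2),
    'min': lambda s: np.nanmin(s, axis=2),
    'max': lambda s: np.nanmax(s, axis=2),
}

timestack = np.random.rand(3, 4, 5)  # placeholder (m, n, scenarios) stack
for method in ('median', 'max'):
    statgrid = STATS[method](timestack)  # KeyError on an unknown method
    print(method, statgrid.shape)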
Example #14
def cc_core(ls_first, ls_second, identity_all, max_ts, print_sta):
    """
    Perform the main part of the cross correlation and creating 
    the cc.txt file
    """

    global input

    try:

        cc_open = open('./cc.txt', 'a')

        tr1 = read(ls_first)[0]

        if input['phase'] != 'N':
            evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                    long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                    long2 = tr1.stats.sac.stlo)

            taup_tt = taup.getTravelTimes(delta=evsta_dist,
                                          depth=tr1.stats.sac.evdp)

            phase_exist = 'N'

            for tt_item in taup_tt:
                if tt_item['phase_name'] == input['phase']:
                    print 'Requested phase:'
                    print input['phase']
                    print '------'
                    print tt_item['phase_name']
                    print 'exists in the waveform!'
                    print '-----------------------'
                    t_phase = tt_item['time']

                    phase_exist = 'Y'
                    break

        if input['phase'] == 'N' or (input['phase'] != 'N'
                                     and phase_exist == 'Y'):

            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel

            # Keep the current identity in a new variable
            id_name = identity

            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception, error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]

            if input['resample'] != 'N':
                print 'WARNING: you are using resample!!!'
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])

            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime=t_cut_1, endtime=t_cut_2)

                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime=t_cut_1, endtime=t_cut_2)

            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)

            # normalization of all three waveforms to the
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            '''
            maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            tr1_data = tr1.data/abs(maxi)
            tr2_data = tr2.data/abs(maxi)
            tr3_data = tr3.data/abs(maxi)
            '''
            tr1.data = tr1.data / abs(max(tr1.data))
            tr2.data = tr2.data / abs(max(tr2.data))

            cc_np = tr1.stats.sampling_rate * max_ts
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
            t_shift = float(np_shift) / tr1.stats.sampling_rate

            # scale_str shows whether the scale of the waveforms are the same or not
            # if scale_str = 'Y' then the scale is correct.
            scale_str = 'Y'

            if abs(tr1.data).max() > 2.0 * abs(tr2.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print '#####################################################'
                print "Scale is not correct! " + label_tr1 + '>' + label_tr2
                print '#####################################################'
                scale_str = 'N'
            elif abs(tr2.data).max() >= 2.0 * abs(tr1.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print '#####################################################'
                print "Scale is not correct! " + label_tr2 + '>' + label_tr1
                print '#####################################################'
                scale_str = 'N'

            if not str(coeff) == 'nan':
                cc_open.writelines(id_name + ',' + str(round(coeff, 4)) + ',' + str(t_shift) + \
                                                ',' + scale_str + ',' + '\n')

            print "Cross Correlation:"
            print id_name
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)
            print print_sta
            print '------------------'

            cc_open.close()

    except Exception, error:
        print '##################'
        print error
        print '##################'
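
obspy.signal.cross_correlation.xcorr was removed in later ObsPy releases; a sketch of the same shift/coefficient computation with correlate and xcorr_max (assuming ObsPy >= 1.1):

from obspy import read
from obspy.signal.cross_correlation import correlate, xcorr_max

tr1 = read()[0]  # ObsPy's bundled example trace
tr2 = tr1.copy()
max_ts = 5.0     # maximum shift to search, in seconds
cc_np = int(tr1.stats.sampling_rate * max_ts)
cc = correlate(tr1, tr2, cc_np)
np_shift, coeff = xcorr_max(cc)
t_shift = float(np_shift) / tr1.stats.sampling_rate
print(t_shift, coeff)  # 0.0, 1.0 for identical traces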
Example #15
def single_comparison():
    """
    one by one comparison of the waveforms in the first path with the second path.
    """

    client = Client()

    global input

    # identity of the waveforms (first and second paths) to be compared with each other
    identity_all = input['net'] + '.' + input['sta'] + '.' + \
                    input['loc'] + '.' + input['cha']
    ls_first = glob.glob(os.path.join(input['first_path'], identity_all))
    ls_second = glob.glob(os.path.join(input['second_path'], identity_all))

    for i in range(0, len(ls_first)):
        try:
            tr1 = read(ls_first[i])[0]

            if input['phase'] != 'N':
                evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                        long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                        long2 = tr1.stats.sac.stlo)

                taup_tt = taup.getTravelTimes(delta=evsta_dist,
                                              depth=tr1.stats.sac.evdp)

                phase_exist = 'N'

                for tt_item in taup_tt:
                    if tt_item['phase_name'] == input['phase']:
                        print 'Requested phase:'
                        print input['phase']
                        print '------'
                        print tt_item['phase_name']
                        print 'exists in the waveform!'
                        print '-----------------------'
                        t_phase = tt_item['time']

                        phase_exist = 'Y'
                        break

                if phase_exist != 'Y':
                    continue

            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel

            # tr1: first path, tr2: second path, tr3: Raw data
            #tr3 = read(os.path.join(input['first_path'], '..', 'BH_RAW', identity))[0]

            if input['resp_paz'] == 'Y':
                response_file = os.path.join(input['first_path'], '..',
                                             'Resp/RESP.' + identity)

                # Extract the PAZ info from response file
                paz = readRESP(response_file, unit=input['corr_unit'])

                poles = paz['poles']
                zeros = paz['zeros']
                scale_fac = paz['gain']
                sensitivity = paz['sensitivity']

                print paz

                # Convert Poles and Zeros (PAZ) to frequency response.
                h, f = pazToFreqResp(poles, zeros, scale_fac, \
                                1./tr1.stats.sampling_rate, tr1.stats.npts*2, freq=True)
                # Use the evalresp library to extract
                # instrument response information from a SEED RESP-file.
                resp = invsim.evalresp(t_samp = 1./tr1.stats.sampling_rate, \
                        nfft = tr1.stats.npts*2, filename = response_file, \
                        date = tr1.stats.starttime, units = input['corr_unit'].upper())

            # Keep the current identity in a new variable
            id_name = identity

            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception, error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]

            if input['resample'] != 'N':
                print 'WARNING: you are using resample!!!'
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])

            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime=t_cut_1, endtime=t_cut_2)

                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime=t_cut_1, endtime=t_cut_2)

            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)

            # normalization of all three waveforms to the
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())

            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            #tr1_data = tr1.data/abs(maxi)
            #tr2_data = tr2.data/abs(maxi)
            #tr3_data = tr3.data/abs(maxi)

            tr1_data = tr1.data / abs(max(tr1.data))
            tr2_data = tr2.data / abs(max(tr2.data))

            #tr1_data = tr1.data
            #tr2_data = tr2.data*1e9

            print max(tr1.data)
            print max(tr2.data)

            # create time arrays for tr1, tr2 and tr3
            time_tr1 = np.arange(0, tr1.stats.npts/tr1.stats.sampling_rate, \
                                                1./tr1.stats.sampling_rate)
            time_tr2 = np.arange(0, tr2.stats.npts/tr2.stats.sampling_rate, \
                                                1./tr2.stats.sampling_rate)
            #time_tr3 = np.arange(0, tr3.stats.npts/tr3.stats.sampling_rate, \
            #                                    1./tr3.stats.sampling_rate)

            # label for plotting
            label_tr1 = ls_first[i].split('/')[-2]
            label_tr2 = ls_second[i].split('/')[-2]
            label_tr3 = 'RAW'

            if input['resp_paz'] == 'Y':
                # start plotting
                plt.figure()
                plt.subplot2grid((3, 4), (0, 0), colspan=4, rowspan=2)
                #plt.subplot(211)

            plt.plot(time_tr1, tr1_data, color='blue', label=label_tr1, lw=3)
            plt.plot(time_tr2, tr2_data, color='red', label=label_tr2, lw=3)
            #plt.plot(time_tr3, tr3_data, color = 'black', ls = '--', label = label_tr3)

            plt.xlabel('Time (sec)', fontsize='xx-large', weight='bold')

            if input['corr_unit'] == 'dis':
                ylabel_str = 'Relative Displacement'
            elif input['corr_unit'] == 'vel':
                ylabel_str = 'Relative Vel'
            elif input['corr_unit'] == 'acc':
                ylabel_str = 'Relative Acc'

            plt.ylabel(ylabel_str, fontsize='xx-large', weight='bold')

            plt.xticks(fontsize='xx-large', weight='bold')
            plt.yticks(fontsize='xx-large', weight='bold')

            plt.legend(loc=1, prop={'size': 20})

            #-------------------Cross Correlation
            # 3 seconds as total length of samples to shift for cross correlation.

            cc_np = tr1.stats.sampling_rate * 3

            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))

            t_shift = float(np_shift) / tr1.stats.sampling_rate

            print "Cross Correlation:"
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)

            plt.title('Single Comparison' + '\n' + str(t_shift) + \
                        ' sec , coeff: ' + str(round(coeff, 5)) + \
                        '\n' + id_name, \
                        fontsize = 'xx-large', weight = 'bold')

            if input['resp_paz'] == 'Y':
                # -----------------------
                #plt.subplot(223)
                plt.subplot2grid((3, 4), (2, 0), colspan=2)
                '''
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.plot(np.log10(f), np.log10(abs(h)/sensitivity), \
                                            color = 'red', label = 'PAZ', lw=3)
                '''
                plt.loglog(f, abs(resp)/(sensitivity*sensitivity), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.loglog(f, abs(h)/sensitivity, \
                                            color = 'red', label = 'PAZ', lw=3)

                # reference frequencies; on log axes pass the frequency itself
                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(j, linestyle='--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                #plt.ylabel('Amplitude\n      (power of 10)', fontsize = 'xx-large', weight = 'bold')

                plt.xlabel('Frequency [Hz]',
                           fontsize='xx-large',
                           weight='bold')
                plt.ylabel('Amplitude', fontsize='xx-large', weight='bold')

                plt.xticks(fontsize='xx-large', weight='bold')

                #plt.yticks = MaxNLocator(nbins=4)
                plt.yticks(fontsize='xx-large', weight='bold')
                plt.legend(loc=2, prop={'size': 20})

                # -----------------------
                #plt.subplot(224)
                plt.subplot2grid((3, 4), (2, 2), colspan=2)

                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                #plt.plot(np.log10(f), phase_resp, color = 'blue', label = 'RESP', lw=3)
                #plt.plot(np.log10(f), phase_paz, color = 'red', label = 'PAZ', lw=3)

                plt.semilogx(f, phase_resp, color='blue', label='RESP', lw=3)
                plt.semilogx(f, phase_paz, color='red', label='PAZ', lw=3)

                # reference frequencies; on log axes pass the frequency itself
                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(j, linestyle='--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                plt.xlabel('Frequency [Hz]',
                           fontsize='xx-large',
                           weight='bold')
                plt.ylabel('Phase [radian]',
                           fontsize='xx-large',
                           weight='bold')

                plt.xticks(fontsize='xx-large', weight='bold')
                plt.yticks(fontsize='xx-large', weight='bold')

                plt.legend(loc=3, prop={'size': 20})

                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.4, hspace=0.3)
                """
                # -----------------------
                plt.subplot(325)
                
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)) - \
                                        np.log10(abs(h)/sensitivity), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Amplitude (power of 10)')

                plt.legend()
                
                # -----------------------
                plt.subplot(326)
                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                plt.plot(np.log10(f), np.log10(phase_resp) - np.log10(phase_paz), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Phase [radian] (power of 10)')

                plt.legend()

                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.3)
                """
            plt.show()

            print str(i + 1) + '/' + str(len(ls_first))
            print ls_first[i]
            print '------------------'
            wait = raw_input(id_name)
            print '***************************'

        except Exception, error:
            print '##################'
            print error
            print '##################'
Example #16
    def downloadRemoteData(self, net, evtTime, evtLat, evtLon, evtDep, mag, fileFormat):

        acceptedStations = [
            "SPB",
            "CPUP",
            "LPAZ",
            "PLCA",
            "LCO",
            "LVC",
            "OTAV",
            "PTGA",
            "RCBR",
            "SAML",
            "SDV",
            "TRQA",
            "CAN",
            "FDF",
            "HDC",
            "INU",
            "KIP",
            "PPTF",
            "TAM",
            "TRIS",
            "CNG",
            "CVNA",
            "GRM",
            "HVD",
            "SWZ",
            "UPI",
            "WIN",
        ]

        stations = self.iris.station(network=net, station="*", starttime=evtTime, endtime=evtTime + 3600, level="sta")
        dom = parseString(stations)
        nStations = len(dom.getElementsByTagName("Station"))
        i = 0
        while i < nStations:
            for node in dom.getElementsByTagName("Station"):
                staName = node.getAttribute("sta_code")
                if staName not in acceptedStations:
                    i += 1
                else:
                    xmlStationLat = dom.getElementsByTagName("Lat")[i].toxml()
                    staLat = float(xmlStationLat.replace("<Lat>", "").replace("</Lat>", ""))
                    xmlStationLon = dom.getElementsByTagName("Lon")[i].toxml()
                    staLon = float(xmlStationLon.replace("<Lon>", "").replace("</Lon>", ""))
                    xmlStationElev = dom.getElementsByTagName("Elevation")[i].toxml()
                    staElev = float(xmlStationElev.replace("<Elevation>", "").replace("</Elevation>", ""))

                    # delta = taup.locations2degrees(evtLat, evtLon, staLat, staLon) # delta stores distance in degrees
                    delta = util.locations2degrees(evtLat, evtLon, staLat, staLon)  # delta stores distance in degrees
                    itp = taup()  # calling IAG Taup class
                    itp.getTravelTimes(delta, evtDep)
                    pTime = itp.P()
                    sTime = itp.S()
                    # to use in sac headers...
                    seisTime = evtTime + pTime - self.beforeP
                    originTime = self.beforeP - pTime

                    fileTime = str(evtTime).replace("T", "-").replace(":", "-")[:-8]

                    try:
                        st = self.iris.getWaveform(
                            net, staName, "*", "BH*", evtTime + pTime - self.beforeP, evtTime + sTime + self.afterS
                        )
                        print "\nData found for station " + staName
                        st.merge(method=1, fill_value="interpolate")

                        try:
                            os.mkdir(staName)
                        except:
                            pass

                        os.mkdir(staName + "/" + fileTime)
                        for tr in st:
                            loc = str(tr.stats.location)
                            fileName = (
                                staName
                                + "/"
                                + fileTime
                                + "/"
                                + net
                                + "."
                                + staName
                                + "."
                                + loc
                                + "."
                                + str(tr.stats.channel)
                                + "."
                                + fileTime
                                + "."
                                + fileFormat
                            )
                            tr.write(fileName, fileFormat)
                            print fileName, "saved."
                            if fileFormat == "SAC":
                                self.fillSACHeaders(
                                    fileName, seisTime, originTime, evtLat, evtLon, evtDep, mag, staLat, staLon, staElev
                                )
                    except:
                        pass
                    i += 1
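
The hand-rolled XML string parsing above can be avoided entirely with the FDSN client's inventory objects; a sketch (network code and time are placeholders):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("IRIS")
t = UTCDateTime("2020-01-01T00:00:00")
inv = client.get_stations(network="IU", starttime=t, endtime=t + 3600,
                          level="station")
for net in inv:
    for sta in net:
        print(sta.code, sta.latitude, sta.longitude, sta.elevation)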
Example #17
def _validate_and_write_waveforms(st, callback, starttime, endtime, scale,
                                  source, receiver, db, label, format):
    if not label:
        label = ""
    else:
        label += "_"

    for tr in st:
        # Half the filesize but definitely sufficiently accurate.
        tr.data = np.require(tr.data, dtype=np.float32)

    if scale != 1.0:
        for tr in st:
            tr.data *= scale

    # Sanity checks. Raise internal server errors in case something fails.
    # This should not happen and should have been caught before.
    if endtime > st[0].stats.endtime:
        msg = ("Endtime larger than the extracted endtime: endtime=%s, "
               "largest db endtime=%s" % (
                _format_utc_datetime(endtime),
                _format_utc_datetime(st[0].stats.endtime)))
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return
    if starttime < st[0].stats.starttime - 3600.0:
        msg = ("Starttime more than one hour before the starttime of the "
               "seismograms.")
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return

    if isinstance(source, FiniteSource):
        mu = None
    else:
        mu = st[0].stats.instaseis.mu

    # Trim, potentially pad with zeroes.
    st.trim(starttime, endtime, pad=True, fill_value=0.0, nearest_sample=False)

    # Checked in another function and just a sanity check.
    assert format in ("miniseed", "saczip")

    if format == "miniseed":
        with io.BytesIO() as fh:
            st.write(fh, format="mseed")
            fh.seek(0, 0)
            binary_data = fh.read()
        callback((binary_data, mu))
    # Write a number of SAC files into an archive.
    elif format == "saczip":
        byte_strings = []
        for tr in st:
            # Write SAC headers.
            tr.stats.sac = obspy.core.AttribDict()
            # Write WGS84 coordinates to the SAC files.
            tr.stats.sac.stla = geocentric_to_elliptic_latitude(
                receiver.latitude)
            tr.stats.sac.stlo = receiver.longitude
            tr.stats.sac.stdp = receiver.depth_in_m
            tr.stats.sac.stel = 0.0
            if isinstance(source, FiniteSource):
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.hypocenter_latitude)
                tr.stats.sac.evlo = source.hypocenter_longitude
                tr.stats.sac.evdp = source.hypocenter_depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.hypocenter_latitude
                src_lng = source.hypocenter_longitude
            else:
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.latitude)
                tr.stats.sac.evlo = source.longitude
                tr.stats.sac.evdp = source.depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.latitude
                src_lng = source.longitude
            # That's what SPECFEM uses for a moment magnitude...
            tr.stats.sac.imagtyp = 55
            # The event origin time relative to the reference which I'll
            # just assume to be the starttime here?
            tr.stats.sac.o = source.origin_time - starttime

            # SAC coordinates are elliptical, thus it only makes sense to
            # have elliptical distances.
            dist_in_m, az, baz = gps2DistAzimuth(
                lat1=tr.stats.sac.evla,
                lon1=tr.stats.sac.evlo,
                lat2=tr.stats.sac.stla,
                lon2=tr.stats.sac.stlo)

            tr.stats.sac.dist = dist_in_m / 1000.0
            tr.stats.sac.az = az
            tr.stats.sac.baz = baz

            # XXX: Is this correct? It might be better to use a function
            # from geographiclib.
            tr.stats.sac.gcarc = locations2degrees(
                lat1=src_lat,
                long1=src_lng,
                lat2=receiver.latitude,
                long2=receiver.longitude)

            # Some provenance.
            tr.stats.sac.kuser0 = "InstSeis"
            tr.stats.sac.kuser1 = db.info.velocity_model[:8]
            tr.stats.sac.user0 = scale
            # Prefix version numbers to identify them at a glance.
            tr.stats.sac.kt7 = "A" + db.info.axisem_version[:7]
            tr.stats.sac.kt8 = "I" + __version__[:7]

            with io.BytesIO() as temp:
                tr.write(temp, format="sac")
                temp.seek(0, 0)
                filename = "%s%s.sac" % (label, tr.id)
                byte_strings.append((filename, temp.read()))
        callback((byte_strings, mu))
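
The saczip branch hands back a list of (filename, bytes) tuples rather than a finished archive. A minimal sketch of how a caller might pack them into an in-memory ZIP with the standard library (the helper name is hypothetical):

import io
import zipfile

def pack_saczip(byte_strings):
    # Pack (filename, payload) tuples into an in-memory ZIP archive
    # and return the raw archive bytes.
    with io.BytesIO() as buf:
        with zipfile.ZipFile(buf, mode="w",
                             compression=zipfile.ZIP_DEFLATED) as zf:
            for filename, payload in byte_strings:
                zf.writestr(filename, payload)
        buf.seek(0, 0)
        return buf.read()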
Example No. 21
def single_comparison():
    
    """
    One-by-one comparison of the waveforms in the first path with those
    in the second path.
    """
    
    client = Client()
    
    global input
    
    # identity of the waveforms (first and second paths) to be compared with each other
    identity_all = input['net'] + '.' + input['sta'] + '.' + \
                    input['loc'] + '.' + input['cha']
    ls_first = glob.glob(os.path.join(input['first_path'], identity_all))
    ls_second = glob.glob(os.path.join(input['second_path'], identity_all))
    
    for i in range(0, len(ls_first)):
        try:
            tr1 = read(ls_first[i])[0]
    
            if input['phase'] != 'N':
                evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                        long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                        long2 = tr1.stats.sac.stlo)
                
                taup_tt = taup.getTravelTimes(delta = evsta_dist, depth = tr1.stats.sac.evdp)
                
                phase_exist = 'N'
                
                for tt_item in taup_tt:
                    if tt_item['phase_name'] == input['phase']:
                        print 'Requested phase:'
                        print input['phase']
                        print '------'
                        print tt_item['phase_name']
                        print 'exists in the waveform!'
                        print '-----------------------'
                        t_phase = tt_item['time']
                        
                        phase_exist = 'Y'
                        break
                        
                if phase_exist != 'Y':
                    continue
            
            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
            
            # tr1: first path, tr2: second path, tr3: Raw data
            #tr3 = read(os.path.join(input['first_path'], '..', 'BH_RAW', identity))[0]
            
            if input['resp_paz'] == 'Y':
                response_file = os.path.join(input['first_path'], '..', 'Resp/RESP.' + identity)
                
                # Extract the PAZ info from response file
                paz = readRESP(response_file, unit = input['corr_unit'])
                
                poles = paz['poles']
                zeros = paz['zeros']
                scale_fac = paz['gain']
                sensitivity = paz['sensitivity']
            
                print paz
                
                # Convert Poles and Zeros (PAZ) to frequency response.
                h, f = pazToFreqResp(poles, zeros, scale_fac, \
                                1./tr1.stats.sampling_rate, tr1.stats.npts*2, freq=True)
                # Use the evalresp library to extract 
                # instrument response information from a SEED RESP-file.
                resp = invsim.evalresp(t_samp = 1./tr1.stats.sampling_rate, \
                        nfft = tr1.stats.npts*2, filename = response_file, \
                        date = tr1.stats.starttime, units = input['corr_unit'].upper())
            
            # Keep the current identity in a new variable
            id_name = identity
            
            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception, error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            
            if input['resample'] != 'N':
                print 'WARNING: you are using resample!!!'
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])
            
            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime = t_cut_1, endtime = t_cut_2)
                
                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime = t_cut_1, endtime = t_cut_2)
            
            
            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)
            
            # normalization of all three waveforms to the 
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            #tr1_data = tr1.data/abs(maxi)
            #tr2_data = tr2.data/abs(maxi)
            #tr3_data = tr3.data/abs(maxi)
            
            # normalize each trace by its maximum absolute amplitude
            # (abs(max(...)) would fail when the largest sample is negative)
            tr1_data = tr1.data/abs(tr1.data).max()
            tr2_data = tr2.data/abs(tr2.data).max()
            
            #tr1_data = tr1.data
            #tr2_data = tr2.data*1e9
            
            print max(tr1.data)
            print max(tr2.data)
            
            # create time arrays for tr1, tr2 and tr3
            time_tr1 = np.arange(0, tr1.stats.npts/tr1.stats.sampling_rate, \
                                                1./tr1.stats.sampling_rate)
            time_tr2 = np.arange(0, tr2.stats.npts/tr2.stats.sampling_rate, \
                                                1./tr2.stats.sampling_rate)
            #time_tr3 = np.arange(0, tr3.stats.npts/tr3.stats.sampling_rate, \
            #                                    1./tr3.stats.sampling_rate)
            
            # label for plotting
            label_tr1 = ls_first[i].split('/')[-2]
            label_tr2 = ls_second[i].split('/')[-2]
            label_tr3 = 'RAW'
        
            if input['resp_paz'] == 'Y':
                # start plotting
                plt.figure()
                plt.subplot2grid((3,4), (0,0), colspan=4, rowspan=2)
                #plt.subplot(211)
            
            plt.plot(time_tr1, tr1_data, color = 'blue', label = label_tr1, lw=3)
            plt.plot(time_tr2, tr2_data, color = 'red', label = label_tr2, lw=3)
            #plt.plot(time_tr3, tr3_data, color = 'black', ls = '--', label = label_tr3)

            plt.xlabel('Time (sec)', fontsize = 'xx-large', weight = 'bold')
            
            if input['corr_unit'] == 'dis':
                ylabel_str = 'Relative Displacement'
            elif input['corr_unit'] == 'vel':
                ylabel_str = 'Relative Vel'
            elif input['corr_unit'] == 'acc':
                ylabel_str = 'Relative Acc'
            
            plt.ylabel(ylabel_str, fontsize = 'xx-large', weight = 'bold')
            
            plt.xticks(fontsize = 'xx-large', weight = 'bold')
            plt.yticks(fontsize = 'xx-large', weight = 'bold')
            
            plt.legend(loc=1,prop={'size':20})
            
            #-------------------Cross Correlation
            # 3 seconds as total length of samples to shift for cross correlation.
            
            cc_np = tr1.stats.sampling_rate * 3
            
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
            
            t_shift = float(np_shift)/tr1.stats.sampling_rate
            
            print "Cross Correlation:"
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)
            
            plt.title('Single Comparison' + '\n' + str(t_shift) + \
                        ' sec , coeff: ' + str(round(coeff, 5)) + \
                        '\n' + id_name, \
                        fontsize = 'xx-large', weight = 'bold')
            
            if input['resp_paz'] == 'Y':
                # -----------------------
                #plt.subplot(223)
                plt.subplot2grid((3,4), (2,0), colspan=2)
                '''
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.plot(np.log10(f), np.log10(abs(h)/sensitivity), \
                                            color = 'red', label = 'PAZ', lw=3)
                '''
                plt.loglog(f, abs(resp)/(sensitivity*sensitivity), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.loglog(f, abs(h)/sensitivity, \
                                            color = 'red', label = 'PAZ', lw=3)
                
                # mark the corner frequencies; the axes are logarithmic, so
                # the frequencies are used directly (log10(0) would be -inf)
                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(j, linestyle = '--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                #plt.ylabel('Amplitude\n      (power of 10)', fontsize = 'xx-large', weight = 'bold')
                
                plt.xlabel('Frequency [Hz]', fontsize = 'xx-large', weight = 'bold')
                plt.ylabel('Amplitude', fontsize = 'xx-large', weight = 'bold')
                
                plt.xticks(fontsize = 'xx-large', weight = 'bold')
                
                
                #plt.yticks = MaxNLocator(nbins=4)
                plt.yticks(fontsize = 'xx-large', weight = 'bold')
                plt.legend(loc=2,prop={'size':20})
                
                # -----------------------
                #plt.subplot(224)
                plt.subplot2grid((3,4), (2,2), colspan=2)

                #unwrap the phase of each frequency response
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                #plt.plot(np.log10(f), phase_resp, color = 'blue', label = 'RESP', lw=3)
                #plt.plot(np.log10(f), phase_paz, color = 'red', label = 'PAZ', lw=3)
                
                plt.semilogx(f, phase_resp, color = 'blue', label = 'RESP', lw=3)
                plt.semilogx(f, phase_paz, color = 'red', label = 'PAZ', lw=3)
                
                # same corner-frequency markers as in the amplitude plot
                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(j, linestyle = '--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                plt.xlabel('Frequency [Hz]', fontsize = 'xx-large', weight = 'bold')
                plt.ylabel('Phase [radian]', fontsize = 'xx-large', weight = 'bold')
                
                plt.xticks(fontsize = 'xx-large', weight = 'bold')
                plt.yticks(fontsize = 'xx-large', weight = 'bold')
            
                plt.legend(loc=3,prop={'size':20})
                
                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.4, hspace=0.3)
                """
                # -----------------------
                plt.subplot(325)
                
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)) - \
                                        np.log10(abs(h)/sensitivity), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Amplitude (power of 10)')

                plt.legend()
                
                # -----------------------
                plt.subplot(326)
                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                plt.plot(np.log10(f), np.log10(phase_resp) - np.log10(phase_paz), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Phase [radian] (power of 10)')

                plt.legend()

                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.3)
                """
            plt.show()
                
            
            print str(i+1) + '/' + str(len(ls_first))
            print ls_first[i]
            print '------------------'
            wait = raw_input(id_name)
            print '***************************'
            
        except Exception, error:
            print '##################'
            print error
            print '##################'
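
cross_correlation.xcorr used above has since been deprecated in ObsPy. A minimal sketch of the same shift/coefficient measurement with the newer correlate/xcorr_max pair (file paths are hypothetical):

from obspy import read
from obspy.signal.cross_correlation import correlate, xcorr_max

tr1 = read('first_path/BH.STA..BHZ')[0]    # hypothetical paths
tr2 = read('second_path/BH.STA..BHZ')[0]

max_shift = int(3 * tr1.stats.sampling_rate)   # +/- 3 sec, as above
cc = correlate(tr1.data, tr2.data, max_shift)
np_shift, coeff = xcorr_max(cc)
t_shift = float(np_shift) / tr1.stats.sampling_rate
print('Shift: %s sec, coefficient: %s' % (t_shift, coeff))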
Example No. 22
def ProcessLoopS(filepath,stationnames):
    '''File processing loop for S tomography: take the components, convert to RTZ, and delete the E and N components'''
    
    p = os.getcwd()
    
    for station in stationnames:
        print 'Dealing with %s' %station
    
        Rstream = obspy.Stream()

        #Get all SAC files associated with that station
        sacfiles = reversed(sorted(glob.glob('*.%s..*' %station)))
        saccount = 0
        
        for sacfile in sacfiles:
           trace = read(sacfile)
           
           #Only determine the distance and back-azimuth once: This is what the saccount variable is here for
           if saccount == 0:
           
             evlat = trace[0].stats.sac.evla
             evlon = trace[0].stats.sac.evlo
             evdep = trace[0].stats.sac.evdp
             stlat = trace[0].stats.sac.stla
             stlon = trace[0].stats.sac.stlo
           
             dist = locations2degrees(evlat,evlon,stlat,stlon) #find distance from the quake to the station
             arcs = IRISclient.distaz(stalat=stlat,stalon=stlon,evtlat=evlat,evtlon=evlon)
             baz = arcs['backazimuth']
             az = arcs['azimuth']
             
             #depth looks like it is in meters: convert to km
             if evdep > 1e3:
                evdep = evdep/1000.0
                
             traveltimes = getTravelTimes(dist,evdep, model='iasp91')
             
             P = 0
             S = 0

             for element in traveltimes:
               phaseinfo = element['phase_name']
               if phaseinfo == 'P':
                  Ptime = element['time']
                  P = 1
               if phaseinfo == 'S':
                  Stime = element['time']
                  S = 1
               if (P==1 and S ==1):
                  break

             #default to zero when a phase was not found in the travel time
             #list (also guards against stale values from a previous station)
             if P == 0:
               Ptime = 0
             if S == 0:
               Stime = 0
             
            
           #Set the P and S times, and other SAC header data
           trace[0].stats.sac.az = float(az)
           trace[0].stats.sac.baz = float(baz)

           if Ptime > 0:
              trace[0].stats.sac.t1 = Ptime
           if Stime > 0:
              trace[0].stats.sac.t2 = Stime

           trace[0].stats.sac.evdp = evdep*1000 #dbpick wants depth to be in meters
           trace[0].stats.sac.o = 0 #add origin time

           #other operations - remove the mean and resample
           trace[0].detrend('demean')
           trace[0].resample(20)
           
           if ('BHE' in sacfile) or ('BHN' in sacfile):
              Rstream += trace
              #to save disk space, delete the E and N components
              os.remove(sacfile)
           else:
              #Write the Z component directly (writes over the existing file)
              trace.write(sacfile,format='SAC')
              #print 'Appended arrivals to %s' %sacfile
           
           saccount += 1
        
        #Convert to radial and transverse components
        try: 
           rotstream = Rstream.rotate(method='NE->RT',back_azimuth=baz)
        
           for obj in rotstream:
              nt = obj.stats.network
              sta = obj.stats.station
              channel = obj.stats.channel
              name = "vel."+str(nt)+"."+str(station)+".."+str(channel)
              obj.write(name,format="SAC")
              #print 'Written new file %s' %name
        except:
           print 'Cannot rotate files in Rstream %s' %Rstream
           
    #create antelope database - just for S waves, which are picked on the transverse.
    os.system('rm *.01.*')
    os.system('sac2db *.BHT T')
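
getTravelTimes is likewise deprecated in ObsPy; a minimal sketch of the same P/S lookup against iasp91 with the newer TauPyModel interface:

from obspy.taup import TauPyModel

model = TauPyModel(model='iasp91')

def first_p_s(dist_deg, depth_km):
    # Return the first P and S arrival times in seconds after origin,
    # or 0 for a phase that does not arrive at this distance.
    arrivals = model.get_travel_times(source_depth_in_km=depth_km,
                                      distance_in_degree=dist_deg,
                                      phase_list=['P', 'S'])
    times = {}
    for arr in arrivals:
        times.setdefault(arr.name, arr.time)
    return times.get('P', 0), times.get('S', 0)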
Example No. 23
    def downloadLocalData(self, net, evtTime, evtLat, evtLon, evtDep, mag, fileFormat):

        if fileFormat == "MSEED":
            fileFormat = fileFormat.lower()

        print "Setting local DB..."
        stations = self.arclink.getStations(evtTime, evtTime + 3600, net)
        # return stations

        for station in stations:
            staName = station["code"]
            staLon = station["longitude"]
            staLat = station["latitude"]
            staElev = station["elevation"]

            # delta = taup.locations2degrees(evtLat, evtLon, staLat, staLon) # delta stores distance in degrees
            delta = util.locations2degrees(evtLat, evtLon, staLat, staLon)  # delta stores distance in degrees
            itp = taup()  # calling IAG Taup class
            itp.getTravelTimes(delta, evtDep)
            pTime = itp.P()
            sTime = itp.S()
            # to use in sac headers...
            seisTime = evtTime + pTime - self.beforeP
            originTime = self.beforeP - pTime

            fileTime = str(evtTime).replace("T", "-").replace(":", "-")[:-8]

            try:

                st = self.arclink.getWaveform(
                    net, staName, "*", "*", evtTime + pTime - self.beforeP, evtTime + sTime + self.afterS
                )
                print "\nData found for station " + staName
                st.merge(method=1, fill_value="interpolate")

                try:
                    os.mkdir(staName)
                except:
                    # station directory probably exists already
                    pass

                os.mkdir(staName + "/" + fileTime)

                for tr in st:
                    loc = str(tr.stats.location)
                    fileName = (
                        staName
                        + "/"
                        + fileTime
                        + "/"
                        + net
                        + "."
                        + staName
                        + "."
                        + loc
                        + "."
                        + str(tr.stats.channel)
                        + "."
                        + fileTime
                        + "."
                        + fileFormat
                    )
                    tr.write(fileName, fileFormat)
                    print fileName, "saved."
                    if fileFormat == "SAC":
                        self.fillSACHeaders(
                            fileName, seisTime, originTime, evtLat, evtLon, evtDep, mag, staLat, staLon, staElev
                        )

            except:
                # no data available for this station/time window: skip it
                pass
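
fillSACHeaders is not shown in this example. A minimal sketch of what such a helper might do, reusing the SAC AttribDict pattern from the handler further above (the function body and use of the arguments are assumptions):

import obspy
from obspy import read

def fillSACHeaders(fileName, seisTime, originTime, evtLat, evtLon,
                   evtDep, mag, staLat, staLon, staElev):
    # Write event and station metadata into the SAC header and save
    # the file in place. seisTime is accepted to mirror the call above
    # but not used in this sketch.
    st = read(fileName)
    for tr in st:
        tr.stats.sac = obspy.core.AttribDict()
        tr.stats.sac.o = originTime    # origin relative to reference time
        tr.stats.sac.evla = evtLat
        tr.stats.sac.evlo = evtLon
        tr.stats.sac.evdp = evtDep
        tr.stats.sac.mag = mag
        tr.stats.sac.stla = staLat
        tr.stats.sac.stlo = staLon
        tr.stats.sac.stel = staElev
    st.write(fileName, format='SAC')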
Example No. 24
def geteventslist(times, stationlocation, radii, eventbox, mags, homedir):
    '''Takes a set of lists containing the values of these parameters.
    Outputs all the quakes that satisfy the parameters by first looking
    at the global quake catalog.
    '''

    start = time.time()

    quakefile = open(
        str(homedir) + '/Global_Quake_Cat/' + 'globalquake_parsed.dat', 'r')
    lines = quakefile.readlines()
    quakefile.close()

    extractquakes = []

    ##########################
    #Introduced for speed: first determine the indices of each of the years, so that the quake search can be as fast as possible
    ##########################
    import datetime
    cyear = int(str(datetime.datetime.now()).split('-')[0])
    years = range(1970, cyear + 1)

    i = 0
    yearcount = len(years) - 1
    yearslist = {}

    for element in lines:
        vals = element.split(',')
        year = float(vals[0].split('-')[0])
        if year != years[yearcount]:
            yearslist[year] = i
            yearcount = yearcount - 1
        i += 1

    ##########################

    endtimeyearnum = times[0].year
    starttimeyearnum = times[1].year
    maxcount = yearslist[endtimeyearnum - 1]
    if starttimeyearnum < cyear - 1:
        mincount = yearslist[starttimeyearnum + 1]
    else:
        mincount = 0

    for line in lines[mincount:maxcount]:
        vals = line.split(',')
        quaketime = UTCDateTime(str(vals[0]))
        quakelat = float(vals[1])
        quakelon = float(vals[2])
        quakemag = float(vals[4])
        quakedepth = float(vals[3])
        #print times[0], times[1], quaketime, quakelat, quakelon, quakemag, quakedepth, stationlocation, radii, eventbox
        if (times[0] < quaketime < times[1]) and (float(mags[0]) < quakemag <
                                                  float(mags[1])):
            quakedist = locations2degrees(quakelat, quakelon,
                                          stationlocation[1],
                                          stationlocation[0])
            if (float(radii[0]) < quakedist < float(radii[1])) and (float(
                    eventbox[0]) < quakelon < float(eventbox[1])) and (float(
                        eventbox[2]) < quakelat < float(eventbox[3])):
                extractquakes.append(
                    [quaketime, quakelon, quakelat, quakemag, quakedepth])

    return extractquakes
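
The hand-rolled year index above exists purely to narrow the scan window. A sketch of the same idea with the standard bisect module, assuming one year value per catalog line sorted oldest-first (the reverse of the file used above):

import bisect

def scan_window(line_years, start_year, end_year):
    # line_years: one int per catalog line, sorted ascending.
    # Returns slice bounds covering start_year..end_year inclusive,
    # so the event loop only touches lines that can possibly match.
    lo = bisect.bisect_left(line_years, start_year)
    hi = bisect.bisect_right(line_years, end_year)
    return lo, hi

# e.g. lines[lo:hi] would replace lines[mincount:maxcount] above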
Example No. 25
def ncCreate_core(input, ls_saved_stas, address, eventgrp):
    
    """
    """
    
    global rootgrp, client_iris
    
    eventname = address.split('/')[-1]
    
    print "========================"
    print "Create netCDF file from:"
    print address
    print "========================"
    
    print "All available stations:"
    print len(ls_saved_stas)

    tr_tmp = read(ls_saved_stas[0])[0]
    
    eventgrp.evla = tr_tmp.stats.sac.evla
    eventgrp.evlo = tr_tmp.stats.sac.evlo
    eventgrp.evdp = tr_tmp.stats.sac.evdp
    eventgrp.mag = tr_tmp.stats.sac.mag
    
    stgrp = eventgrp.createGroup('stations')
    
    stationIDS = str(len(ls_saved_stas)) + ' --- , '
    
    eventgrp.createDimension('latitude', len(ls_saved_stas))
    eventgrp.createDimension('longitude', len(ls_saved_stas))
    eventgrp.createDimension('depth', len(ls_saved_stas))
    eventgrp.createDimension('elevation', len(ls_saved_stas))
    eventgrp.createDimension('epicentral', len(ls_saved_stas))
    
    stasla = eventgrp.createVariable('latitude', 'f4', ('latitude',) , zlib = True)
    staslo = eventgrp.createVariable('longitude', 'f4', ('longitude',) , zlib = True)
    stasdp = eventgrp.createVariable('depth', 'f4', ('depth',) , zlib = True)
    stasel = eventgrp.createVariable('elevation', 'f4', ('elevation',) , zlib = True)
    stasepi = eventgrp.createVariable('epicentral', 'f4', ('epicentral',) , zlib = True)
    
    
    for i in range(0, len(ls_saved_stas)):
        
        print str(i+1),
        
        try:
            tr = read(ls_saved_stas[i])[0]
        except Exception, e:
            print "\nProblem with reading the: " + ls_saved_stas[i]
            print e
            print "------------------------------------------------"
            continue
        
        stationID = tr.stats.network + '.' + tr.stats.station + '.' + \
                        tr.stats.location + '.' + tr.stats.channel
        
        stationIDS += stationID + ' , '
        
        stasla[i] = tr.stats.sac.stla
        staslo[i] = tr.stats.sac.stlo
        stasdp[i] = tr.stats.sac.stdp
        stasel[i] = tr.stats.sac.stel
        stasepi[i] = locations2degrees(lat1 = eventgrp.evla, \
                        long1 = eventgrp.evlo, lat2 = stasla[i], \
                        long2 = staslo[i])
        
        """
Example No. 26
def ncCreate_core(input, ls_saved_stas, address, eventgrp):
    """
    """

    global rootgrp, client_iris

    eventname = address.split('/')[-1]

    print "========================"
    print "Create netCDF file from:"
    print address
    print "========================"

    print "All available stations:"
    print len(ls_saved_stas)

    tr_tmp = read(ls_saved_stas[0])[0]

    eventgrp.evla = tr_tmp.stats.sac.evla
    eventgrp.evlo = tr_tmp.stats.sac.evlo
    eventgrp.evdp = tr_tmp.stats.sac.evdp
    eventgrp.mag = tr_tmp.stats.sac.mag

    stgrp = eventgrp.createGroup('stations')

    stationIDS = str(len(ls_saved_stas)) + ' --- , '

    eventgrp.createDimension('latitude', len(ls_saved_stas))
    eventgrp.createDimension('longitude', len(ls_saved_stas))
    eventgrp.createDimension('depth', len(ls_saved_stas))
    eventgrp.createDimension('elevation', len(ls_saved_stas))
    eventgrp.createDimension('epicentral', len(ls_saved_stas))

    stasla = eventgrp.createVariable('latitude',
                                     'f4', ('latitude', ),
                                     zlib=True)
    staslo = eventgrp.createVariable('longitude',
                                     'f4', ('longitude', ),
                                     zlib=True)
    stasdp = eventgrp.createVariable('depth', 'f4', ('depth', ), zlib=True)
    stasel = eventgrp.createVariable('elevation',
                                     'f4', ('elevation', ),
                                     zlib=True)
    stasepi = eventgrp.createVariable('epicentral',
                                      'f4', ('epicentral', ),
                                      zlib=True)

    for i in range(0, len(ls_saved_stas)):

        print str(i + 1),

        try:
            tr = read(ls_saved_stas[i])[0]
        except Exception, e:
            print "\nProblem with reading the: " + ls_saved_stas[i]
            print e
            print "------------------------------------------------"
            continue

        stationID = tr.stats.network + '.' + tr.stats.station + '.' + \
                        tr.stats.location + '.' + tr.stats.channel

        stationIDS += stationID + ' , '

        stasla[i] = tr.stats.sac.stla
        staslo[i] = tr.stats.sac.stlo
        stasdp[i] = tr.stats.sac.stdp
        stasel[i] = tr.stats.sac.stel
        stasepi[i] = locations2degrees(lat1=eventgrp.evla,
                                       long1=eventgrp.evlo,
                                       lat2=stasla[i],
                                       long2=staslo[i])
        """
Example No. 27
        event_time_str = str(event_time.year) + '.' + str(event_time.julday).zfill(3) + '.' +\
            str(event_time.hour).zfill(2) + str(event_time.minute).zfill(2) + \
            str(event_time.second).zfill(2)
        event_dir = default_dir + '/' + event_time_str
        mkdir(event_dir)
        os.chdir(event_dir)

        for net in stations.select():
            for station in net:
                # print station
                station_latitude = station.latitude
                station_longitude = station.longitude
                station_elevation = station.elevation  # km
                station_code = station.code
                distance_in_degree = locations2degrees(
                    event_latitude, event_longitude,
                    station_latitude, station_longitude)
                # try:
                #     arrivals = model.get_travel_times(
                #         source_depth_in_km=event_depth / 1000.0,
                #         distance_in_degree=distance_in_degree)  # ,
                # except Exception, e:
                #     arrivals = model.get_travel_times(
                #         source_depth_in_km=event_depth / 1000.0,
                #         distance_in_degree=distance_in_degree,
                #         phase_list=["P"])  ## unexcept in taup maybe the program is wrong.   
                # try:
                arrivals = model.get_travel_times(
                    source_depth_in_km=event_depth / 1000.0,
                    distance_in_degree=distance_in_degree,
                    phase_list=["P","Pms"])  ## unexcept in taup maybe the program is wrong.   
Example No. 28
def cc_core(ls_first, ls_second, identity_all, max_ts, print_sta):
    
    """
    Perform the main part of the cross correlation and creating 
    the cc.txt file
    """
    
    global input
    
    try:
        
        cc_open = open('./cc.txt', 'a')
        
        tr1 = read(ls_first)[0]
            
        if input['phase'] != 'N':
            evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                    long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                    long2 = tr1.stats.sac.stlo)
            
            taup_tt = taup.getTravelTimes(delta = evsta_dist, depth = tr1.stats.sac.evdp)
            
            phase_exist = 'N'
            
            for tt_item in taup_tt:
                if tt_item['phase_name'] == input['phase']:
                    print 'Requested phase:'
                    print input['phase']
                    print '------'
                    print tt_item['phase_name']
                    print 'exists in the waveform!'
                    print '-----------------------'
                    t_phase = tt_item['time']
                    
                    phase_exist = 'Y'
                    break
                    
        if input['phase'] == 'N' or (input['phase'] != 'N' and phase_exist == 'Y'):
            
            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
            
            # Keep the current identity in a new variable
            id_name = identity
            
            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception, error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            
            if input['resample'] != 'N':
                print 'WARNING: you are using resample!!!'
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])
            
            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime = t_cut_1, endtime = t_cut_2)
                
                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime = t_cut_1, endtime = t_cut_2)
            
            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)
            
            # normalization of all three waveforms to the 
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            '''
            maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            tr1_data = tr1.data/abs(maxi)
            tr2_data = tr2.data/abs(maxi)
            tr3_data = tr3.data/abs(maxi)
            '''
            # normalize each trace by its maximum absolute amplitude
            tr1.data = tr1.data/abs(tr1.data).max()
            tr2.data = tr2.data/abs(tr2.data).max()
        
            cc_np = tr1.stats.sampling_rate * max_ts
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
            t_shift = float(np_shift)/tr1.stats.sampling_rate
            
            # scale_str records whether the scales of the two waveforms agree;
            # if scale_str = 'Y' then the scale is correct.
            scale_str = 'Y'
            
            if abs(tr1.data).max() > 2.0 * abs(tr2.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print '#####################################################'
                print "Scale is not correct! " + label_tr1 + '>' + label_tr2
                print '#####################################################'
                scale_str = 'N'
            elif abs(tr2.data).max() >= 2.0 * abs(tr1.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print '#####################################################'
                print "Scale is not correct! " + label_tr2 + '>' + label_tr1
                print '#####################################################'
                scale_str = 'N'
            
            if str(coeff) != 'nan':
                cc_open.write(id_name + ',' + str(round(coeff, 4)) + ',' + str(t_shift) + \
                                                ',' + scale_str + ',' + '\n')
                                
            print "Cross Correlation:"
            print id_name
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)
            print print_sta
            print '------------------'
       
            cc_open.close()
    
    except Exception, error:
        print '##################'
        print error
        print '##################'
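
The example opens cc.txt once per call; a sketch of the same append wrapped in a context manager, so the handle is closed even when an exception fires (the helper name is hypothetical):

import math

def append_cc_row(id_name, coeff, t_shift, scale_str, path='./cc.txt'):
    # Append one comparison result per line; skip NaN coefficients.
    if math.isnan(coeff):
        return
    with open(path, 'a') as cc_open:
        cc_open.write('%s,%s,%s,%s,\n'
                      % (id_name, round(coeff, 4), t_shift, scale_str))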