def filterClusterStationMinimumNumber(CentroidList,StationClusterList,Config):
    # Drop centroids whose cluster has fewer than Config['minclusterstation']
    # member stations.  Returns (StationClusterList, surviving centroids).
    # NOTE(review): the station list is returned UNfiltered -- the pruning
    # loop below is commented out in the original; confirm that is intended.
    newCentroidList = []
    newStationClusterList = []   # currently unused (see commented-out block below)
    for i in CentroidList:
        counter = 0
        for j in StationClusterList:
            if i.rank == j.member:
                counter+=1
                # streamID / delta feed only the (disabled) debug print below
                streamID = j.net+'.'+j.sta+'.'+j.loc+'.'+j.comp
                delta = locations2degrees(float(i.lat),float(i.lon), float(j.lat),float(j.lon))
                #print i.lat,i.lon,': ',streamID,j.lat,j.lon,delta
        if counter < int(Config['minclusterstation']):
            print 'Centroid ',i.rank, counter, ' OUT', i.lat,i.lon
        else:
            print 'Centroid ',i.rank, counter, ' IN', i.lat,i.lon
            newCentroidList.append(i)
    #for i in newCentroidList:
    #    for j in StationClusterList:
    #        if i.rank == j.member:
    #            newStationClusterList.append(j)
    #return newStationClusterList,newCentroidList
    return StationClusterList,newCentroidList
def refTrigger(Waveform,Event,Meta):
    """
    Compare an STA/LTA trigger onset on the reference trace with the
    theoretical P arrival and return the time difference in seconds.

    Waveform -- stream whose first trace identifies the reference station
    Event    -- event object providing lat, lon, depth and origin time
    Meta     -- station metadata searched via searchMeta() by stream ID
    """
    print Event
    name = ('%s.%s.%s.%s')%(Waveform[0].stats.network,Waveform[0].stats.station,Waveform[0].stats.location,Waveform[0].stats.channel)
    i = searchMeta(name,Meta)
    print i
    # epicentral distance in degrees, then ak135 travel times
    de = locations2degrees(float(Event.lat), float(Event.lon), float(i.lat), float(i.lon))
    tt = getTravelTimes(delta=de, depth=float(Event.depth), model='ak135')
    ptime = 0
    # take the first arrival only if it is the direct P phase
    if tt[0]['phase_name'] == 'P':
        ptime = tt[0]['time']
    tw = calculateTimeWindows (ptime, Event)
    stP = readWaveformsPicker (i, tw, Event, ptime)
    trP = stP[0]
    # recursive STA/LTA with 1 s short and 10 s long window
    cft = recSTALTA(trP.data, int(1 * trP.stats.sampling_rate), int(10 * trP.stats.sampling_rate))
    t = triggerOnset(cft,6,1.5)   # on-threshold 6, off-threshold 1.5
    print len(trP),t,type(t)
    # first trigger onset, converted from samples to seconds
    onset = t[0][0]/trP.stats.sampling_rate
    print 'TRIGGER ',trP.stats.starttime+onset
    print 'THEORETICAL: ',UTCDateTime(Event.time)+ptime
    # observed minus theoretical arrival time
    tdiff = (trP.stats.starttime+onset)-(UTCDateTime(Event.time)+ptime)
    #plotTrigger(trP,cft,6,1.5)
    print tdiff
    return tdiff
def traveltimes(MetaDict, Event):
    """
    Compute P travel times for every station in MetaDict and load the
    corresponding waveform windows.

    Returns (Wdict, SNR): waveform streams and signal-to-noise ratios,
    both keyed by station name.
    """
    logger.info('\033[31m Enter AUTOMATIC FILTER \033[0m')
    T = []
    Wdict = {}
    SNR = {}
    for i in MetaDict:
        de = locations2degrees(float(Event.lat), float(Event.lon), float(i.lat), float(i.lon))
        tt = getTravelTimes(delta=de, depth=float(Event.depth), model='ak135')
        if tt[0]['phase_name'] == 'P':
            ptime = tt[0]['time']
            T.append(ptime)
        logger.info('\033[31m \n\n+++++++++++++++++++++++++++++++++++++++++++++++++++ \033[0m')
        # NOTE(review): if the first listed phase of the very first station is
        # not 'P', ptime is unbound here (NameError) -- confirm original nesting.
        print i.getName(), i.lat, i.lon, ptime
        ttime = ptime
        tw = calculateTimeWindows(ptime, Event)
        w, snr = readWaveformsCross(i, tw, Event, ttime)
        Wdict[i.getName()] = w
        SNR[i.getName()] = snr
    logger.info('\033[31m Exit AUTOMATIC FILTER \033[0m')
    return Wdict, SNR
def filterStations(StationList, Config, Origin, network):
    """
    Keep stations whose stream ID matches one of the given network
    patterns and whose epicentral distance lies strictly between
    Config['mindist'] and Config['maxdist'] (degrees).
    """
    F = []
    minDist = int (Config['mindist'])
    maxDist = int (Config['maxdist'])
    o_lat = float (Origin['lat'])
    o_lon = float (Origin['lon'])
    Logfile.red ('Filter stations with configured parameters')
    for i in StationList:
        # normalise the conventional '--' empty-location marker
        if i.loc == '--':
            i.loc = ''
        streamID = i.net + '.' + i.sta + '.' + i.loc + '.' + i.comp
        for j in network:
            # shell-style wildcard match against the configured patterns
            if fnmatch.fnmatch(streamID, j):
                sdelta = locations2degrees(o_lat, o_lon, float(i.lat), float(i.lon))
                print streamID, sdelta,' degree'
                if sdelta > minDist and sdelta < maxDist:
                    #if i.net != 'GB':
                    F.append(Station(i.net, i.sta, i.loc, i.comp, i.lat, i.lon, i.ele, i.dip, i.azi, i.gain))
    Logfile.red ('%d STATIONS LEFT IN LIST' % len(F))
    return F
def filterStations(StationList,Config,Origin,network):
    """
    Keep stations matching a network pattern within the configured
    distance window (degrees).

    NOTE(review): this variant reads Config['minDist']/'maxDist'
    (camelCase) while the other filterStations definitions in this file
    read 'mindist'/'maxdist' -- verify which key spelling the config
    actually provides.
    """
    F = []
    minDist = int(Config['minDist'])
    maxDist = int(Config['maxDist'])
    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])
    logger.info('\033[31m Filter stations with configured parameters \033[0m')
    for i in StationList:
        # normalise the conventional '--' empty-location marker
        if i.loc == '--':
            i.loc=''
        streamID = i.net+'.'+i.sta+'.'+i.loc+'.'+i.comp
        for j in network:
            if fnmatch.fnmatch(streamID, j):
                sdelta = locations2degrees(o_lat, o_lon, float(i.lat), float(i.lon))
                if sdelta > minDist and sdelta < maxDist:
                    F.append(Station(i.net,i.sta,i.loc,i.comp,i.lat,i.lon,i.ele,i.dip,i.azi,i.gain))
    logger.info('\033[31m %d STATIONS LEFT IN LIST \033[0m'% len(F))
    return F
def checkStationAroundInitialCentroid(station,Config,StationMetaList):
    """
    Count the stations of StationMetaList lying closer to `station`
    than Config['initialstationdistance'] degrees.
    """
    radius = int(Config['initialstationdistance'])
    ref_lat = float(station.lat)
    ref_lon = float(station.lon)
    nearby = 0
    for meta in StationMetaList:
        dist = locations2degrees(ref_lat, ref_lon,
                                 float(meta.lat), float(meta.lon))
        if dist < radius:
            nearby += 1
    return nearby
def compareClusterCentre(oldCluster,newCluster,Config):
    """
    Compare old and new cluster centres pairwise and count how many
    moved less than Config['comparedelta'] degrees (i.e. converged).
    """
    counter = 0
    for i in range (int(Config['maxcluster'])):
        delta = locations2degrees(float(oldCluster[i].lat),float(oldCluster[i].lon), float(newCluster[i].lat),float(newCluster[i].lon))
        print i,' OLD: ',oldCluster[i].lat,oldCluster[i].lon,' NEW: ',newCluster[i].lat,newCluster[i].lon,' DELTA: ',delta
        if delta < float(Config['comparedelta']):
            print 'JO'
            counter +=1
        else:
            print 'NO'
    return counter
def deleteFarStations(CentroidList,StationClusterList,Config):
    """
    Flag stations lying more than Config['stationdistance'] degrees
    away from their assigned centroid by setting member = -1.
    The (mutated) station list is returned unchanged in length.
    """
    limit = int(Config['stationdistance'])
    for centroid in CentroidList:
        for sta in StationClusterList:
            if centroid.rank != sta.member:
                continue
            dist = locations2degrees(float(centroid.lat), float(centroid.lon),
                                     float(sta.lat), float(sta.lon))
            if dist > limit:
                sta.member = -1
    return StationClusterList
def addOK(station,stationList,Config,MetaList):
    """
    Decide whether `station` may become a new initial centroid.

    Returns 1 only if the station is farther than
    Config['centroidmindistance'] degrees from EVERY station already in
    stationList AND has at least
    Config['minstationaroundinitialcluster'] neighbouring stations;
    returns 0 as soon as either condition fails.
    """
    t=0
    for i in stationList:
        sdelta = locations2degrees(float(station.lat), float(station.lon), float(i.lat), float(i.lon))
        if sdelta > float(Config['centroidmindistance']):
            aroundcounter = checkStationAroundInitialCentroid(station,Config,MetaList)
            if aroundcounter >= int(Config['minstationaroundinitialcluster']):
                t=1
            else:
                # not enough neighbours -- reject immediately
                t=0
                return t
        else:
            # too close to an existing candidate -- reject immediately
            t=0
            return t
    # reached only when every station in stationList passed (or list empty)
    return t
def stationBelongToCluster(Config,CentroidList,StationMetaList):
    """
    Assign every station to its nearest centroid (1-based centroid
    index stored in `member`) and return the stations as a fresh list
    of Station objects carrying that membership.
    """
    assigned = []
    for sta in StationMetaList:
        best = 100000
        for rank, centroid in enumerate(CentroidList, 1):
            dist = locations2degrees(float(centroid.lat), float(centroid.lon),
                                     float(sta.lat), float(sta.lon))
            if dist < best:
                best = dist
                sta.member = rank
        assigned.append(Station(sta.net, sta.sta, sta.loc, sta.comp,
                                sta.lat, sta.lon, sta.ele, sta.dip,
                                sta.azi, sta.gain, sta.member))
    return assigned
def filterStations(StationList,Config,Origin):
    """
    Keep only the stations whose epicentral distance from Origin lies
    strictly between Config['mindist'] and Config['maxdist'] degrees.
    Returns a new list of Station objects.
    """
    lo = int(Config['mindist'])
    hi = int(Config['maxdist'])
    src_lat = float(Origin['lat'])
    src_lon = float(Origin['lon'])
    logger.info('\033[31m Filter stations with configured parameters \033[0m')
    kept = []
    for sta in StationList:
        dist = locations2degrees(src_lat, src_lon,
                                 float(sta.lat), float(sta.lon))
        if lo < dist < hi:
            kept.append(Station(sta.net, sta.sta, sta.loc, sta.comp,
                                sta.lat, sta.lon, sta.ele, sta.dip,
                                sta.azi, sta.gain))
    logger.info('\033[31m %d STATIONS LEFT IN LIST \033[0m'% len(kept))
    return kept
def YSPEC_Phase():
    """
    Create input file (yspec.in) for YSPEC based on the selected Phase

    For each event: select station/event rows whose travel-time list
    contains one of input['phase'], dump them to info/sta_yspec, copy
    the ./yspec.in template into the event's info/ directory and patch
    its source/receiver header lines in place.
    """
    global input
    events, address_events = quake_info(input['address'], 'info')
    for i in range(0, len(events)):
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])
        for j in range(0, len(sta_ev[i])):
            # epicentral distance from row columns: 9/10 event, 4/5 station
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                model=input['model'])
            # column 8 is rewritten as the "<network>_<station>" identifier
            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]
            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])
        #import ipdb; ipdb.set_trace()
        sta_ev_req = list(unique_items(sta_ev_select))
        # refresh info/yspec.in from the local template
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'yspec.in')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'yspec.in'))
        shutil.copy2('./yspec.in', os.path.join(address_events[i],\
                        'info', 'yspec.in'))
        # rewrite info/sta_yspec as CSV of the selected rows (cols 0-12)
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'sta_yspec')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'sta_yspec'))
        sta_yspec_open = open(os.path.join(address_events[i],\
                        'info', 'sta_yspec'), 'a+')
        for j in range(0, len(sta_ev_req)):
            sta_yspec_open.writelines(sta_ev_req[j][0] + ',' + \
                sta_ev_req[j][1] + ',' + sta_ev_req[j][2] + ',' + \
                sta_ev_req[j][3] + ',' + sta_ev_req[j][4] + ',' + \
                sta_ev_req[j][5] + ',' + sta_ev_req[j][6] + ',' + \
                sta_ev_req[j][7] + ',' + sta_ev_req[j][8] + ',' + \
                sta_ev_req[j][9] + ',' + sta_ev_req[j][10] + ',' + \
                sta_ev_req[j][11] + ',' + sta_ev_req[j][12] + ',\n')
        sta_yspec_open.close()
        # receiver block: one "lat lon" line per station, preceded by a
        # blank line -- hence the len(receivers)-1 count written below
        receivers = []
        receivers.append('\n')
        for j in range(0, len(sta_ev_req)):
            receivers.append( ' ' + \
                str(round(float(sta_ev_req[j][4]), 2)) + ' ' + \
                str(round(float(sta_ev_req[j][5]), 2)) + \
                '\n')
        # NOTE(review): opened in 'a+' then read -- on platforms where 'a+'
        # positions at EOF, readlines() would be empty; confirm behaviour.
        yspecin_open = open(os.path.join(address_events[i],\
                        'info', 'yspec.in'), 'a+')
        yspecin_file = yspecin_open.readlines()
        # Patch the value line following each template header comment.
        # NOTE(review): whitespace inside these search literals may differ
        # from the original file -- the source text was mangled at extraction.
        search = '# source depth (km)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + \
                    str(round(float(sta_ev_req[0][11]), 2)) + '\n'
                break
        search = '# source latitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + \
                    str(round(float(sta_ev_req[0][9]), 2)) + '\n'
                break
        search = '# source longitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + \
                    str(round(float(sta_ev_req[0][10]), 2)) + '\n'
                break
        search = '# number of receivers'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + str(len(receivers)-1) + '\n'
                break
        search = '# receiver latitudes and longitudes'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1:] = receivers
                break
        # write the patched template back (delete, then append-mode rewrite)
        yspecin_open.close()
        os.remove(os.path.join(address_events[i], 'info', 'yspec.in'))
        yspecin_open = open(os.path.join(address_events[i],\
                        'info', 'yspec.in'), 'a+')
        for j in range(0, len(yspecin_file)):
            yspecin_open.write(yspecin_file[j])
        yspecin_open.close()
        print '\n***************************************'
        print 'Following Parameters have been changed:\n'
        print 'source depth'
        print 'source latitude'
        print 'source longitude'
        print 'number of receivers'
        print 'receiver latitude and longitude\n'
        print 'Please change the rest yourself!'
        print '***************************************'
def AXISEM_Phase():
    """
    Create STATIONS file as an input for AXISEM

    For each event: select station/event rows whose travel-time list
    contains one of input['phase'], then write info/STATIONS
    (fixed-width rows) and info/receivers.dat (count header, then
    colatitude/longitude pairs).
    """
    global input
    events, address_events = quake_info(input['address'], 'info')
    for i in range(0, len(events)):
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])
        for j in range(0, len(sta_ev[i])):
            # epicentral distance from row columns: 9/10 event, 4/5 station
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                model=input['model'])
            # column 8 is rewritten as the "<network>_<station>" identifier
            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]
            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])
        sta_ev_req = list(unique_items(sta_ev_select))
        # start both output files from scratch
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'receivers.dat')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'receivers.dat'))
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'STATIONS')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'STATIONS'))
        receivers_file = open(os.path.join(address_events[i],\
                        'info', 'receivers.dat'), 'a+')
        # receivers.dat begins with the receiver count
        receivers_file.writelines(str(len(sta_ev_req)) + '\n')
        for j in range(0, len(sta_ev_req)):
            # NOTE(review): both files are re-opened on every iteration and
            # never closed here -- relies on interpreter cleanup; confirm.
            STATIONS_file = open(os.path.join(address_events[i],\
                            'info', 'STATIONS'), 'a+')
            receivers_file = open(os.path.join(address_events[i],\
                            'info', 'receivers.dat'), 'a+')
            # fixed-width STATIONS row: station, network, lat, lon, two zeros
            STATIONS_file.writelines(sta_ev_req[j][1] + \
                ' '*(5 - len('%s' % sta_ev_req[j][0])) + '%s' \
                % sta_ev_req[j][0] + \
                ' '*(9 - len('%.2f' % float(sta_ev_req[j][4]))) + '%.2f' \
                % float(sta_ev_req[j][4]) + \
                ' '*(9 - len('%.2f' % float(sta_ev_req[j][5]))) + '%.2f' \
                % float(sta_ev_req[j][5]) + \
                ' '*(15 - len('0.0000000E+00')) + \
                '0.0000000E+00' + \
                ' '*(15 - len('0.0000000E+00')) + \
                '0.0000000E+00' + '\n')
            # receivers.dat row: colatitude (90 - lat) and longitude
            receivers_file.writelines( \
                str(round(90.0 - float(sta_ev_req[j][4]), 1)) + ' ' + \
                str(float(sta_ev_req[j][5])) + \
                '\n')
def YSPEC_Phase():
    """
    Create input file (yspec.in) for YSPEC based on the selected Phase

    NOTE(review): this is a duplicate of the YSPEC_Phase definition that
    appears earlier in this file; at import time this later definition
    shadows the earlier one -- confirm the duplication is intended.
    """
    global input
    events, address_events = quake_info(input['address'], 'info')
    for i in range(0, len(events)):
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])
        for j in range(0, len(sta_ev[i])):
            # epicentral distance from row columns: 9/10 event, 4/5 station
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                model=input['model'])
            # column 8 is rewritten as the "<network>_<station>" identifier
            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]
            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])
        #import ipdb; ipdb.set_trace()
        sta_ev_req = list(unique_items(sta_ev_select))
        # refresh info/yspec.in from the local template
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'yspec.in')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'yspec.in'))
        shutil.copy2('./yspec.in', os.path.join(address_events[i],\
                        'info', 'yspec.in'))
        # rewrite info/sta_yspec as CSV of the selected rows (cols 0-12)
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'sta_yspec')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'sta_yspec'))
        sta_yspec_open = open(os.path.join(address_events[i],\
                        'info', 'sta_yspec'), 'a+')
        for j in range(0, len(sta_ev_req)):
            sta_yspec_open.writelines(sta_ev_req[j][0] + ',' + \
                sta_ev_req[j][1] + ',' + sta_ev_req[j][2] + ',' + \
                sta_ev_req[j][3] + ',' + sta_ev_req[j][4] + ',' + \
                sta_ev_req[j][5] + ',' + sta_ev_req[j][6] + ',' + \
                sta_ev_req[j][7] + ',' + sta_ev_req[j][8] + ',' + \
                sta_ev_req[j][9] + ',' + sta_ev_req[j][10] + ',' + \
                sta_ev_req[j][11] + ',' + sta_ev_req[j][12] + ',\n')
        sta_yspec_open.close()
        # receiver block: one "lat lon" line per station, preceded by a
        # blank line -- hence the len(receivers)-1 count written below
        receivers = []
        receivers.append('\n')
        for j in range(0, len(sta_ev_req)):
            receivers.append( ' ' + \
                str(round(float(sta_ev_req[j][4]), 2)) + ' ' + \
                str(round(float(sta_ev_req[j][5]), 2)) + \
                '\n')
        # NOTE(review): opened in 'a+' then read -- on platforms where 'a+'
        # positions at EOF, readlines() would be empty; confirm behaviour.
        yspecin_open = open(os.path.join(address_events[i],\
                        'info', 'yspec.in'), 'a+')
        yspecin_file = yspecin_open.readlines()
        # Patch the value line following each template header comment.
        # NOTE(review): whitespace inside these search literals may differ
        # from the original file -- the source text was mangled at extraction.
        search = '# source depth (km)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + \
                    str(round(float(sta_ev_req[0][11]), 2)) + '\n'
                break
        search = '# source latitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + \
                    str(round(float(sta_ev_req[0][9]), 2)) + '\n'
                break
        search = '# source longitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = ' ' + \
                    str(round(float(sta_ev_req[0][10]), 2)) + '\n'
                break
        search = '# number of receivers'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j + 1] = ' ' + str(len(receivers) - 1) + '\n'
                break
        search = '# receiver latitudes and longitudes'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j + 1:] = receivers
                break
        # write the patched template back (delete, then append-mode rewrite)
        yspecin_open.close()
        os.remove(os.path.join(address_events[i], 'info', 'yspec.in'))
        yspecin_open = open(os.path.join(address_events[i],\
                        'info', 'yspec.in'), 'a+')
        for j in range(0, len(yspecin_file)):
            yspecin_open.write(yspecin_file[j])
        yspecin_open.close()
        print '\n***************************************'
        print 'Following Parameters have been changed:\n'
        print 'source depth'
        print 'source latitude'
        print 'source longitude'
        print 'number of receivers'
        print 'receiver latitude and longitude\n'
        print 'Please change the rest yourself!'
        print '***************************************'
def AXISEM_Phase():
    """
    Create STATIONS file as an input for AXISEM

    NOTE(review): this is a duplicate of the AXISEM_Phase definition that
    appears earlier in this file; at import time this later definition
    shadows the earlier one -- confirm the duplication is intended.
    """
    global input
    events, address_events = quake_info(input['address'], 'info')
    for i in range(0, len(events)):
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])
        for j in range(0, len(sta_ev[i])):
            # epicentral distance from row columns: 9/10 event, 4/5 station
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                model=input['model'])
            # column 8 is rewritten as the "<network>_<station>" identifier
            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]
            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])
        sta_ev_req = list(unique_items(sta_ev_select))
        # start both output files from scratch
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'receivers.dat')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'receivers.dat'))
        if os.path.isfile(os.path.join(address_events[i],\
                        'info', 'STATIONS')):
            os.remove(os.path.join(address_events[i],\
                        'info', 'STATIONS'))
        receivers_file = open(os.path.join(address_events[i],\
                        'info', 'receivers.dat'), 'a+')
        # receivers.dat begins with the receiver count
        receivers_file.writelines(str(len(sta_ev_req)) + '\n')
        for j in range(0, len(sta_ev_req)):
            # NOTE(review): both files are re-opened on every iteration and
            # never closed here -- relies on interpreter cleanup; confirm.
            STATIONS_file = open(os.path.join(address_events[i],\
                            'info', 'STATIONS'), 'a+')
            receivers_file = open(os.path.join(address_events[i],\
                            'info', 'receivers.dat'), 'a+')
            # fixed-width STATIONS row: station, network, lat, lon, two zeros
            STATIONS_file.writelines(sta_ev_req[j][1] + \
                ' '*(5 - len('%s' % sta_ev_req[j][0])) + '%s' \
                % sta_ev_req[j][0] + \
                ' '*(9 - len('%.2f' % float(sta_ev_req[j][4]))) + '%.2f' \
                % float(sta_ev_req[j][4]) + \
                ' '*(9 - len('%.2f' % float(sta_ev_req[j][5]))) + '%.2f' \
                % float(sta_ev_req[j][5]) + \
                ' '*(15 - len('0.0000000E+00')) + \
                '0.0000000E+00' + \
                ' '*(15 - len('0.0000000E+00')) + \
                '0.0000000E+00' + '\n')
            # receivers.dat row: colatitude (90 - lat) and longitude
            receivers_file.writelines( \
                str(round(90.0 - float(sta_ev_req[j][4]), 1)) + ' ' + \
                str(float(sta_ev_req[j][5])) + \
                '\n')
def select_sta():
    """
    Select required stations

    Scans the event folders listed in quake_req.txt, reads each matching
    BH waveform, and appends one CSV line per selected station/phase to
    <input['file']>.dat.  With input['all_sta'] set, every station is
    written with 'NA' phase fields instead of phase-based selection.
    """
    global input
    # Basemap only used for great-circle points in the Pdiff branch below
    map_proj = Basemap(projection='cyl', llcrnrlat=-90,urcrnrlat=90,\
        llcrnrlon=-180,urcrnrlon=180, resolution='c')
    ev_file = open(os.path.join(os.getcwd(), 'quake_req.txt'), 'r')
    ev_add = ev_file.read().split('\n')[:-1]
    # truncate the output file once; each record is appended later
    select = open(os.path.join(os.getcwd(), input['file'] + '.dat'), 'w')
    select.close()
    for k in range(0, len(ev_add)):
        '''
        select = open(os.path.join(os.getcwd(), \
            input['file'] + '-' + ev_add[k].split('/')[-1] + \
            '.dat'), 'w')
        '''
        (quake_d, quake_t) = read_quake(ev_add[k])
        list_sta = glob.glob(os.path.join(ev_add[k], 'BH', \
            input['identity']))
        for i in range(0, len(list_sta)):
            try:
                st = read(list_sta[i])
                print '***************************************'
                print str(i) + '/' + str(len(list_sta)) + ' -- ' + \
                    str(k) + '/' + str(len(ev_add))
                print list_sta[i].split('/')[-1]
                info_sac = st[0].stats['sac']
                if input['all_sta'] == None:
                    # phase-based selection path
                    dist = locations2degrees(lat1 = quake_d['lat'], \
                        long1 = quake_d['lon'], lat2 = info_sac['stla'], \
                        long2 = info_sac['stlo'])
                    tt = getTravelTimes(delta=dist, depth=quake_d['dp'], \
                        model=input['model'])
                    for m in range(0, len(tt)):
                        if tt[m]['phase_name'] in input['phase']:
                            try:
                                print '--------------------'
                                print list_sta[i].split('/')[-1] + ' has ' + \
                                    tt[m]['phase_name'] + ' phase'
                                # optional downsampling to the requested frequency
                                if input['freq'] != None:
                                    st[0].decimate(int(round(\
                                        st[0].stats['sampling_rate'])/input['freq']), \
                                        no_filter=False)
                                    if st[0].stats['sampling_rate'] != input['freq']:
                                        print list_sta[i].split('/')[-1]
                                        print st[0].stats['sampling_rate']
                                        print '------------------------------------------'
                                '''
                                np_evt = round((events[0]['datetime'] - st[0].stats['starttime'])*st[0].stats['sampling_rate'])
                                np_pha = np_evt + round(tt[m]['time']*st[0].stats['sampling_rate'])
                                select = open(Address_events + '/' + events[l]['event_id'] + '/IRIS/info/' + name_select, 'a')
                                '''
                                if tt[m]['phase_name'] != 'Pdiff':
                                    # plain phases: endpoints are event and station
                                    lat_1 = str(quake_d['lat'])
                                    lon_1 = str(quake_d['lon'])
                                    lat_2 = str(info_sac['stla'])
                                    lon_2 = str(info_sac['stlo'])
                                elif tt[m]['phase_name'] == 'Pdiff':
                                    # Pdiff: pick two points near the middle of
                                    # the great-circle path instead
                                    dist_limit = 97.0
                                    num_gcp = 1000
                                    gcp = map_proj.gcpoints(quake_d['lon'], \
                                        quake_d['lat'], info_sac['stlo'], \
                                        info_sac['stla'], num_gcp)
                                    # NOTE(review): if dist < dist_limit the
                                    # lat_1/lon_1/... names stay unbound and the
                                    # inner except swallows the NameError -- confirm.
                                    if dist >= dist_limit:
                                        diff_dist = dist - dist_limit
                                        req_gcp = diff_dist*(float(num_gcp)/dist)
                                        req_gcp = round(req_gcp)/2
                                        mid_p = len(gcp[0])/2
                                        #before = int(mid_p - req_gcp)
                                        #after = int(mid_p + req_gcp)
                                        before = mid_p - int(2.0 * len(gcp[0])/dist)
                                        after = mid_p + int(2.0 * len(gcp[0])/dist)
                                        x_p, y_p = gcp
                                        lat_1 = y_p[before]
                                        lat_2 = y_p[after]
                                        lon_1 = x_p[before]
                                        lon_2 = x_p[after]
                                # one CSV record per selected phase/station
                                ph_info = tt[m]['phase_name'] + ',' + \
                                    str(dist) + ',' + \
                                    str(tt[m]['time']) + ',' + \
                                    str(st[0].stats['sampling_rate']) + ',' + \
                                    st[0].stats['network'] + ',' + \
                                    st[0].stats['station'] + \
                                    ',' + st[0].stats['location'] + ',' + \
                                    st[0].stats['channel'] + ',' + \
                                    str(info_sac['stla']) + ',' + \
                                    str(info_sac['stlo']) + ',' + \
                                    str(info_sac['stdp']) + ',' + \
                                    str(info_sac['stel']) + ',' + \
                                    str(quake_d['lat']) + ',' + \
                                    str(quake_d['lon']) + ',' + \
                                    str(quake_d['dp']) + ',' + \
                                    '-----' + ',' + \
                                    str(lat_1) + ',' + \
                                    str(lon_1) + ',' + \
                                    str(lat_2) + ',' + \
                                    str(lon_2) + ',' + \
                                    '-----' + ',' + \
                                    ev_add[k].split('/')[-1] + ',' + \
                                    list_sta[i] + '\n'
                                #select = open(os.path.join(os.getcwd(), \
                                #    input['file'] + '-' + \
                                #    ev_add[k].split('/')[-1] + '.dat'), 'a')
                                select = open(os.path.join(os.getcwd(), \
                                    input['file'] + '.dat'), 'a')
                                select.writelines(ph_info)
                                select.close()
                            except Exception, e:
                                print e
                elif input['all_sta'] != None:
                    # unconditional path: record every station, no phase info
                    ph_info = 'NA' + ',' + 'NA' + ',' + \
                        'NA' + ',' + \
                        str(st[0].stats['sampling_rate']) + ',' + \
                        st[0].stats['network'] + ',' + st[0].stats['station'] + \
                        ',' + st[0].stats['location'] + ',' + \
                        st[0].stats['channel'] + ',' + \
                        str(info_sac['stla']) + ',' + \
                        str(info_sac['stlo']) + ',' + \
                        str(info_sac['stdp']) + ',' + \
                        str(info_sac['stel']) + ',' + \
                        str(quake_d['lat']) + ',' + str(quake_d['lon']) + ',' + \
                        str(quake_d['dp']) + ',' + \
                        ev_add[k].split('/')[-1] + ',' + \
                        list_sta[i] + '\n'
                    '''
                    select = open(os.path.join(os.getcwd(), \
                        input['file'] + '-' + \
                        ev_add[k].split('/')[-1] + '.dat'), 'a')
                    '''
                    select = open(os.path.join(os.getcwd(), \
                        input['file'] + '.dat'), 'a')
                    select.writelines(ph_info)
                    select.close()
            except Exception, e:
                # best-effort per-file processing: log and continue
                print e
                pass
def calcTTT(Config,StationList,Origin):
    """
    Build a travel-time grid for every station.

    For each station a dimX x dimY lat/lon grid centred on the origin is
    sampled; the ak135 travel time of phase Config['ttphase'] is stored
    per grid node.  Returns (mint, maxt, TTTGridMap) where mint/maxt are
    the global minimum/maximum travel times over ALL stations and
    TTTGridMap maps stream IDs to TTTGrid objects.
    """
    dimX = int(Config['dimX'])
    dimY = int(Config['dimY'])
    gridspacing = float(Config['gridspacing'])
    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])
    o_depth = float(Origin['depth'])
    logger.info(' BOUNDING BOX DIMX: %d DIMY: %d GRIDSPACING: %f \n'%(dimX,dimY,gridspacing))
    # upper-right corner of the bounding box (degrees offset by grid size)
    oLator = o_lat + dimX/2
    oLonor = o_lon + dimY/2
    oLatul = 0
    oLonul = 0
    mint= 100000
    maxt=-100000
    TTTGridMap = {}
    for station in StationList:
        GridArray = {}
        streamID = station.net+'.'+station.sta+'.'+station.loc+'.'+station.comp
        sdelta = locations2degrees(float(o_lat), float(o_lon), float(station.lat), float(station.lon))
        logger.info(' STATION: %s --> DELTA: %f'% (streamID,sdelta))
        z=0
        for i in xrange(dimX):
            # latitude of grid row i, grid centred on the origin
            oLatul = o_lat -((dimX-1)/2)*gridspacing + i*gridspacing
            if z == 0 and i == 0:
                # remember the lower-left corner latitude once
                Latul = oLatul
            o=0
            for j in xrange (dimY):
                # longitude of grid column j
                oLonul = o_lon -((dimY-1)/2)*gridspacing + j*gridspacing
                if o==0 and j==0:
                    # remember the lower-left corner longitude once
                    Lonul = oLonul
                de = locations2degrees(float(oLatul), float(oLonul), float(station.lat), float(station.lon))
                tt = getTravelTimes(delta=de,depth=o_depth,model='ak135')
                #print tt
                # store the node only if the first arrival is the wanted phase
                if tt[0]['phase_name'] == Config['ttphase']:
                    time = tt[0]['time']
                    #print streamID,oLatul,oLonul,' --------> ' ,time , ' -> \n'
                    GridArray[(i,j)] = GridElem(oLatul, oLonul, o_depth,time,de)
                    # track global min/max travel time across all stations
                    if (mint > time):
                        mint = time
                    if (maxt < time):
                        maxt = time
        TTTGridMap[streamID] = TTTGrid(o_depth,mint,maxt,Latul,Lonul,oLator,oLonor,GridArray)
    logger.info('\033[31m MINT: %g MAXT: %f \033[0m'% (mint,maxt))
    return mint, maxt,TTTGridMap
def doCalculation(Config,WaveformDict,FilterMetaData,mint,TTTGridMap,Folder,Origin):
    """
    Delay-and-sum semblance calculation over a lat/lon grid.

    For every time step the waveforms are shifted by the precomputed
    travel times and a semblance value is written per grid node to
    <Folder['semb']>/NNN.ASC; the per-step maximum goes to sembmax.txt.
    Returns a dict with lists 'value', 'lat', 'lon' of the semblance
    maxima per time step.
    """
    logger.info('%s' % ('Enter Semblance Calculation') )
    new_frequence = int(Config['new_frequence'])
    winlen = int(Config['winlen'])
    step = float(Config['step'])
    forerun = int(Config['forerun'])
    duration = int(Config['duration'])
    dimX = int(Config['dimX'])
    dimY = int(Config['dimY'])
    gridspacing = float(Config['gridspacing'])
    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999
    usedarrays = 5
    ntimes = int((forerun + duration)/step)
    nsamp = int(winlen * new_frequence);    # number of samples per time step
    nstep = int(step * new_frequence);      # number of samples between subsequent steps
    ############################################################################
    calcStreamMap = WaveformDict
    # shortest trace limits the usable sample count; d is the (last) record
    # start timestamp used in the ASC header
    for trace in calcStreamMap.iterkeys():
        recordstarttime = calcStreamMap[trace][0].stats.starttime
        d = calcStreamMap[trace][0].stats.starttime
        d = d.timestamp
        if calcStreamMap[trace][0].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace][0].stats.npts
    ############################################################################
    traces = np.ndarray(shape=(len(calcStreamMap),minSampleCount+1),dtype=float)
    traveltime = np.ndarray(shape=(len(calcStreamMap),dimX*dimY),dtype=float)
    latv = np.ndarray(dimX*dimY,dtype=float)
    lonv = np.ndarray(dimX*dimY,dtype=float)
    ############################################################################
    #for i in TTTGridMap.iterkeys():
    #    for j in TTTGridMap[i].GridArray.iterkeys():
    #        print j,TTTGridMap[i].GridArray[j].tt
    c=0
    streamCounter = 0
    print 'MSC: ',minSampleCount
    # copy samples and flatten the per-station travel-time grids into arrays
    for key in calcStreamMap.iterkeys():
        streamID = key
        c2 = 0
        for i in calcStreamMap[key][0]:
            traces[c][c2] = i
            #print 'C: ',c,' C2: ',c2
            c2 += 1
        for key in TTTGridMap.iterkeys():
            if streamID == key:
                print "IN BEIDEN DA", streamID, key
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                # NOTE(review): this is a no-op expression -- almost certainly
                # a missing `print` statement; confirm and fix deliberately.
                "NEIN", streamID, key
        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        maxt = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor
        gridElem = g.GridArray
        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        c += 1
        streamCounter += 1
    sembDict = {}
    sembmaxvalue = []
    sembmaxlat = []
    sembmaxlon = []
    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d'% (rc.day,rc.month,rc.year, rc.hour,rc.minute,rc.second)
    fobjsembmax = open(os.path.join(Folder['semb'],'sembmax.txt'),'w')
    for i in range(ntimes):
        logger.info('Zeitschritt %d' % i)
        fobj = open(os.path.join(Folder['semb'],'%03d.ASC' % i),'w')
        fobj.write('# %s , %s\n' % (d,rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n' % (step,ntimes,winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n'%(Latul,gridspacing,dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlat: %f \n'%(Lonul,gridspacing,dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')
        sembmax = 0
        sembmaxX = 0
        sembmaxY = 0
        for j in range(dimX):
            for m in range(dimY):
                nomin=0
                denom=0
                semb = 0
                for l in range(nsamp):
                    summe = 0
                    for k in range(nostat):
                        # sample index of the shifted window for station k
                        relstart_samples = (int)((traveltime[k][j*dimY+m]-mint) * new_frequence + 0.5) + i*nstep;
                        #summe += traces[k][relstart_samples+l]
                        #denom += traces[k][relstart_samples+l]*trace[k][relstart_samples+l]
                        tmp = traces[k][relstart_samples + l]
                        summe += tmp
                        denom += tmp ** 2
                    nomin += summe*summe;
                x = latv[j*dimY+m]
                y = lonv[j*dimY+m]
                # classical semblance: stacked energy / (N * single-trace energy)
                semb = nomin/(float(nostat)*denom);
                fobj.write('%.2f %.2f %f\n' % (x,y,semb))
                if semb > sembmax:
                    sembmax = semb;# search for maximum and position of maximum on semblance grid for given time step
                    sembmaxX = latv[j*dimY+m];
                    sembmaxY = lonv[j*dimY+m];
        # distance/azimuth of the semblance maximum relative to the origin
        delta = locations2degrees(float(sembmaxX), float(sembmaxY), float(Origin['lat']), float(Origin['lon']))
        azi = toAzimuth(float(Origin['lat']), float(Origin['lon']),float(sembmaxX), float(sembmaxY))
        #print i,sembmax,sembmaxX,sembmaxY,' DELTA: ',delta,' OLAT: ',Origin['lat'],' OLON: ',Origin['lon'],float(sembmaxX), float(sembmaxY)
        sembmaxvalue.append(sembmax)
        sembmaxlat.append(sembmaxX)
        sembmaxlon.append(sembmaxY)
        fobjsembmax.write('%d %.2f %.2f %f %d %03f %f %03f\n' % (i*step,sembmaxX,sembmaxY,sembmax,usedarrays,delta,float(azi),delta*119.19))
        fobj.close()
    fobjsembmax.close()
    sembDict['value'] = sembmaxvalue
    sembDict['lat'] = sembmaxlat
    sembDict['lon'] = sembmaxlon
    #staltatriggering(sembmaxvalue,sembmaxlat,sembmaxlon,ntimes)
    return sembDict
def select_sta():
    """
    Select required stations

    NOTE(review): this is a duplicate of the select_sta definition that
    appears earlier in this file; at import time this later definition
    shadows the earlier one -- confirm the duplication is intended.
    """
    global input
    # Basemap only used for great-circle points in the Pdiff branch below
    map_proj = Basemap(projection='cyl', llcrnrlat=-90,urcrnrlat=90,\
        llcrnrlon=-180,urcrnrlon=180, resolution='c')
    ev_file = open(os.path.join(os.getcwd(), 'quake_req.txt'), 'r')
    ev_add = ev_file.read().split('\n')[:-1]
    # truncate the output file once; each record is appended later
    select = open(os.path.join(os.getcwd(), input['file'] + '.dat'), 'w')
    select.close()
    for k in range(0, len(ev_add)):
        '''
        select = open(os.path.join(os.getcwd(), \
            input['file'] + '-' + ev_add[k].split('/')[-1] + \
            '.dat'), 'w')
        '''
        (quake_d, quake_t) = read_quake(ev_add[k])
        list_sta = glob.glob(os.path.join(ev_add[k], 'BH', \
            input['identity']))
        for i in range(0, len(list_sta)):
            try:
                st = read(list_sta[i])
                print '***************************************'
                print str(i) + '/' + str(len(list_sta)) + ' -- ' + \
                    str(k) + '/' + str(len(ev_add))
                print list_sta[i].split('/')[-1]
                info_sac = st[0].stats['sac']
                if input['all_sta'] == None:
                    # phase-based selection path
                    dist = locations2degrees(lat1 = quake_d['lat'], \
                        long1 = quake_d['lon'], lat2 = info_sac['stla'], \
                        long2 = info_sac['stlo'])
                    tt = getTravelTimes(delta=dist, depth=quake_d['dp'], \
                        model=input['model'])
                    for m in range(0, len(tt)):
                        if tt[m]['phase_name'] in input['phase']:
                            try:
                                print '--------------------'
                                print list_sta[i].split('/')[-1] + ' has ' + \
                                    tt[m]['phase_name'] + ' phase'
                                # optional downsampling to the requested frequency
                                if input['freq'] != None:
                                    st[0].decimate(int(round(\
                                        st[0].stats['sampling_rate'])/input['freq']), \
                                        no_filter=False)
                                    if st[0].stats['sampling_rate'] != input['freq']:
                                        print list_sta[i].split('/')[-1]
                                        print st[0].stats['sampling_rate']
                                        print '------------------------------------------'
                                '''
                                np_evt = round((events[0]['datetime'] - st[0].stats['starttime'])*st[0].stats['sampling_rate'])
                                np_pha = np_evt + round(tt[m]['time']*st[0].stats['sampling_rate'])
                                select = open(Address_events + '/' + events[l]['event_id'] + '/IRIS/info/' + name_select, 'a')
                                '''
                                if tt[m]['phase_name'] != 'Pdiff':
                                    # plain phases: endpoints are event and station
                                    lat_1 = str(quake_d['lat'])
                                    lon_1 = str(quake_d['lon'])
                                    lat_2 = str(info_sac['stla'])
                                    lon_2 = str(info_sac['stlo'])
                                elif tt[m]['phase_name'] == 'Pdiff':
                                    # Pdiff: pick two points near the middle of
                                    # the great-circle path instead
                                    dist_limit = 97.0
                                    num_gcp = 1000
                                    gcp = map_proj.gcpoints(quake_d['lon'], \
                                        quake_d['lat'], info_sac['stlo'], \
                                        info_sac['stla'], num_gcp)
                                    # NOTE(review): if dist < dist_limit the
                                    # lat_1/lon_1/... names stay unbound and the
                                    # inner except swallows the NameError -- confirm.
                                    if dist >= dist_limit:
                                        diff_dist = dist - dist_limit
                                        req_gcp = diff_dist * (float(num_gcp) / dist)
                                        req_gcp = round(req_gcp) / 2
                                        mid_p = len(gcp[0]) / 2
                                        #before = int(mid_p - req_gcp)
                                        #after = int(mid_p + req_gcp)
                                        before = mid_p - int( 2.0 * len(gcp[0]) / dist)
                                        after = mid_p + int( 2.0 * len(gcp[0]) / dist)
                                        x_p, y_p = gcp
                                        lat_1 = y_p[before]
                                        lat_2 = y_p[after]
                                        lon_1 = x_p[before]
                                        lon_2 = x_p[after]
                                # one CSV record per selected phase/station
                                ph_info = tt[m]['phase_name'] + ',' + \
                                    str(dist) + ',' + \
                                    str(tt[m]['time']) + ',' + \
                                    str(st[0].stats['sampling_rate']) + ',' + \
                                    st[0].stats['network'] + ',' + \
                                    st[0].stats['station'] + \
                                    ',' + st[0].stats['location'] + ',' + \
                                    st[0].stats['channel'] + ',' + \
                                    str(info_sac['stla']) + ',' + \
                                    str(info_sac['stlo']) + ',' + \
                                    str(info_sac['stdp']) + ',' + \
                                    str(info_sac['stel']) + ',' + \
                                    str(quake_d['lat']) + ',' + \
                                    str(quake_d['lon']) + ',' + \
                                    str(quake_d['dp']) + ',' + \
                                    '-----' + ',' + \
                                    str(lat_1) + ',' + \
                                    str(lon_1) + ',' + \
                                    str(lat_2) + ',' + \
                                    str(lon_2) + ',' + \
                                    '-----' + ',' + \
                                    ev_add[k].split('/')[-1] + ',' + \
                                    list_sta[i] + '\n'
                                #select = open(os.path.join(os.getcwd(), \
                                #    input['file'] + '-' + \
                                #    ev_add[k].split('/')[-1] + '.dat'), 'a')
                                select = open(os.path.join(os.getcwd(), \
                                    input['file'] + '.dat'), 'a')
                                select.writelines(ph_info)
                                select.close()
                            except Exception, e:
                                print e
                elif input['all_sta'] != None:
                    # unconditional path: record every station, no phase info
                    ph_info = 'NA' + ',' + 'NA' + ',' + \
                        'NA' + ',' + \
                        str(st[0].stats['sampling_rate']) + ',' + \
                        st[0].stats['network'] + ',' + st[0].stats['station'] + \
                        ',' + st[0].stats['location'] + ',' + \
                        st[0].stats['channel'] + ',' + \
                        str(info_sac['stla']) + ',' + \
                        str(info_sac['stlo']) + ',' + \
                        str(info_sac['stdp']) + ',' + \
                        str(info_sac['stel']) + ',' + \
                        str(quake_d['lat']) + ',' + str(quake_d['lon']) + ',' + \
                        str(quake_d['dp']) + ',' + \
                        ev_add[k].split('/')[-1] + ',' + \
                        list_sta[i] + '\n'
                    '''
                    select = open(os.path.join(os.getcwd(), \
                        input['file'] + '-' + \
                        ev_add[k].split('/')[-1] + '.dat'), 'a')
                    '''
                    select = open(os.path.join(os.getcwd(), \
                        input['file'] + '.dat'), 'a')
                    select.writelines(ph_info)
                    select.close()
            except Exception, e:
                # best-effort per-file processing: log and continue
                print e
                pass