def filterStations(StationList, Config, Origin, network):
    """Select stations matching the array's network patterns and distance range.

    :param StationList: iterable of station metadata objects (``.getcmpName()``,
        ``.lat``, ``.lon``, net/sta/loc/comp/ele/dip/azi/gain attributes)
    :param Config: raw configuration dict (wrapped in ``ConfigObj``)
    :param Origin: dict with at least ``'lat'`` and ``'lon'`` keys
    :param network: list of ``fnmatch``-style patterns, one per array element
    :return: list of ``Station`` objects within (mindist, maxdist) degrees,
        duplicates removed
    """
    cfg = ConfigObj(dict=Config)
    minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
    origin = Location(Origin['lat'], Origin['lon'])

    Logfile.red('Filter stations with configured parameters...')
    # NOTE: converted from py2 print-statements so the module also runs on py3.
    print('nr networks = %d' % len(network))
    print('nr stations = %d' % len(StationList))

    F = []
    for pattern in network:
        for i in StationList:
            # Skip stations whose component name does not match this pattern.
            if not fnmatch.fnmatch(i.getcmpName(), pattern):
                continue
            sdelta = loc2degrees(origin, Location(i.lat, i.lon))
            # Keep only stations strictly inside the configured distance band.
            if minDist < sdelta < maxDist:
                s = Station(i.net, i.sta, i.loc, i.comp, i.lat, i.lon,
                            i.ele, i.dip, i.azi, i.gain)
                if s not in F:
                    F.append(s)

    Logfile.red('%d STATIONS LEFT IN LIST' % len(F))
    return F
def calculateClusterCentre(Config, ClusterStationList):
    """Compute the centroid (mean lat/lon) of every cluster.

    Clusters are numbered 1..maxcluster; a cluster with no member stations
    yields a placeholder ``Centroid(0.0, 0.0, -1)``.
    """
    cfg = ConfigObj(dict=Config)
    centroids = []

    for cluster_id in range(1, cfg.Int('maxcluster') + 1):
        members = [s for s in ClusterStationList if s.member == cluster_id]

        if not members:
            # Empty cluster: keep list length stable with a sentinel entry.
            centroids.append(Centroid(0.0, 0.0, -1))
        else:
            mean_lat = sum(float(s.lat) for s in members) / len(members)
            mean_lon = sum(float(s.lon) for s in members) / len(members)
            centroids.append(Centroid(mean_lat, mean_lon, cluster_id))

    return centroids
def traveltimes(self, phase):
    """Compute theoretical travel times for *phase* at every station and read
    the corresponding cross-correlation waveform windows.

    :param phase: phase name passed to ``cake.PhaseDef`` (e.g. ``'P'``)
    :return: tuple ``(Wdict, SNR)`` — ordered dicts keyed by station name,
        holding the waveform window and its signal-to-noise estimate
    :raises Exception: if no arrival can be computed for a station
    """
    Logfile.red('Enter AUTOMATIC CROSSCORRELATION ')
    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n ')
    T = []
    Wdict = OrderedDict()
    SNR = OrderedDict()
    Config = self.Config
    cfg = ConfigObj(dict=Config)
    for i in self.StationMeta:
        Logfile.red('read in %s ' % (i))
        de = loc2degrees(self.Origin, i)
        Phase = cake.PhaseDef(phase)
        traveltime_model = cfg.Str('traveltime_model')
        # NOTE(review): model is re-loaded for every station — presumably
        # harmless but wasteful; confirm before hoisting out of the loop.
        model = cake.load_model('../data/' + traveltime_model)
        # Colosseo scenarios store depth in km already; otherwise convert.
        if cfg.colesseo_input() is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth * km, zstop=0.)
        try:
            ptime = arrivals[0].t
        except Exception:
            # Fallback: retry slightly above the nominal source depth before
            # giving up (ptime == 0 marks failure below).
            try:
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=self.Origin.depth * km - 2.1)
                ptime = arrivals[0].t
            except Exception:
                ptime = 0
        T.append(ptime)
        if ptime == 0:
            Logfile.red('Available phases for station %s in\
 range %f deegree' % (i, de))
            Logfile.red('you tried phase %s' % (phase))
            raise Exception("ILLEGAL: phase definition")
        else:
            tw = self.calculateTimeWindows(ptime)
            # Dispatch on the configured data source.
            if cfg.pyrocko_download() is True:
                w, snr = self.readWaveformsCross_pyrocko(i, tw, ptime)
            elif cfg.colesseo_input() is True:
                w, snr = self.readWaveformsCross_colesseo(i, tw, ptime)
            else:
                w, snr = self.readWaveformsCross(i, tw, ptime)
            Wdict[i.getName()] = w
            SNR[i.getName()] = snr
    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++ ')
    Logfile.red('Exit AUTOMATIC FILTER ')
    return Wdict, SNR
def readWaveformsCross_pyrocko(self, station, tw, ttime):
    """Load, filter and trim the waveform for *station* from the pyrocko
    mseed files, and estimate a pre-arrival noise variance.

    :param station: station metadata object (net/sta/loc/comp attributes)
    :param tw: time-window dict with ``'xcorrstart'`` / ``'xcorrend'``
    :param ttime: theoretical travel time in seconds
    :return: ``(stream, snr)`` — filtered obspy stream and noise variance;
        ``('', '')`` if no matching trace exists
    """
    obspy_compat.plant()
    cfg = ConfigObj(dict=self.Config)
    t2 = UTCDateTime(self.Origin.time)

    if cfg.quantity() == 'displacement':
        # Prefer rotated traces; fall back to restituted ones.
        try:
            traces = io.load(self.EventPath + '/data/traces_rotated.mseed')
        except Exception:
            traces = io.load(self.EventPath + '/data/traces_restituted.mseed')
    else:
        traces = io.load(self.EventPath + '/data/traces_velocity.mseed')

    # BUG FIX: the original wrapped this search in "while found is False:",
    # which spun forever when no trace matched the station. One pass over
    # the traces is sufficient; a miss is reported and returns empty values.
    for tr in traces:
        tr_name = str(tr.network + '.' + tr.station + '.' +
                      tr.location + '.' + tr.channel[:3])
        if tr_name == str(station)[:-2] or tr_name == str(station)[:]:
            traces_station = tr
            es = obspy_compat.to_obspy_trace(traces_station)
            streamData = station.net + '.' + station.sta + '.'\
                + station.loc + '.' + station.comp + '.D.'\
                + str(t2.year) + '.' + str("%03d" % t2.julday)
            st = obspy.Stream()
            st.extend([es])
            if station.loc == '--':
                station.loc = ''
            if len(st.get_gaps()) > 0:
                st.merge(method=0, fill_value='interpolate',
                         interpolation_samples=0)
            # Noise estimate: variance of the window ending 20 s before the
            # theoretical arrival.
            snr_trace = traces_station.chop(
                tmin=traces_station.tmin,
                tmax=traces_station.tmin + ttime - 20.,
                inplace=False)
            snr = num.var(snr_trace.ydata)
            stream = self.filterWaveform(st)
            xname = os.path.join(self.AF, (streamData + '_all.mseed'))
            stream.write(xname, format='MSEED')
            stream.trim(tw['xcorrstart'], tw['xcorrend'])
            return stream, snr

    print('Waveform missing! %s' % str(station))
    return '', ''
def readWaveformsPicker_colos(self, station, tw, Origin, ttime):
    """Load the colosseo scenario waveform for *station*, filter it and trim
    it to the cross-correlation window, computing a noise variance on the way.

    :param station: station metadata object
    :param tw: time-window dict with ``'xcorrstart'`` / ``'xcorrend'``
    :param Origin: unused here; kept for signature parity with sibling readers
    :param ttime: theoretical travel time in seconds
    :return: filtered, trimmed obspy stream (``None`` implicitly if no trace
        matches the station)
    """
    obspy_compat.plant()
    pjoin = os.path.join
    Config = self.Config
    cfg = ConfigObj(dict=Config)
    Syn_in = self.Syn_in
    syn_in = SynthCfg(Syn_in)
    t2 = UTCDateTime(self.Origin.time)
    sdspath = os.path.join(self.EventPath, 'data')
    # The scenario yml path ends with 'scenario.yml' (12 chars) — strip it
    # and load the sibling mseed file instead.
    traces = io.load(cfg.colosseo_scenario_yml()[:-12] + 'scenario.mseed')
    for tr in traces:
        tr_name = str(tr.network + '.' + tr.station + '.' +
                      tr.location + '.' + tr.channel[:3])
        if tr_name == str(station):
            traces_station = tr
            es = obspy_compat.to_obspy_trace(traces_station)
            # SDS-style naming: NET.STA.LOC.CHAN.D.YEAR.JULDAY
            streamData = station.net + '.' + station.sta + '.' + station.loc \
                + '.' + station.comp + '.D.' + str(t2.year) + '.' \
                + str("%03d" % t2.julday)
            # NOTE(review): 'entry' is computed but never used — left in
            # place to keep behavior identical.
            entry = os.path.join(sdspath, station.net, station.sta,
                                 station.comp + '.D', streamData)
            st = obspy.Stream()
            st.extend([es])
            stream = ''
            snr = ''
            if station.loc == '--':
                station.loc = ''
            if len(st.get_gaps()) > 0:
                st.merge(method=0, fill_value='interpolate',
                         interpolation_samples=0)
            # Noise estimate from the pre-arrival window (up to 20 s before
            # the theoretical travel time); computed but not returned.
            snr_trace = traces_station.chop(
                tmin=traces_station.tmin,
                tmax=traces_station.tmin + ttime - 20.,
                inplace=False)
            snr = num.var(snr_trace.ydata)
            stream = self.filterWaveform(st)
            xname = os.path.join(self.AF, (streamData + '_all.mseed'))
            stream.write(xname, format='MSEED')
            stream.trim(tw['xcorrstart'], tw['xcorrend'])
            return stream
        else:
            pass
def checkStationAroundInitialCentroid(station, Config, StationMetaList):
    """Count how many stations of *StationMetaList* lie within the configured
    'initialstationdistance' (in degrees) of *station*."""
    cfg = ConfigObj(dict=Config)
    limit = cfg.Float('initialstationdistance')
    return sum(1 for other in StationMetaList
               if loc2degrees(station, other) < limit)
def deleteFarStations(CentroidList, StationClusterList, Config):
    """Detach stations farther than 'stationdistance' degrees from their
    cluster centroid, then drop all detached stations from the list.

    :param CentroidList: cluster centroids (``.rank`` identifies the cluster)
    :param StationClusterList: stations with ``.member`` cluster assignment
    :param Config: raw configuration dict
    :return: the (mutated) StationClusterList with far stations removed
    """
    cfg = ConfigObj(dict=Config)
    stationdistance = int(cfg.Distance('stationdistance'))

    # Mark stations that are too far from their own centroid.
    for centroid in CentroidList:
        for station in StationClusterList:
            if centroid.rank == station.member and \
                    loc2degrees(centroid, station) > stationdistance:
                station.member = -1

    # BUG FIX: the original deleted by index while enumerating the same list,
    # which skips the element after every deletion (consecutive marked
    # stations survived). Rebuild in place instead so callers holding a
    # reference to the list still see the update.
    StationClusterList[:] = [s for s in StationClusterList if s.member != -1]
    return StationClusterList
def kmean(Config, inputCentroid, FilterMeta, counter, Folder, Origin, flag):
    """One (recursive) iteration of the k-means station clustering.

    Assigns stations to the given centroids, detaches stations farther than
    'initialstationdistance' from their centroid, recomputes centroids and
    recurses until all 'maxcluster' centroids are stable or the 'cutoff'
    iteration count is reached. Terminates the whole program via
    ``endcheck(...)`` + ``sys.exit()`` — it never returns normally.
    """
    counter += 1
    Logfile.add('COUNTER ' + str(counter) + ' CUTOFF ' + Config['cutoff'])
    cfg = ConfigObj(dict=Config)

    # Hard iteration limit: finalize with the current state and stop.
    if counter == cfg.UInt('cutoff'):
        endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
        sys.exit()

    scl = stationBelongToCluster(Config, inputCentroid, FilterMeta)

    # Detach stations too far from the centroid of their assigned cluster.
    # acounter tracks the 1-based cluster index of centroid `a`.
    acounter = 1
    for a in inputCentroid:
        for i in scl:
            if acounter == i.member:
                delta = loc2degrees(i, a)
                if delta > cfg.Float('initialstationdistance'):
                    i.member = -1
        acounter += 1

    nsc = calculateClusterCentre(Config, scl)
    # t = number of centroids that did not move (per compareClusterCentre).
    t = compareClusterCentre(inputCentroid, nsc, Config)

    Logfile.add('ITERATIONSTEP: ---> ' + str(counter) +
                ' <-----------------------------')

    # NOTE(review): the recursive call exits via sys.exit() deep in the
    # recursion, so this 'while' acts as an 'if' — the loop body never runs
    # twice. Kept as-is.
    while t != cfg.UInt('maxcluster'):
        Logfile.add('ANZAHL DER JO in KMEAN: ' + str(t))
        kmean(Config, nsc, FilterMeta, counter, Folder, Origin, flag)

    endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
    sys.exit()
def readWaveformsPicker_pyrocko(self, station, tw, Origin, ttime):
    """Load the downloaded pyrocko waveform for *station* (exact name match),
    filter it and trim it to the cross-correlation window.

    :param station: station metadata object; matched by exact trace name
    :param tw: time-window dict with ``'xcorrstart'`` / ``'xcorrend'``
    :param Origin: unused here; kept for signature parity with sibling readers
    :param ttime: theoretical travel time (unused after the commented-out
        signoise call was disabled)
    :return: filtered, trimmed obspy stream (``None`` implicitly if no trace
        matches)
    """
    obspy_compat.plant()
    cfg = ConfigObj(dict=self.Config)
    t2 = UTCDateTime(self.Origin.time)
    sdspath = os.path.join(self.EventPath, 'data')
    # Restituted traces carry displacement; plain traces otherwise.
    if cfg.quantity() == 'displacement':
        traces = io.load(self.EventPath + '/data/traces_restituted.mseed')
    else:
        traces = io.load(self.EventPath + '/data/traces.mseed')
    for tr in traces:
        tr_name = str(tr.network + '.' + tr.station + '.' +
                      tr.location + '.' + tr.channel[:3])
        if tr_name == str(station):
            traces_station = tr
            es = obspy_compat.to_obspy_trace(traces_station)
            # SDS-style naming: NET.STA.LOC.CHAN.D.YEAR.JULDAY
            streamData = station.net + '.' + station.sta + '.' + station.loc \
                + '.' + station.comp + '.D.' + str(t2.year) + '.' \
                + str("%03d" % t2.julday)
            # NOTE(review): 'entry' is computed but never used — left in
            # place to keep behavior identical.
            entry = os.path.join(sdspath, station.net, station.sta,
                                 station.comp + '.D', streamData)
            st = obspy.Stream()
            st.extend([es])
            stream = ''
            snr = ''
            if station.loc == '--':
                station.loc = ''
            if len(st.get_gaps()) > 0:
                st.merge(method=0, fill_value='interpolate',
                         interpolation_samples=0)
            stream = self.filterWaveform(st)
            xname = os.path.join(self.AF, (streamData + '_all.mseed'))
            stream.trim(tw['xcorrstart'], tw['xcorrend'])
            return stream
        else:
            pass
def filterStations(StationList, Config, Origin):
    """Return ``Station`` copies of all entries of *StationList* lying
    strictly between the configured 'mindist' and 'maxdist' (degrees)
    from the origin."""
    cfg = ConfigObj(dict=Config)
    minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
    origin = DataTypes.dictToLocation(Origin)

    Logfile.red('Filter stations with configured parameters')

    F = [Station(s.net, s.sta, s.loc, s.comp, s.lat, s.lon,
                 s.ele, s.dip, s.azi, s.gain)
         for s in StationList
         if minDist < loc2degrees(origin, s) < maxDist]

    Logfile.red('%d STATIONS LEFT IN LIST' % len(F))
    return F
def readWaveforms_colesseo(stationlist, w, EventPath, Origin, C):
    """Build an ordered dict of obspy streams, one per station in
    *stationlist*, from the colosseo scenario mseed file."""
    Wdict = OrderedDict()
    cfg = ConfigObj(dict=C.parseConfig('config'))
    matched = []
    traces = io.load(cfg.colosseo_scenario_yml()[:-12] + 'scenario.mseed')

    for trace in traces:
        for meta in stationlist:
            name = str(trace.network + '.' + trace.station + '.' +
                       trace.location + '.' + trace.channel[:3])
            if name == str(meta):
                stream = obspy.Stream()
                stream.extend([obspy_compat.to_obspy_trace(trace)])
                matched.append(trace)
                Wdict[meta.getName()] = stream

    return Wdict
def process(self):
    """Read station metadata for the event (pyrocko download, colosseo
    scenario, or plain meta file), filter it by distance and run the
    clustering.

    :return: True (clustering errors are logged, not raised)
    """
    t = time.time()
    C = config.Config(self.eventpath)
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    Origin = C.parseConfig('origin')

    if cfg.pyrocko_download() is True:
        # Restituted (displacement) data uses a different station reader path.
        disp = cfg.quantity() == 'displacement'
        Meta = readpyrockostations(self.eventpath, disp)
    elif cfg.colesseo_input() is True:
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_path = cfg.colosseo_scenario_yml()[:-12]
        Meta = readcolosseostations(scenario_path)
        # Overwrite the origin with the scenario's first event.
        events = scenario.get_events()
        ev = events[0]
        Origin['strike'] = str(ev.moment_tensor.strike1)
        Origin['rake'] = str(ev.moment_tensor.rake1)
        Origin['dip'] = str(ev.moment_tensor.dip1)
        Origin['lat'] = str(ev.lat)
        Origin['lon'] = str(ev.lon)
        Origin['depth'] = str(ev.depth / 1000.)
    else:
        Meta = readMetaInfoFile(self.eventpath)

    Folder = createFolder(self.eventpath)
    FilterMeta = filterStations(Meta, Config, Origin)

    try:
        km(Config, FilterMeta, Folder, Origin, t)
    except Exception:
        # BUG FIX: failures were silently swallowed ('except: pass'),
        # hiding clustering errors entirely. Keep the best-effort behavior
        # but record what went wrong.
        Logfile.exception('station clustering failed')
    return True
def addOK(station, stationList, Config, MetaList):
    """Decide whether *station* may be added as a new initial centroid.

    Returns 1 when the station is far enough ('centroidmindistance') from
    the existing centroid and has at least 'minstationaroundinitialcluster'
    neighbours; otherwise 0.

    NOTE(review): the loop returns during its first iteration in every
    branch, so only the first entry of *stationList* is ever examined —
    preserved as-is.
    """
    cfg = ConfigObj(dict=Config)
    minDist = cfg.Distance('centroidmindistance')
    minAround = cfg.UInt('minstationaroundinitialcluster')

    for existing in stationList:
        if loc2degrees(station, existing) > minDist:
            neighbours = checkStationAroundInitialCentroid(station, Config,
                                                           MetaList)
            return 1 if neighbours >= minAround else 0
        return 0

    return 0
def readWaveformsPicker_pyrocko(self, station, tw, Origin, ttime):
    """Load the pyrocko waveform for *station* (tolerating a trailing
    2-character suffix in the station name), filter it and trim it to the
    cross-correlation window.

    :param station: station metadata object
    :param tw: time-window dict with ``'xcorrstart'`` / ``'xcorrend'``
    :param Origin: unused; kept for signature parity with sibling readers
    :param ttime: unused; kept for signature parity with sibling readers
    :return: filtered, trimmed obspy stream (``None`` implicitly on no match)
    """
    obspy_compat.plant()
    cfg = ConfigObj(dict=self.Config)

    if cfg.quantity() == 'displacement':
        # Prefer rotated traces; fall back to restituted ones.
        try:
            traces = io.load(self.EventPath + '/data/traces_rotated.mseed')
        # BUG FIX: was a bare 'except:', which also swallowed
        # KeyboardInterrupt / SystemExit.
        except Exception:
            traces = io.load(self.EventPath + '/data/traces_restituted.mseed')
    else:
        traces = io.load(self.EventPath + '/data/traces.mseed')

    for tr in traces:
        tr_name = str(tr.network + '.' + tr.station + '.' +
                      tr.location + '.' + tr.channel[:3])
        if tr_name == str(station)[:-2] or tr_name == str(station)[:]:
            traces_station = tr
            es = obspy_compat.to_obspy_trace(traces_station)
            st = obspy.Stream()
            st.extend([es])
            if station.loc == '--':
                station.loc = ''
            if len(st.get_gaps()) > 0:
                st.merge(method=0, fill_value='interpolate',
                         interpolation_samples=0)
            stream = self.filterWaveform(st)
            stream.trim(tw['xcorrstart'], tw['xcorrend'])
            return stream
def readWaveformsPicker_colos(self, station, tw, Origin, ttime):
    """Load the colosseo scenario waveform for *station* (exact name match),
    filter it and trim it to the cross-correlation window.

    :param station: station metadata object
    :param tw: time-window dict with ``'xcorrstart'`` / ``'xcorrend'``
    :param Origin: unused; kept for signature parity with sibling readers
    :param ttime: unused; kept for signature parity with sibling readers
    :return: filtered, trimmed obspy stream (``None`` implicitly on no match)
    """
    obspy_compat.plant()
    Config = self.Config
    cfg = ConfigObj(dict=Config)
    traces = io.load(cfg.colosseo_scenario_yml()[:-12] + 'scenario.mseed')

    # BUG FIX: the original iterated over 'traces' twice (a redundant outer
    # loop whose tr_name was immediately recomputed by the inner loop).
    # A single pass finds the same first matching trace.
    for tr in traces:
        tr_name = str(tr.network + '.' + tr.station + '.' +
                      tr.location + '.' + tr.channel[:3])
        if tr_name == str(station):
            traces_station = tr
            es = obspy_compat.to_obspy_trace(traces_station)
            st = obspy.Stream()
            st.extend([es])
            if station.loc == '--':
                station.loc = ''
            if len(st.get_gaps()) > 0:
                st.merge(method=0, fill_value='interpolate',
                         interpolation_samples=0)
            stream = self.filterWaveform(st)
            stream.trim(tw['xcorrstart'], tw['xcorrend'])
            return stream
def calcTTTAdvTauP(Config, station, Origin, flag, Xcorrshift=None,
                   Refshift=None):
    """Compute an xcorr-shifted travel-time grid for one station by driving
    the external ``taup_time`` tool, and pickle the result.

    :param Config: raw configuration dict ('dimx', 'dimy', 'gridspacing')
    :param station: station metadata object
    :param Origin: dict with 'lat', 'lon', 'depth'
    :param flag: process index; prefixes all temp and output file names
    :param Xcorrshift: dict of per-station xcorr shifts applied to each time
    :param Refshift: unused; kept for signature compatibility
    """
    cfg = ConfigObj(dict=Config)
    dimX = cfg.Int('dimx')
    dimY = cfg.Int('dimy')
    gridspacing = cfg.Float('gridspacing')

    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])
    o_depth = float(Origin['depth'])

    # Upper-right corner of the grid.
    oLator = o_lat + dimX / 2
    oLonor = o_lon + dimY / 2
    oLatul = 0
    oLonul = 0

    TTTGridMap = {}
    LMINMAX = []
    GridArray = {}
    locStation = Location(station.lat, station.lon)
    sdelta = loc2degrees(Location(o_lat, o_lon), locStation)
    Logfile.add('TTT PROCESS %d STATION: %s --> DELTA: %f'
                % (flag, station.getName(), sdelta))

    inputpath = str(flag) + '-' + station.getName() + ".input"
    outputpath = str(flag) + '-' + station.getName() + ".output"
    errorpath = str(flag) + '-' + station.getName() + '.error'

    # Write the taup_time batch input: one 'e' entry per grid node.
    fobjinput = open(inputpath, 'w')
    fobjinput.write('s\n')
    fobjinput.write(('%s %s\n') % (station.lat, station.lon))
    fobjinput.write('h\n')
    fobjinput.write(('%s\n') % (o_depth))
    for i in xrange(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing
        for j in xrange(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing
            fobjinput.write('e\n')
            fobjinput.write(('%s %s\n') % (oLatul, oLonul))
    fobjinput.close()

    # NOTE(review): shell=True with interpolated file names — paths are
    # generated locally, but keep station names shell-safe.
    cmd = ('taup_time -ph P -mod ak135 -time -o %s < %s > %s') % (
        outputpath, inputpath, errorpath)
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    p.wait()

    L = []
    output = open(outputpath, 'r')
    # BUG FIX: the original line "'OUTPUT: ', outputpath" was a bare tuple
    # expression (a no-op) — presumably a print that lost its keyword.
    Logfile.add('OUTPUT: ' + outputpath)
    for k in output:
        k = k.split()
        if len(k) == 1:
            tt = k[0].replace('\n', '')
            # Apply the station's cross-correlation shift to each time.
            tt = float(tt) - float(Xcorrshift[station.getName()].shift)
            L.append(tt)
    output.close()

    z = 0
    for i in xrange(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing
        if z == 0 and i == 0:
            Latul = oLatul
        o = 0
        for j in xrange(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing
            if o == 0 and j == 0:
                Lonul = oLonul
            de = loc2degrees(Location(oLatul, oLonul), locStation)
            # BUG FIX: L was filled row-major with dimY columns per row, so
            # the flat index of (row i, col j) is i*dimY + j. The original
            # used i*dimX + j, which is only correct for square grids.
            time = L[i * dimY + j]
            GridArray[(i, j)] = GridElem(oLatul, oLonul, o_depth, time, de)
            LMINMAX.append(time)

    mint = float(min(LMINMAX))
    maxt = float(max(LMINMAX))
    k = MinTMaxT(mint, maxt)

    TTTGridMap[station.getName()] = TTTGrid(o_depth, mint, maxt, Latul,
                                            Lonul, oLator, oLonor, GridArray)
    tttname = str(flag) + '-ttt.pkl'
    Basic.dumpToFile(tttname, TTTGridMap)
    Basic.dumpToFile('minmax-' + str(flag) + '.pkl', k)

    try:
        os.remove(inputpath)
        os.remove(outputpath)
        os.remove(errorpath)
    # BUG FIX: narrowed from a bare 'except:' — only file-system errors are
    # expected here.
    except OSError:
        Logfile.exception('cannot delete files')
def run_parallel(options):
    '''
    Starts station search procedure.

    In client mode (options.args given) runs a StationListClient; otherwise
    acts as the server: builds the network list from IRIS and Geofon/WEBDC
    and fans the work out over Server.ServerBase.

    :type options: instance
    :param options: parameter to initialize the networklist class
    :return: True on success, False on init or server failure
    '''
    # BUG FIX: identity comparison for None ('!= None' -> 'is not None').
    isClient = (options.args is not None)

    if not init(options):
        return False

    if isClient:
        # Run client.
        clt = StationListClient(options)
        clt.run()
        return True

    # Run server: create directory for clients.
    clientDir = os.path.join(options.evpath, 'keyfiles-' + str(time.time()))
    Logfile.add('Create keyfile directory ', clientDir, ' ')
    create_dir(clientDir)

    # Build network list.
    C = config.Config(options.evpath)
    Origin = C.parseConfig('origin')
    Conf = Globals.ConfigDict
    checkConfigFile(Conf)

    globalCfg = ConfigObj(dict=Conf)
    originCfg = ConfigObj(dict=Origin)

    ot = originCfg.Time()
    elat = originCfg.lat()
    elon = originCfg.lon()
    minDist = globalCfg.Distance('mindist')
    maxDist = globalCfg.Distance('maxdist')
    duration = globalCfg.Duration()
    paramList = [ot, maxDist, minDist, elat, elon]

    BL = []
    if 'blacklist' in Conf:
        K = (Conf['blacklist']).split(',')
        BL = ['# Network Code']
        BL.extend(K)

    T = NetworkList(ot, elat, elon, minDist, maxDist, duration,
                    blacklist=BL, mail=Conf['mail'])
    SERVER_NAME = 'network'

    # Handle IRIS networks.
    inetworks = T.getIRISList()
    if len(inetworks) == 0:
        Logfile.error('No iris networks found')
    else:
        args = Server.joinClientArgs([IRIS_TAG, clientDir], paramList)
        ctrl = Server.ServerCtrl(nRetries=1, nParallel=1, waitTime=1.0,
                                 printStat=False)
        srv = Server.ServerBase(SERVER_NAME, checkProcessError, ctrl)
        if not srv.run(inetworks, args):
            return False

    # Handle Geofon networks.
    gnetworks = T.getWEBDCList()
    if len(gnetworks) == 0:
        Logfile.error('No geofon networks found')
    else:
        args = Server.joinClientArgs([GEOFON_TAG, clientDir], paramList)
        ctrl = Server.ServerCtrl(nRetries=4, nParallel=1, waitTime=2.0,
                                 printStat=False)
        srv = Server.ServerBase(SERVER_NAME, checkProcessError, ctrl)
        if not srv.run(gnetworks, args):
            return False

    # Print statistic.
    nIres = len(inetworks)
    nWebDC = len(gnetworks)
    nAll = nIres + nWebDC

    if nIres != 0:
        Logfile.add(' ', 'Processed ' + str(nIres) + ' IRES networks')
    if nWebDC != 0:
        Logfile.add('Processed ' + str(nWebDC) + ' WEBDC networks')
    if nAll == 0:
        return Logfile.error('No networks found')

    if nIres == 0:
        err = 'No IRIS network found'
    elif nWebDC == 0:
        err = 'No WEBDC network found'
    else:
        err = None

    if err is not None:
        Logfile.add(err)

    # Show next processing step to the user.
    evpath = options.evpath.split('/')[-1]
    keyfoldername = clientDir.split('/')[-1]
    Logfile.add(' ', 'NEXT PROCESSING STEP:', ' ')
    Logfile.add('   1) change keyfolder value in global.conf to '
                + keyfoldername)
    Logfile.add('   2) python arraytool.py getdata ' + evpath, ' ')
    return True
def processLoop():
    """Main processing driver: read meta info, optionally cross-correlate
    per array (with an interactive network-selection prompt), then loop over
    filters and depths computing semblance for every array.

    Relies on module-level ``evpath`` and many project helpers; exits the
    interpreter on 'quit'/'rerun' user input.
    """
    # ---- get meta info -------------------------------------------------
    C = config.Config(evpath)
    Origin = C.parseConfig('origin')
    try:
        Syn_in = C.parseConfig('syn')
        syn_in = SynthCfg(Syn_in)
    except:
        pass
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)

    if cfg.pyrocko_download() == True:
        Meta = C.readpyrockostations()
    elif cfg.colesseo_input() == True:
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_path = cfg.colosseo_scenario_yml()[:-12]
        Meta = C.readcolosseostations(scenario_path)
    else:
        Meta = C.readMetaInfoFile()

    # ---- prerequisites -------------------------------------------------
    Folder = C.createFolder()
    C.writeConfig(Config, Origin, Folder)

    filter = FilterCfg(Config)
    # Number of semblance time steps over forerun + duration.
    ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))
                 / cfg.UInt('step'))
    origin = OriginCfg(Origin)

    if cfg.colesseo_input() == True:
        # Colosseo scenario: take source parameters from the first event.
        from pyrocko import util
        events = scenario.get_events()
        ev = events[0]
        origin.strike = str(ev.moment_tensor.strike1)
        origin.rake = str(ev.moment_tensor.rake1)
        origin.dip = str(ev.moment_tensor.dip1)
        strike = ev.moment_tensor.strike1
        origin.lat = str(ev.lat)
        origin.lon = str(ev.lon)
        origin.depth = str(ev.depth / 1000.)
        depth = ev.depth
        origin.time = util.time_to_str(ev.time)
        time_ev = util.time_to_str(ev.time)
        lat = ev.lat
        lon = ev.lon
        rake = ev.moment_tensor.rake1
        dip = ev.moment_tensor.dip1
        Origin['strike'] = str(ev.moment_tensor.strike1)
        Origin['rake'] = str(ev.moment_tensor.rake1)
        Origin['dip'] = str(ev.moment_tensor.dip1)
        Origin['lat'] = str(ev.lat)
        Origin['lon'] = str(ev.lon)
        Origin['time'] = util.time_to_str(ev.time)
        Origin['depth'] = str(ev.depth / 1000.)
        ev = Event(lat, lon, depth, time_ev,
                   strike=strike, dip=dip, rake=rake)
    else:
        default = 0
        strike = origin.strike(default)
        dip = origin.dip(default)
        rake = origin.rake(default)
        ev = Event(origin.lat(), origin.lon(), origin.depth(), origin.time(),
                   strike=strike, dip=dip, rake=rake)

    filtername = filter.filterName()
    Logfile.add('filtername = ' + filtername)

    # ---- cross-correlation for all arrays before processing ------------
    XDict = {}
    RefDict = {}
    SL = {}
    if cfg.Int('xcorr') == 1:
        newFreq = str(filter.newFrequency())
        fobjreferenceshiftname = newFreq + '_' + filtername + '.refpkl'
        rp = os.path.join(Folder['semb'], fobjreferenceshiftname)
        fobjpickleshiftname = newFreq + '_' + filtername + '.xcorrpkl'
        ps = os.path.join(Folder['semb'], fobjpickleshiftname)

        if (os.path.isfile(rp) and os.path.getsize(rp) != 0
                and os.path.isfile(ps) and os.path.getsize(ps) != 0):
            # Cached shifts exist: load instead of recomputing.
            Logfile.add('file exits : ' + rp)
            Logfile.add('load refshifts')
            f = open(rp)
            RefDict = pickle.load(f)
            x = open(ps)
            XDict = pickle.load(x)
            xcorrnetworks = cfg.String('networks').split(',')
            for i in xcorrnetworks:
                SL[i] = len(Config[i].split('|'))
        else:
            # Run the cross-correlation per network and cache the results.
            SL = {}
            xcorrnetworks = cfg.String('networks').split(',')
            for i in xcorrnetworks:
                W = {}
                refshift = 0
                network = cfg.String(i).split('|')
                FilterMeta = ttt.filterStations(Meta, Config, Origin, network)
                arrayfolder = os.path.join(Folder['semb'], i)
                if os.access(arrayfolder, os.F_OK) == False:
                    os.makedirs(arrayfolder)
                # NOTE(review): both branches construct Xcorr identically —
                # the pyrocko_download distinction looks vestigial here.
                if cfg.pyrocko_download() == True:
                    A = Xcorr(ev, FilterMeta, evpath, Config, Syn_in,
                              arrayfolder)
                else:
                    A = Xcorr(ev, FilterMeta, evpath, Config, Syn_in,
                              arrayfolder)
                print "run Xcorr"
                W, triggerobject = A.runXcorr()
                XDict[i] = W
                RefDict[i] = triggerobject.tdiff
                SL[i] = len(network)
            fobjrefshift = open(rp, 'w')
            pickle.dump(RefDict, fobjrefshift)
            fobjrefshift.close()
            output = open(ps, 'w')
            pickle.dump(XDict, output)
            output.close()

        for i in sorted(XDict.iterkeys()):
            Logfile.red('Array %s has %3d of %3d Stations left'
                        % (i, len(XDict[i]), SL[i]))
        logger.info(
            '\033[31mFor proceeding without changes press enter or give new comma seperatet network list or quit for exit\033[0m'
        )

        # Interactive network selection loop.
        while True:
            nnl = raw_input("please enter your choice: ")
            if len(nnl) == 0:
                # Empty input: keep the configured networks.
                if not Basic.question('Process all networks ?'):
                    continue
                Logfile.red('This networks will be used for processing: %s'
                            % (Config['networks']))
                break
            elif str(nnl) == 'quit':
                sys.exit()
            elif str(nnl) == 'rerun':
                # Drop cached shifts and restart the whole process step.
                event = os.path.join(*evpath.split('/')[-1:])
                try:
                    os.remove(rp)
                    os.remove(ps)
                except:
                    pass
                mainfolder = os.path.join(os.path.sep,
                                          *evpath.split('/')[:-2])
                os.chdir(mainfolder)
                cmd = ('%s arraytool.py process %s') % (sys.executable, event)
                Logfile.add('cmd = ' + cmd)
                os.system(cmd)
                sys.exit()
            else:
                # Check if selected array(s) exists.
                names = nnl.split(',')
                isOk = True
                for array in names:
                    arrayfolder = os.path.join(Folder['semb'], array)
                    if not os.path.isdir(arrayfolder):
                        Logfile.error('Illegal network name ' + str(array))
                        isOk = False
                        break
                if not isOk:
                    continue  # Illegal network: input again.
                # Use these networks.
                Logfile.add('This networks will be used for processing: %s'
                            % (nnl))
                Config['networks'] = nnl
                break

        for i in range(3, 0, -1):
            time.sleep(1)
            Logfile.red('Start processing in %d seconds ' % (i))

    # ---- loop over filters and depths ----------------------------------
    wd = Origin['depth']
    start, stop, step = cfg.String('depths').split(',')
    start = int(start)
    stop = int(stop) + 1
    step = int(step)
    filters = cfg.String('filters')
    filters = int(filters)
    Logfile.add('working on ' + Config['networks'])

    for filterindex in xrange(0, filters):
        for depthindex in xrange(start, stop, step):
            workdepth = float(wd) + depthindex
            Origin['depth'] = workdepth
            ev = Event(Origin['lat'], Origin['lon'], Origin['depth'],
                       Origin['time'], strike=strike, dip=dip, rake=rake)
            Logfile.add('WORKDEPTH: ' + str(Origin['depth']))

            # ---- loop over arrays --------------------------------------
            ASL = []
            weights = []
            array_centers = []
            networks = Config['networks'].split(',')
            counter = 1
            TriggerOnset = []
            Wdfs = []
            for i in networks:
                arrayname = i
                arrayfolder = os.path.join(Folder['semb'], arrayname)
                network = Config[i].split('|')
                Logfile.add('network: ' + str(network))
                FilterMeta = ttt.filterStations(Meta, Config, Origin, network)
                # Arrays with fewer than 3 stations are skipped.
                if len(FilterMeta) < 3:
                    continue
                W = XDict[i]
                refshift = RefDict[i]
                FilterMeta = cmpFilterMetavsXCORR(W, FilterMeta)

                Logfile.add(
                    'BOUNDING BOX DIMX: %s DIMY: %s GRIDSPACING: %s \n'
                    % (Config['dimx'], Config['dimy'],
                       Config['gridspacing']))

                # ---- travel-time grid (cached on disk per array/depth) --
                Logfile.red('Calculating Traveltime Grid')
                t1 = time.time()
                isParallel = False
                TTTGridMap = []
                mint = []
                maxt = []
                try:
                    f = open('../tttgrid/tttgrid_%s_%s_%s.pkl'
                             % (ev.time, arrayname, workdepth), 'rb')
                    print "loading travel time grid_%s_%s_%s.pkl" % (
                        ev.time, arrayname, workdepth)
                    TTTGridMap, mint, maxt = pickle.load(f)
                    f.close()
                    print "loading of travel time grid sucessful"
                except:
                    # Cache miss: compute the grid per station, then dump it.
                    print "loading of travel time grid unsucessful, will now calculate the grid:"
                    if isParallel:
                        maxp = 6
                        po = multiprocessing.Pool(maxp)
                        for i in xrange(len(FilterMeta)):
                            po.apply_async(ttt.calcTTTAdv,
                                           (Config, FilterMeta[i], Origin, i,
                                            arrayname, W, refshift))
                        po.close()
                        po.join()
                    else:
                        for i in xrange(len(FilterMeta)):
                            t1 = time.time()
                            ttt.calcTTTAdv(Config, FilterMeta[i], Origin, i,
                                           arrayname, W, refshift)
                            Logfile.add('ttt.calcTTTAdv : '
                                        + str(time.time() - t1) + ' sec.')
                    assert len(FilterMeta) > 0
                    TTTGridMap = deserializer.deserializeTTT(len(FilterMeta))
                    mint, maxt = deserializer.deserializeMinTMaxT(
                        len(FilterMeta))
                    f = open('../tttgrid/tttgrid_%s_%s_%s.pkl'
                             % (ev.time, arrayname, workdepth), 'wb')
                    print "dumping the traveltime grid for this array"
                    pickle.dump([TTTGridMap, mint, maxt], f)
                    f.close()
                t2 = time.time()
                Logfile.red('%s took %0.3f s' % ('TTT', (t2 - t1)))

                # switch selects the filter band for this pass.
                switch = filterindex
                tw = times.calculateTimeWindows(mint, maxt, Config, ev,
                                                switch)

                # ---- read waveforms for this array ----------------------
                if cfg.pyrocko_download() == True:
                    if cfg.quantity() == 'displacement':
                        Wd = waveform.readWaveformsPyrocko_restituted(
                            FilterMeta, tw, evpath, ev)
                    else:
                        Wd = waveform.readWaveformsPyrocko(
                            FilterMeta, tw, evpath, ev)
                elif cfg.colesseo_input() == True:
                    Wd = waveform.readWaveforms_colesseo(
                        FilterMeta, tw, evpath, ev, C)
                else:
                    Wd = waveform.readWaveforms(FilterMeta, tw, evpath, ev)

                if cfg.Bool('synthetic_test') is True:
                    Wdf = waveform.processdummyWaveforms(
                        Wd, Config, Folder, arrayname, FilterMeta, ev,
                        switch, W)
                    Wdfs.append(Wdf)
                else:
                    Wdf = waveform.processWaveforms(
                        Wd, Config, Folder, arrayname, FilterMeta, ev,
                        switch, W)
                    Wdfs.append(Wdf)

                C.writeStationFile(FilterMeta, Folder, counter)
                Logfile.red('%d Streams added for Processing' % (len(Wd)))

                # ---- semblance calculation ------------------------------
                t1 = time.time()
                f = open('../tttgrid/tttgrid_%s_%s_%s.pkl'
                         % (ev.time, arrayname, workdepth), 'rb')
                print "loading travel time grid_%s_%s_%s.pkl" % (
                    ev.time, arrayname, workdepth)
                TTTGridMap, mint, maxt = pickle.load(f)
                f.close()

                if cfg.optimize() == True:
                    optim.solve(counter, Config, Wdf, FilterMeta, mint, maxt,
                                TTTGridMap, Folder, Origin, ntimes, switch,
                                ev, arrayfolder, syn_in)
                else:
                    arraySemb, weight, array_center = sembCalc.doCalc(
                        counter, Config, Wdf, FilterMeta, mint, maxt,
                        TTTGridMap, Folder, Origin, ntimes, switch, ev,
                        arrayfolder, syn_in)
                    t2 = time.time()
                    Logfile.add('CALC took %0.3f sec' % (t2 - t1))
                    weights.append(weight)
                    array_centers.append(array_center)
                    ASL.append(arraySemb)
                    counter += 1
                    sembCalc.writeSembMatricesSingleArray(
                        arraySemb, Config, Origin, arrayfolder, ntimes,
                        switch)

                fileName = os.path.join(arrayfolder, 'stations.txt')
                Logfile.add('Write to file ' + fileName)
                fobjarraynetwork = open(fileName, 'w')
                for i in FilterMeta:
                    fobjarraynetwork.write(
                        ('%s %s %s\n') % (i.getName(), i.lat, i.lon))
                fobjarraynetwork.close()
                TTTGridMAP = []

                if cfg.optimize_all() == True:
                    import optim_csemb
                    from optim_csemb import solve
                    sembmax = sembCalc.collectSemb(ASL, Config, Origin,
                                                   Folder, ntimes,
                                                   len(networks), switch)
                    optim_csemb.solve(counter, Config, Wdf, FilterMeta, mint,
                                      maxt, TTTGridMap, Folder, Origin,
                                      ntimes, switch, ev, arrayfolder,
                                      syn_in, ASL, sembmax, evpath, XDict,
                                      RefDict, workdepth, filterindex, Wdfs)

            # ---- combine semblance over all arrays ----------------------
            if ASL:
                Logfile.red('collect semblance matrices from all arrays')
                sembmax = sembCalc.collectSemb(ASL, Config, Origin, Folder,
                                               ntimes, len(networks), switch,
                                               array_centers)
                if cfg.Bool('weight_by_noise') == True:
                    sembCalc.collectSembweighted(ASL, Config, Origin, Folder,
                                                 ntimes, len(networks),
                                                 switch, weights)
            else:
                Logfile.red('Nothing to do -> Finish')
            print "depth:"
            print workdepth
def doCalc_syn(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
               TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
               syn_in, parameter):
    '''
    Calculate the semblance matrix of one station array for a SYNTHETIC test.

    Forward-models synthetic seismograms for the configured source(s) with a
    pyrocko Green's-function engine, substitutes them for the observed
    waveforms (optionally adding modelled noise and/or beam-forming shifts),
    fills per-station trace and traveltime arrays from the precomputed grid,
    and runs the semblance computation (C extension ``Cm.otest`` if
    available).

    :param flag:           process number, used only in log messages
    :param Config:         configuration dict, wrapped in ``ConfigObj`` below
    :param WaveformDict:   mapping station name -> obspy trace (observed data)
    :param FilterMetaData: station metadata objects (net/sta/loc/lat/lon/ele)
    :param Gmint:          global minimum traveltime of the grid
    :param Gmaxt:          global maximum traveltime of the grid
    :param TTTGridMap:     mapping station name -> traveltime grid object
    :param Folder:         output-folder dict (not used in this function)
    :param Origin:         event origin (not used in this function)
    :param ntimes:         number of time steps; overwritten from config below
    :param switch:         filter index (not used in this function)
    :param ev:             event object (time/lat/lon/depth)
    :param arrayfolder:    array working directory (``shift.dat`` is read here)
    :param syn_in:         accessor object for the synthetic-test settings
    :param parameter:      unused -- kept for caller interface compatibility
    :returns: ndarray of shape (ntimes, dimX*dimY) with semblance values
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT : %f MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    # grid geometry and time-window parameters from the config
    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()
    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    # large sentinel; reduced below to the shortest trace length in samples
    minSampleCount = 999999999

    # NOTE: the ntimes argument is deliberately overwritten from the config
    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    # build pyrocko Station objects for every trace that has metadata,
    # matched by string comparison of the station name
    stations = []
    py_trs = []
    for trace in calcStreamMap.iterkeys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)  # NOTE(review): no check that every
                # trace found exactly one metadata entry -- confirm counts

    store_id = syn_in.store()
    engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])

    # one vertical-component target per station for the GF engine
    targets = []
    for st in stations:
        target = Target(
            lat=st.lat,
            lon=st.lon,
            store_id=store_id,
            codes=(st.network, st.station, st.location, 'BHZ'),
            tmin=-1900,
            tmax=3900,
            interpolation='multilinear',
            quantity=cfg.quantity())
        targets.append(target)

    # build the synthetic source: a single source, or a CombiSource of many
    if syn_in.nsources() == 1:
        if syn_in.use_specific_stf() is True:
            stf = syn_in.stf()
            # HACK: the source-time function is configured as python code;
            # executing config text is unsafe for untrusted config files
            exec(stf)
        else:
            stf = STF()
        if syn_in.source() == 'RectangularSource':
            source = RectangularSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,   # km -> m
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                width=syn_in.width_0() * 1000.,
                length=syn_in.length_0() * 1000.,
                nucleation_x=syn_in.nucleation_x_0(),
                slip=syn_in.slip_0(),
                nucleation_y=syn_in.nucleation_y_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()))
        if syn_in.source() == 'DCSource':
            source = DCSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()),
                magnitude=syn_in.magnitude_0())
    else:
        sources = []
        for i in range(syn_in.nsources()):
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(
                    RectangularSource(
                        lat=float(syn_in.lat_1(i)),
                        lon=float(syn_in.lon_1(i)),
                        depth=syn_in.depth_syn_1(i) * 1000.,
                        strike=syn_in.strike_1(i),
                        dip=syn_in.dip_1(i),
                        rake=syn_in.rake_1(i),
                        width=syn_in.width_1(i) * 1000.,
                        length=syn_in.length_1(i) * 1000.,
                        nucleation_x=syn_in.nucleation_x_1(i),
                        slip=syn_in.slip_1(i),
                        nucleation_y=syn_in.nucleation_y_1(i),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_1(i))))
            if syn_in.source() == 'DCSource':
                sources.append(
                    DCSource(lat=float(syn_in.lat_1(i)),
                             lon=float(syn_in.lon_1(i)),
                             # NOTE(review): uses depth_1() while the
                             # RectangularSource branch uses depth_syn_1();
                             # looks inconsistent -- confirm which is intended
                             depth=syn_in.depth_1(i) * 1000.,
                             strike=syn_in.strike_1(i),
                             dip=syn_in.dip_1(i),
                             rake=syn_in.rake_1(i),
                             stf=stf,
                             time=util.str_to_time(syn_in.time_1(i)),
                             magnitude=syn_in.magnitude_1(i)))
        source = CombiSource(subsources=sources)

    # forward-model the synthetics for all targets
    response = engine.process(source, targets)
    synthetic_traces = response.pyrocko_traces()

    if cfg.Bool('synthetic_test_add_noise') is True:
        # replace the clean synthetics with noise-contaminated versions
        from noise_addition import add_noise
        trs_orgs = []
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.iterkeys():
            for trl in synthetic_traces:
                # match synthetic to observed trace by name substring
                if str(trl.name()[4:12]) == str(tracex[4:]):
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    tr_org.downsample_to(2.0)
                    trs_orgs.append(tr_org)
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        synthetic_traces = add_noise(trs_orgs, engine, source.pyrocko_event(),
                                     stations,
                                     store_id, phase_def='P')

    # substitute the synthetics for the observed data, chopped to the
    # recorded time span of each observed trace
    trs_org = []
    trs_orgs = []
    fobj = os.path.join(arrayfolder, 'shift.dat')
    # NOTE(review): xy is loaded but never used below -- dead read?
    xy = num.loadtxt(fobj, usecols=1, delimiter=',')
    calcStreamMapsyn = calcStreamMap.copy()
    for tracex in calcStreamMapsyn.iterkeys():
        for trl in synthetic_traces:
            if str(trl.name()[4:12]) == str(tracex[4:]):
                mod = trl
                recordstarttime = calcStreamMapsyn[
                    tracex].stats.starttime.timestamp
                recordendtime = calcStreamMapsyn[
                    tracex].stats.endtime.timestamp
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapsyn[tracex])
                trs_orgs.append(tr_org)
                tr_org_add = mod.chop(recordstarttime, recordendtime,
                                      inplace=False)
                synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                calcStreamMapsyn[tracex] = synthetic_obs_tr
                trs_org.append(tr_org_add)
    calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') == True:
        # phase-weighted stack: every trace is replaced by the stack result
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.iterkeys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.iterkeys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') == True:
        # align traces on the first P-type arrival via beam forming
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        # assumes shifted_traces has the same order as dict iteration --
        # TODO confirm; dict order is not guaranteed here
        for trace in calcStreamMapshifted.iterkeys():
            recordstarttime = calcStreamMapshifted[
                trace].stats.starttime.timestamp
            recordendtime = calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime,
                                 inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace] = shifted_obs_tr
            i = i + 1
        calcStreamMap = calcStreamMapshifted

    weight = 0.
    if cfg.Bool('weight_by_noise') == True:
        # derive an array weight from pre-event noise of the beam-formed data
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')
        # NOTE(review): weight is computed but never used or returned here

    # determine the shortest trace length (in samples) across the array
    for trace in calcStreamMap.iterkeys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp
        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    # dense arrays for the semblance kernel:
    # traces     : one row per station, truncated to minSampleCount samples
    # traveltime : one row per station, flattened dimX x dimY grid
    # latv/lonv  : flattened grid coordinates
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ############################################################################

    c = 0
    streamCounter = 0
    for key in calcStreamMap.iterkeys():
        streamID = key
        c2 = 0
        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1

        # find the matching traveltime grid for this stream
        for key in TTTGridMap.iterkeys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                # no-op expression (leftover debug), intentionally not printed
                "NEIN", streamID, key

        if not streamCounter in traveltimes:
            continue  # hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        maxt = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor
        gridElem = g.GridArray

        # flatten the 2-D grid into row-major 1-D arrays
        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1
    #endfor

    ############################## CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ##################
    nsamp = winlen * new_frequence
    nstep = int(step * new_frequence)
    migpoints = dimX * dimY
    dimZ = 0
    new_frequence = cfg.newFrequency()
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d NTIMES: %d' % (flag, ntimes))

    if False:   # debug dump of all kernel inputs, disabled
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))
        print('traveltime', traveltime, type(traveltime))

    #==================================semblance calculation========================================
    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)
    traveltime = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = True
    try:
        if USE_C_CODE:
            import Cm
            import CTrig
            start_time = time.time()
            k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                         Gmint, new_frequence, minSampleCount, latv, lonv,
                         traveltime, traces)
            print("--- %s seconds ---" % (time.time() - start_time))
        else:
            start_time = time.time()
            k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                      Gmint, new_frequence, minSampleCount, latv, lonv,
                      traveltime, traces)  #hs
            print("--- %s seconds ---" % (time.time() - start_time))
    except:
        # NOTE(review): if the kernel raises, k stays unbound and the
        # reshape below raises NameError -- the message is only advisory
        print "loaded tttgrid has probably wrong dimensions or stations, delete\
 ttgrid or exchange"

    t2 = time.time()

    partSemb = k
    partSemb_syn = partSemb.reshape(ntimes, migpoints)
    return partSemb_syn
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
           syn_in):
    '''
    Calculate the semblance matrix of one station array on observed data.

    Optionally applies a phase-weighted stack or beam-forming alignment to
    the input waveforms, fills per-station trace and traveltime arrays from
    the precomputed grid, and runs the semblance computation (C extension
    ``Cm.otest`` preferred, pure-python ``otest`` as fallback).

    :param flag:           process number, used only in log messages
    :param Config:         configuration dict, wrapped in ``ConfigObj`` below
    :param WaveformDict:   mapping station name -> obspy trace
    :param FilterMetaData: station metadata objects (net/sta/loc/lat/lon/ele)
    :param Gmint:          global minimum traveltime of the grid
    :param Gmaxt:          global maximum traveltime of the grid
    :param TTTGridMap:     mapping station name -> traveltime grid object
    :param Folder:         output-folder dict (not used in this function)
    :param Origin:         event origin (not used in this function)
    :param ntimes:         number of time steps; overwritten from config below
    :param switch:         filter index (not used in this function)
    :param ev:             event object (time/lat/lon/depth)
    :param arrayfolder:    array working directory (beam output written here)
    :param syn_in:         accessor for synthetics config (GF store paths)
    :returns: ndarray of shape (ntimes, dimX*dimY) with semblance values
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT : %f MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    # grid geometry and time-window parameters from the config
    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()
    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    # large sentinel; reduced below to the shortest trace length in samples
    minSampleCount = 999999999

    # NOTE: the ntimes argument is deliberately overwritten from the config
    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    # build pyrocko Station objects for every trace that has metadata,
    # matched by string comparison of the station name
    stations = []
    py_trs = []
    for trace in calcStreamMap.iterkeys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)  # NOTE(review): no check that every
                # trace found exactly one metadata entry -- confirm counts

    #==================================synthetic BeamForming=======================================

    if cfg.Bool('shift_by_phase_pws') == True:
        # phase-weighted stack: every trace is replaced by the stack result
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.iterkeys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.iterkeys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') == True:
        # align traces on the first P-type arrival via beam forming
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        # assumes shifted_traces has the same order as dict iteration --
        # TODO confirm; dict order is not guaranteed here
        for trace in calcStreamMapshifted.iterkeys():
            recordstarttime = calcStreamMapshifted[
                trace].stats.starttime.timestamp
            recordendtime = calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime,
                                 inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace] = shifted_obs_tr
            i = i + 1
        calcStreamMap = calcStreamMapshifted

    weight = 0.
    if cfg.Bool('weight_by_noise') == True:
        # derive an array weight from pre-event noise of the beam-formed data
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')
        # NOTE(review): weight is computed but never used or returned here

    # determine the shortest trace length (in samples) across the array
    for trace in calcStreamMap.iterkeys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp
        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    # dense arrays for the semblance kernel:
    # traces     : one row per station, truncated to minSampleCount samples
    # traveltime : one row per station, flattened dimX x dimY grid
    # latv/lonv  : flattened grid coordinates
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ############################################################################

    c = 0
    streamCounter = 0
    for key in calcStreamMap.iterkeys():
        streamID = key
        c2 = 0
        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1

        # find the matching traveltime grid for this stream
        for key in TTTGridMap.iterkeys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                # no-op expression (leftover debug), intentionally not printed
                "NEIN", streamID, key

        if not streamCounter in traveltimes:
            continue  #hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        maxt = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor
        gridElem = g.GridArray

        # flatten the 2-D grid into row-major 1-D arrays
        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1
    #endfor

    ############################## CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ##################
    nsamp = winlen * new_frequence
    nstep = int(step * new_frequence)
    migpoints = dimX * dimY
    dimZ = 0
    new_frequence = cfg.newFrequency()
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d NTIMES: %d' % (flag, ntimes))

    if False:   # debug dump of all kernel inputs, disabled
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))
        print('traveltime', traveltime, type(traveltime))

    t1 = time.time()
    traces_org = traces.reshape(1, nostat * minSampleCount)
    traveltime_org = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = True
    try:
        if USE_C_CODE:
            import Cm
            import CTrig
            start_time = time.time()
            k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                         Gmint, new_frequence, minSampleCount, latv, lonv,
                         traveltime_org, traces_org)
            print("--- %s seconds ---" % (time.time() - start_time))
        else:
            start_time = time.time()
            k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                      Gmint, new_frequence, minSampleCount, latv, lonv,
                      traveltime_org, traces_org)  #hs
            print("--- %s seconds ---" % (time.time() - start_time))
    except:
        # NOTE(review): if the kernel raises, k stays unbound and the
        # reshape below raises NameError -- the message is only advisory
        print "loaded tttgrid has probably wrong dimensions or stations, delete\
 ttgrid or exchange"

    t2 = time.time()
    Logfile.add('%s took %0.3f s' % ('CALC:', (t2 - t1)))

    partSemb = k
    partSemb_data = partSemb.reshape(ntimes, migpoints)
    return partSemb_data
def collectSemb(SembList, Config, Origin, Folder, ntimes, arrays, switch,
                array_centers):
    '''
    Combine the semblance matrices of all arrays and write the result to file
    for each time step.

    The per-array matrices are multiplicatively combined, one ASC grid file
    is written per time step, and the position/value of the semblance maximum
    per time step is collected into ``sembmax_<switch>.txt``.

    :param SembList:      list of per-array semblance matrices
                          (shape (ntimes, dimX*dimY) each)
    :param Config:        configuration dict
    :param Origin:        origin dict with 'lat', 'lon', 'depth', 'time'
    :param Folder:        folder dict; output goes to Folder['semb']
    :param ntimes:        number of time steps
    :param arrays:        number of arrays used (written to the output file)
    :param switch:        filter index (0/1); selects winlen/step variant
    :param array_centers: list of (lat, lon) per array, same order as SembList
    :returns: ndarray of length ntimes with the maximum semblance per step
    '''
    Logfile.add('start collect in collectSemb')
    cfg = ConfigObj(dict=Config)
    origin = ConfigObj(dict=Origin)

    dimX = cfg.dimX()
    dimY = cfg.dimY()
    # winlen/step depend on which of the two filter setups was used
    if switch == 0:
        winlen = cfg.winlen()
        step = cfg.step()
    if switch == 1:
        winlen = cfg.winlen_f2()
        step = cfg.step_f2()

    latv = []
    lonv = []

    gridspacing = cfg.Float('gridspacing')
    migpoints = dimX * dimY
    o_lat = origin.lat()
    o_lon = origin.lon()
    oLatul = 0
    oLonul = 0

    # rebuild the flattened grid coordinates, centred on the origin;
    # Latul/Lonul keep the south-west corner for the file headers
    z = 0
    for i in xrange(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

        if z == 0 and i == 0:
            Latul = oLatul
        o = 0
        for j in xrange(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing

            if o == 0 and j == 0:
                Lonul = oLonul

            latv.append(oLatul)
            lonv.append(oLonul)

    # multiplicative combination of all array semblances
    tmp = 1
    origin = DataTypes.dictToLocation(Origin)
    i = 0

    #for a in SembList:
    #    tmp = num.zeros(num.shape(a))
    azis = []
    for a in SembList:
        x = array_centers[i][0]
        y = array_centers[i][1]
        # delta is computed but currently unused (distance weighting disabled)
        delta = orthodrome.distance_accurate50m_numpy(x, y, origin.lat,
                                                      origin.lon)
        #a = a*((1./delta**2)*1.e+15)
        tmp *= a
        #azis.append(toAzimuth(float(Origin['lat']), float(Origin['lon']),x, y))
        i = i + 1

    # -- disabled experiment: shift the grid toward the array-overlap point --
    #min_coor = num.zeros([i,2])
    #i = 0
    #for a in SembList:
    #    deltas = []
    #    x = array_centers[i][0]
    #    y = array_centers[i][1]
    #    for k in range(0,len(latv)):
    #        delta = orthodrome.distance_accurate50m_numpy(x, y, latv[k], lonv[k])
    #        deltas.append(orthodrome.distance_accurate50m_numpy(x, y, latv[k], lonv[k]))
    #        if delta <= num.min(deltas):
    #            min_coor[i]= [latv[k], lonv[k]]
    #    i = i+1
    #    array_overlap = num.average(min_coor, axis=0)
    #    delta_center = orthodrome.distance_accurate50m_numpy(array_overlap[0], array_overlap[1], origin.lat, origin.lon)
    # print(array_overlap)
    # print(delta_center)
    # diff_center_lat = origin.lat-array_overlap[0]
    # diff_center_lon = origin.lon-array_overlap[1]
    # print(diff_center_lat)
    # print(diff_center_lon)

    #for a in SembList:
    #if num.mean(a)>0:
    #    tmp *= a

    sembmaxvaluev = num.ndarray(ntimes, dtype=float)
    sembmaxlatv = num.ndarray(ntimes, dtype=float)
    sembmaxlonv = num.ndarray(ntimes, dtype=float)

    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d' % (rc.day, rc.month, rc.year, rc.hour,
                                       rc.minute, rc.second)
    d = rc.timestamp

    usedarrays = arrays
    folder = Folder['semb']
    fobjsembmax = open(os.path.join(folder, 'sembmax_%s.txt' % (switch)), 'w')
    # global normalisation constant: maximum of the combined semblance
    norm = num.max(num.max(tmp, axis=1))
    max_p = 0.
    sum_i = 0.

    # NOTE(review): sum_i starts at 0. and is only multiplied, so it stays
    # 0. -- presumably '+=' was intended; currently this and the loop below
    # have no effect on the output (max is unused).
    for a, i in enumerate(tmp):
        if a < 1:
            sum_i *= i

    # locate the strongest grid point of the first time step (max_p/latvmax/
    # lonvmax); also only inspects row 0 because of the 'a < 1' guard
    for a, i in enumerate(tmp):
        if a < 1:
            max = num.max(sum_i[:])   # NOTE(review): shadows builtin, unused
            for j in range(migpoints):
                if i[j] > num.max(i[:]) * 0.9 and i[j] > max_p:
                    latvmax = latv[j]
                    lonvmax = lonv[j]
                    max_p = i[j]

    # delta_lat = origin.lat-latvmax
    # delta_lon = origin.lon-lonvmax

    #for a, i in enumerate(tmp):
    #    max_pos = [l for l, k in enumerate(i) if k == i.max()][0]
    #    delta_lat = origin.lat-latv[max_pos]
    #    delta_lon = origin.lon-lonv[max_pos]

    # no-op loop kept from the disabled grid-shift experiment above
    for j in range(migpoints):
        latv[j] = latv[j]  #+delta_lat
        lonv[j] = lonv[j]  #+delta_lon
    #    latv.append(latv[j]-delta_lat)
    #    lonv.append(lonv[j]-delta_lon)

    #nix = []
    #for a, i in enumerate(tmp):
    #    for j in range(migpoints):
    #        if i[j]/norm > num.max(sum_i/norm)*0.4:
    #            if j in nix:
    #                pass
    #            else:
    #                latv[j] = latv[j]+delta_lat
    #                lonv[j] = lonv[j]+delta_lon
    #                nix.append(j)
    #if i[j]/norm > num.max(sum_i/norm)*0.4:
    #    print('yes')
    #    delta_lat = origin.lat-latv[j]
    #    delta_lon = origin.lon-lonv[j]
    #    print delta_lat, delta_lon, latvmax, lonvmax
    #    print latv[j], lonv[j], origin.lat, origin.lon
    #    ix = num.where(latv[j]+delta_lat)[0][0]
    #    iy = num.where(lonv[j]+delta_lon)[0][0]
    #    lat = latv[j].copy()
    #    lon = lonv[j].copy()
    #    latv[j] = latv[ix]
    ##    lonv[j] = lonv[iy]
    #    lonv[iy]
    #
    #latv[j] = latv[j]+delta_lat
    #lonv[j] = lonv[j]+delta_lon
    #    print latv[j], lonv[j]

    # write one ASC grid file per time step and track the semblance maximum
    for a, i in enumerate(tmp):
        logger.info('timestep %d' % a)
        print(a)

        fobj = open(os.path.join(folder, '%s-%s_%03d.ASC' % (switch,
                                                             Origin['depth'],
                                                             a)), 'w')
        # ASC header: timestamp, stepping and grid geometry
        fobj.write('# %s , %s\n' % (d, rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n' % (step, ntimes,
                                                             winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n' % (Latul,
                                                                  gridspacing,
                                                                  dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlon: %f \n' % (Lonul,
                                                                  gridspacing,
                                                                  dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')

        sembmax = 0
        sembmaxX = 0
        sembmaxY = 0
        uncert = num.std(i)  # used as an uncertainty proxy; maybe not std?

        for j in range(migpoints):
            x = latv[j]
            y = lonv[j]
            semb = i[j] / norm   # normalised semblance at this grid point
            fobj.write('%.2f %.2f %.20f\n' % (x, y, semb))

            if semb > sembmax:
                # track maximum and its position for this time step
                sembmax = semb
                sembmaxX = x
                sembmaxY = y

        # NOTE(review): delta uses the LAST grid point (x, y), not the
        # maximum (sembmaxX, sembmaxY) -- looks unintended; confirm
        delta = orthodrome.distance_accurate50m_numpy(x, y, origin.lat,
                                                      origin.lon)
        azi = toAzimuth(float(Origin['lat']), float(Origin['lon']),
                        float(sembmaxX), float(sembmaxY))

        sembmaxvaluev[a] = sembmax
        sembmaxlatv[a] = sembmaxX
        sembmaxlonv[a] = sembmaxY

        # NOTE(review): 119.19 km/deg looks like a typo for ~111.19 km/deg
        # (degree-to-kilometre conversion) -- confirm before relying on it
        fobjsembmax.write('%d %.3f %.3f %.30f %.30f %d %03f %f %03f\n' %
                          (a * step, sembmaxX, sembmaxY, sembmax, uncert,
                           usedarrays, delta, float(azi), delta * 119.19))
        fobj.close()

    fobjsembmax.close()

    trigger.writeSembMaxValue(sembmaxvaluev, sembmaxlatv, sembmaxlonv,
                              ntimes, Config, Folder)
    inspect_semb = cfg.Bool('inspect_semb')
    if inspect_semb is True:
        trigger.semblancestalta(sembmaxvaluev, sembmaxlatv, sembmaxlonv)
    return sembmaxvaluev
def filterBestSolution(solution):
    '''
    Diagnostic check of intra-cluster distances for one cluster solution.

    Reads ``<solution.path>/event.stations``, builds Station objects, and for
    every pair of stations in cluster '8' compares their separation against
    the configured intra-cluster distance. Results are only printed/logged;
    nothing is returned.

    :param solution: object with a ``path`` attribute; the event directory is
                     assumed to be two levels above it
    '''
    # event directory = two path components above the solution directory
    evp = os.path.join('/', *solution.path.split('/')[:-2])
    C = Config(evp)
    Conf = C.parseConfig('config')
    cfg = ConfigObj(dict=Conf)

    SL = []   # parsed Station objects
    M = []    # cluster-member id per station (de-duplicated below)
    fobj = open(os.path.join(solution.path, 'event.stations'), 'r')

    # expected line format: NET.STA.LOC.COMP  lat  lon  member
    for s in fobj:
        try:
            line = s.split()
            net, sta, loc, comp = line[0].split('.')
            slat = line[1]
            slon = line[2]
            smember = line[3]

            M.append(smember)
            SL.append(Station(net, sta, loc, comp, lat=slat, lon=slon,
                              member=smember))
        except:
            # best effort: malformed lines are logged and skipped
            Logfile.exception('filterBestSolution', '<' + s + '>')
            continue
    #endfor

    fobj.close()

    M = list(set(M))   # unique cluster ids
    Logfile.add('number of clusters ' + str(len(M)),
                'number of stations ' + str(len(SL)))

    # allowed intra-cluster distance, converted from km to degrees
    kd = obs_kilometer2degrees(cfg.Distance('intraclusterdistance'))
    Logfile.add('icd ' + str(kd))

    maxdist = -1

    for i in SL:
        counter = 0   # stations of cluster '8' closer than kd to station i

        for k in SL:
            # NOTE(review): cluster id '8' is hard-coded -- presumably the
            # "best" cluster of this solution; confirm against caller
            if i.member == '8' and k.member == '8':
                if i.getName() != k.getName():
                    delta = loc2degrees(i, k)

                    if delta > maxdist:
                        maxdist = delta

                    print i, i.member, ' <--> ', k, k.member, ' delta: ', delta, ' allowed ', kd

                    if delta < kd:
                        counter += 1
                #endif
            #endif
        #endfor

        print i, 'less then allowd ', counter
    #endfor

    print 'masxdist ', maxdist
for section_name in parser.sections(): for name, value in parser.items(section_name): cDict[name] = value return cDict options,args = main(sys.argv) Basic.checkExistsDir(options.eventpath, isAbort=True) Globals.setEventDir(options.eventpath) C = config.Config(options.eventpath) Origin = C.parseConfig('origin') Conf = globalConf() Config = C.parseConfig('config') filter = FilterCfg(Config) cfg = ConfigObj(dict=Config) minDist, maxDist = cfg.FloatRange('mindist', 'maxdist') ev = Event(Origin['lat'],Origin['lon'],Origin['depth'],Origin['time'] ) event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=float(ev.depth)*1000., time=util.str_to_time(ev.time)) newFreq = float(filter.newFrequency()) options.time = Origin ['time'] options.duration = int(Conf['duration']) sdspath = os.path.join(options.eventpath,'data') model.dump_events([event], sdspath+'event.pf') tmin = util.str_to_time(ev.time)-600. tmax = util.str_to_time(ev.time)+1800. def get_stations(site, lat, lon, rmin, rmax, tmin, tmax, channel_pattern='BH*'): extra = {}
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes):
    '''
    Calculate the semblance matrix of one station array (older variant).

    Fills per-station trace and traveltime arrays from the precomputed grid
    and runs the semblance kernel (``Cm.otest`` C extension or the python
    ``otest`` fallback, selected by the module-level ``USE_C_CODE`` flag).
    Unlike the newer ``doCalc`` this variant performs no beam forming or
    noise weighting, and uses ``np.`` (not ``num.``) for numpy.

    :param flag:           process number, used only in log messages
    :param Config:         configuration dict, wrapped in ``ConfigObj`` below
    :param WaveformDict:   mapping station name -> obspy trace
    :param FilterMetaData: station metadata (unused in this variant)
    :param Gmint:          global minimum traveltime of the grid
    :param Gmaxt:          global maximum traveltime of the grid
    :param TTTGridMap:     mapping station name -> traveltime grid object
    :param Folder:         output-folder dict (unused in this variant)
    :param Origin:         event origin (unused in this variant)
    :param ntimes:         number of time steps; overwritten from config below
    :returns: ndarray of shape (ntimes, dimX*dimY) with semblance values
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT : %f MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    # grid geometry and time-window parameters from the config
    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()
    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    # large sentinel; reduced below to the shortest trace length in samples
    minSampleCount = 999999999

    # NOTE: the ntimes argument is deliberately overwritten from the config
    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)

    #for i in WaveformDict.iterkeys():
    #    print i,WaveformDict[i]

    ############################################################################
    calcStreamMap = WaveformDict

    # determine the shortest trace length (in samples) across the array
    for trace in calcStreamMap.iterkeys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    # dense arrays for the semblance kernel:
    # traces     : one row per station, truncated to minSampleCount samples
    # traveltime : one row per station, flattened dimX x dimY grid
    # latv/lonv  : flattened grid coordinates
    traces = np.ndarray(shape=(len(calcStreamMap), minSampleCount),
                        dtype=float)
    traveltime = np.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                            dtype=float)
    latv = np.ndarray(dimX * dimY, dtype=float)
    lonv = np.ndarray(dimX * dimY, dtype=float)
    ############################################################################

    #traces = np.ndarray (nostat*minSampleCount,dtype=float)
    #traveltimes = np.ndarray (nostat*dimX*dimY,dtype=float)
    #latv = np.ndarray (dimX*dimY,dtype=float)
    #lonv = np.ndarray (dimX*dimY,dtype=float)
    #print 'minSC: ',minSampleCount,' LCSM: ',len(calcStreamMap)

    c = 0
    streamCounter = 0

    for key in calcStreamMap.iterkeys():
        streamID = key
        c2 = 0

        #print streamID, len(calcStreamMap[key]),minSampleCount
        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                #print 'C: ',c,' C2: ',c2,' TRACES:',traces[c][c2]
                c2 += 1
        #endfor

        # find the matching traveltime grid for this stream
        for key in TTTGridMap.iterkeys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                # no-op expression (leftover debug), intentionally not printed
                "NEIN", streamID, key
        #endfor

        if not streamCounter in traveltimes:
            continue  #hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        maxt = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor
        gridElem = g.GridArray

        # flatten the 2-D grid into row-major 1-D arrays
        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1
    #endfor

    ############################## CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ##################
    nsamp = winlen * new_frequence
    nstep = int(step * new_frequence)
    migpoints = dimX * dimY
    dimZ = 0
    new_frequence = cfg.newFrequency()
    maxp = int(Config['ncore'])
    #maxp = 20  #hs

    Logfile.add('PROCESS %d NTIMES: %d' % (flag, ntimes))

    #k = Csemblance.semb(flag,nostat,nsamp,ntimes,nstep,Gmint,Gmaxt,Lonul,Latul,minSampleCount,
    #                    dimZ,dimX,dimY,new_frequence,ntimesstart,ntimesend,winlen,step,gridspacing,
    #                    latv,lonv,traveltime,traces,backveclen)
    #k = sembPar.semb (flag,nostat,nsamp,ntimes,nstep,Gmint,Gmaxt,Lonul,Latul,minSampleCount,dimZ,
    #                  dimX,dimY,new_frequence,ntimesstart,ntimesend,winlen,step,gridspacing,latv,
    #                  lonv,traveltime,traces,backveclen)

    if False:   # debug dump of all kernel inputs, disabled
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))
        print('traveltime', traveltime, type(traveltime))

    # flatten for the C interface
    traveltime = traveltime.reshape(1, nostat * dimX * dimY)
    traces = traces.reshape(1, nostat * minSampleCount)
    #print 'traveltime2',traveltime,type(traveltime)

    t1 = time.time()
    # assumes USE_C_CODE, Cm/otest are provided at module level in this
    # variant (no local import/try as in the newer doCalc) -- TODO confirm
    if USE_C_CODE:
        k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                     Gmint, new_frequence, minSampleCount, latv, lonv,
                     traveltime, traces)
    else:
        k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                  Gmint, new_frequence, minSampleCount, latv, lonv,
                  traveltime, traces)  #hs
    t2 = time.time()

    Logfile.add('%s took %0.3f s' % ('CALC:', (t2 - t1)))
    #print 'K',k,len(k),' MUST ',ntimes*dimX*dimY,' RES ',k[1]

    partSemb = k
    #partSemb = partSemb.reshape (1,migpoints)
    partSemb = partSemb.reshape(ntimes, migpoints)
    #print 'PARTSEMB FLAG: ',partSemb,type(partSemb),partSemb.ndim
    return partSemb
def refTrigger(self, RefWaveform):
    """Pick the P onset on the reference station and return the time shift.

    Computes a theoretical P arrival for the reference station with cake,
    reads and band-pass filters the reference waveform, runs a recursive
    STA/LTA trigger on it and compares the triggered onset against the
    theoretical arrival.  Optionally (``autoxcorrcorrectur == 1``) the pick
    can be corrected interactively via snuffler / a manual prompt.

    :param RefWaveform: obspy Stream-like; element 0 provides the
        net/sta/loc/chan used to look up the station metadata.
    :returns: tuple ``(tdiff, To)`` where ``tdiff`` is the time difference
        (s) between triggered and theoretical onset and ``To`` is a
        ``Trigger`` result object.
    :raises Exception: if no theoretical P arrival could be computed
        (``ptime`` stays 0).
    """
    Config = self.Config
    cfg = ConfigObj(dict=Config)
    # Station key "net.sta.loc.chan" used to find the metadata entry.
    name = ('%s.%s.%s.%s') % (
        RefWaveform[0].stats.network, RefWaveform[0].stats.station,
        RefWaveform[0].stats.location, RefWaveform[0].stats.channel)
    i = self.searchMeta(name, self.StationMeta)
    de = loc2degrees(self.Origin, i)
    ptime = 0
    Phase = cake.PhaseDef('P')
    model = cake.load_model()
    # colesseo scenarios store depth in m already; otherwise depth is in km
    # and must be scaled by `km` for cake.
    if cfg.colesseo_input() == True:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth, zstop=0.)
    else:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth * km, zstop=0.)
    try:
        ptime = arrivals[0].t
    except:
        # Retry with a slightly shallower source if no arrival was found.
        # NOTE(review): `o_depth` is not defined in this method --
        # presumably this should be self.Origin.depth; verify before
        # relying on this fallback path.
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=o_depth * km - 0.1)
        ptime = arrivals[0].t
    phasename = ('%sphase') % (os.path.basename(self.AF))

    if ptime == 0:
        # No theoretical arrival -> cannot window the waveform; abort.
        # NOTE(review): `tt` is not defined in this scope -- this
        # diagnostic print would raise NameError if ever reached; confirm
        # where the phase list should come from.
        print '\033[31mAvailable phases for reference station %s in range %f deegree\033[0m' % (
            i, de)
        print '\033[31m' + '|'.join(
            [str(item['phase_name']) for item in tt]) + '\033[0m'
        print '\033[31myou tried phase %s\033[0m' % (
            self.Config[phasename])
        raise Exception("\033[31mILLEGAL: phase definition\033[0m")

    # Cut a window around the theoretical arrival and fetch the waveform
    # from whichever input backend is configured.
    tw = self.calculateTimeWindows(ptime)
    if cfg.pyrocko_download() == True:
        stP = self.readWaveformsPicker_pyrocko(i, tw, self.Origin, ptime)
    elif cfg.colesseo_input() == True:
        stP = self.readWaveformsPicker_colos(i, tw, self.Origin, ptime)
    else:
        stP = self.readWaveformsPicker(i, tw, self.Origin, ptime)

    # Keep an untouched copy of the raw reference trace on disk.
    refuntouchname = os.path.basename(self.AF) + '-refstation-raw.mseed'
    stP.write(os.path.join(self.EventPath, refuntouchname),
              format='MSEED', byteorder='>')
    stP.filter("bandpass",
               freqmin=float(self.Config['refstationfreqmin']),
               freqmax=float(self.Config['refstationfreqmax']))
    stP.trim(tw['xcorrstart'], tw['xcorrend'])
    trP = stP[0]
    # Rebase the trace start to a fixed synthetic epoch (t=3600 s) so the
    # trigger onset can be compared against a known reference time.
    trP.stats.starttime = UTCDateTime(3600)
    refname = os.path.basename(self.AF) + '-refstation-filtered.mseed'
    trP.write(os.path.join(self.EventPath, refname),
              format='MSEED', byteorder='>')
    sta = float(self.Config['refsta'])
    lta = float(self.Config['reflta'])
    # Recursive STA/LTA characteristic function, windows in samples.
    cft = recSTALTA(trP.data, int(sta * trP.stats.sampling_rate),
                    int(lta * trP.stats.sampling_rate))
    # NOTE(review): triggerOnset(cft, thr_on, thr_off) is called here with
    # (lta, sta) as thresholds -- these are window lengths, not threshold
    # levels; confirm this is intentional.
    t = triggerOnset(cft, lta, sta)
    try:
        onset = t[0][0] / trP.stats.sampling_rate
        print 'ONSET ', onset
    except:
        # No trigger found: fall back to the configured fore-run offset.
        onset = self.mintforerun
    trigger = trP.stats.starttime + onset
    print 'TRIGGER ', trigger
    print 'THEORETICAL: ', UTCDateTime(3600) + self.mintforerun
    # Difference between triggered onset and theoretical onset.
    tdiff = (trP.stats.starttime + onset) - (UTCDateTime(3600) +
                                             self.mintforerun)
    print 'TDIFF: ', tdiff
    # Absolute (real-time) theoretical P arrival and triggered onset.
    refp = UTCDateTime(self.Origin.time) + ptime
    reftriggeronset = refp + onset - self.mintforerun

    if int(self.Config['autoxcorrcorrectur']) == 1:
        # Interactive correction: write a snuffler marker file, open the
        # raw trace in snuffler, show the trigger plot and let the user
        # type a manual pick (in seconds).
        try:
            refmarkername = os.path.join(
                self.EventPath,
                ('%s-marker') % (os.path.basename(self.AF)))
            fobjrefmarkername = open(refmarkername, 'w')
            fobjrefmarkername.write(
                '# Snuffler Markers File Version 0.2\n')
            fobjrefmarkername.write((
                'phase: %s 0 %s None None None XWStart None False\n'
            ) % (tw['xcorrstart'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write((
                'phase: %s 0 %s None None None XWEnd None False\n'
            ) % (tw['xcorrend'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write((
                'phase: %s 1 %s None None None TheoP None False\n'
            ) % (refp.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write((
                'phase: %s 3 %s None None None XTrig None False'
            ) % (reftriggeronset.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.close()

            cmd = 'snuffler %s --markers=%s&' % (os.path.join(
                self.EventPath, refuntouchname), refmarkername)
            os.system(cmd)

            thrOn = float(self.Config['reflta'])  # 4
            thrOff = float(self.Config['refsta'])  # 0.7
            plotTrigger(trP, cft, thrOn, thrOff)

            selection = float(
                raw_input('Enter self picked phase in seconds: '))
            tdiff = selection - self.mintforerun

            refname = os.path.basename(self.AF) + '-shift.mseed'
            trP.stats.starttime = trP.stats.starttime - selection
            trP.write(os.path.join(self.EventPath, refname),
                      format='MSEED')
        except:
            # Interactive correction failed/aborted: write an unshifted
            # (only forerun-corrected) trace instead.
            selection = 0.
            refname = os.path.basename(self.AF) + '-shift.mseed'
            trP.stats.starttime = trP.stats.starttime - selection - \
                self.mintforerun
            trP.write(os.path.join(self.EventPath, refname),
                      format='MSEED')
    '''
    tdiff = 0
    trigger = trP.stats.starttime
    '''
    To = Trigger(name, trigger, os.path.basename(self.AF), tdiff)

    return tdiff, To
def processLoop():
    """Main processing driver: run (or load) the cross-correlation shifts,
    then loop over phases / filter setups / depths and compute semblance
    for every configured array.

    Reads the event configuration from the module-global ``evpath``,
    optionally asks the user interactively which networks to process, and
    delegates the actual semblance computation to ``sembCalc.doCalc`` /
    ``optim.solve``.  Results are written to the event folder; nothing is
    returned.

    Fixes applied in this revision (behavior-preserving except where the
    original was plainly broken):
      * ``is``/``is not`` identity comparisons on ints and strings replaced
        by ``==`` (``cfg.Int('xcorr') is 1``, ``phase is 'P'/'S'``).
      * stale loop variable ``j`` replaced by the current loop variable
        ``i`` when filling ``SL``/``XDict``/``RefDict`` inside
        ``for i in xcorrnetworks`` (``j`` was undefined or left over from a
        previous loop).
      * ``k = +1`` (which pins ``k`` to the constant 1) replaced by
        ``k += 1`` in the bootstrap-weight construction.
      * the first xcorr branch constructed ``A`` only when
        ``pyrocko_download()`` was true; added the same ``else`` branch the
        sibling code path already had, so ``A`` is always defined.
    """
    C = config.Config(evpath)
    Origin = C.parseConfig('origin')
    flag_rpe = False
    try:
        Syn_in = C.parseConfig('syn')
        syn_in = SynthCfg(Syn_in)
    except TypeError:
        # No synthetic block in the config: proceed without syn_in.
        pass
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    phases = cfg.Str('ttphases')
    phases = phases.split(',')

    # Station metadata source depends on the configured input backend.
    if cfg.pyrocko_download() is True:
        Meta = C.readpyrockostations()
    elif cfg.colesseo_input() is True:
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_path = cfg.colosseo_scenario_yml()[:-12]
        Meta = C.readcolosseostations(scenario_path)
    else:
        Meta = C.readMetaInfoFile()
    Folder = C.createFolder()
    C.writeConfig(Config, Origin, Folder)

    filter = FilterCfg(Config)
    # Number of sliding time steps over forerun+duration.
    if cfg.UInt('forerun') > 0:
        ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration')) /
                     cfg.UInt('step'))
    else:
        ntimes = int((cfg.UInt('duration')) / cfg.UInt('step'))
    origin = OriginCfg(Origin)

    if cfg.colesseo_input() is True:
        # Scenario input: take source parameters from the first scenario
        # event and mirror them into both `origin` and the Origin dict.
        from pyrocko import util
        events = scenario.get_events()
        ev = events[0]
        origin.strike = str(ev.moment_tensor.strike1)
        origin.rake = str(ev.moment_tensor.rake1)
        origin.dip = str(ev.moment_tensor.dip1)
        strike = ev.moment_tensor.strike1
        origin.lat = str(ev.lat)
        origin.lon = str(ev.lon)
        origin.depth = str(ev.depth / 1000.)
        depth = ev.depth
        origin.time = util.time_to_str(ev.time)
        time_ev = util.time_to_str(ev.time)
        lat = ev.lat
        lon = ev.lon
        rake = ev.moment_tensor.rake1
        dip = ev.moment_tensor.dip1
        Origin['strike'] = str(ev.moment_tensor.strike1)
        Origin['rake'] = str(ev.moment_tensor.rake1)
        Origin['dip'] = str(ev.moment_tensor.dip1)
        Origin['lat'] = str(ev.lat)
        Origin['lon'] = str(ev.lon)
        Origin['time'] = util.time_to_str(ev.time)
        Origin['depth'] = str(ev.depth / 1000.)
        ev = Event(lat, lon, depth, time_ev,
                   strike=strike, dip=dip, rake=rake)
    else:
        # Regular input: mechanism angles default to 0 if unset.
        default = 0
        strike = origin.strike(default)
        dip = origin.dip(default)
        rake = origin.rake(default)
        ev = Event(origin.lat(), origin.lon(), origin.depth(),
                   origin.time(), strike=strike, dip=dip, rake=rake)

    if cfg.Bool('correct_shifts_empirical') is True:
        # Secondary (empirical-correction) origin and event.
        Origin_emp = C.parseConfig('origin_emp')
        origin_emp = OriginCfg(Origin_emp)
        ev_emp = Event(origin_emp.lat(), origin_emp.lon(),
                       origin_emp.depth(), origin_emp.time(),
                       strike=strike, dip=dip, rake=rake)
    filtername = filter.filterName()
    Logfile.add('filtername = ' + filtername)

    XDict = OrderedDict()
    RefDict = OrderedDict()
    SL = OrderedDict()
    refshifts_global = []
    newFreq = str(filter.newFrequency())
    xcorrnetworks = cfg.String('networks').split(',')

    # was: `cfg.Int('xcorr') is 1` -- identity comparison on an int.
    if cfg.Int('xcorr') == 1:
        # --- cross-correlation shifts: load cached or compute ---
        fobjreferenceshiftname = newFreq + '_' + filtername + '.refpkl'
        rp = os.path.join(Folder['semb'], fobjreferenceshiftname)
        fobjreferenceshiftnameemp = newFreq + '_' + filtername + 'emp' + \
            '.refpkl'
        rpe = os.path.join(Folder['semb'], fobjreferenceshiftnameemp)
        fobjpickleshiftname = newFreq + '_' + filtername + '.xcorrpkl'
        ps = os.path.join(Folder['semb'], fobjpickleshiftname)

        if (os.path.isfile(rp) and os.path.getsize(rp) != 0
                and os.path.isfile(ps) and os.path.getsize(ps) != 0):
            Logfile.add('xcorr/reference shift file exits : ' + rp)
            Logfile.add('loaded reference shift')

            if sys.version_info.major >= 3:
                f = open(rp, 'rb')
            else:
                f = open(rp)
            RefDict = pickle.load(f)
            if sys.version_info.major >= 3:
                x = open(ps, 'rb')
            else:
                x = open(ps)
            XDict = pickle.load(x)
            for i in xcorrnetworks:
                SL[i] = len(Config[i].split('|'))
        else:
            # No cached shifts: run the cross-correlation per network.
            SL = {}
            for i in xcorrnetworks:
                W = {}
                network = cfg.String(i).split('|')
                FilterMeta = ttt.filterStations(Meta, Config, Origin,
                                                network)
                arrayfolder = os.path.join(Folder['semb'], i)
                if os.access(arrayfolder, os.F_OK) is False:
                    os.makedirs(arrayfolder)
                if cfg.pyrocko_download() is True:
                    # TODO check seperate xcoor nescessity
                    A = Xcorr(ev, FilterMeta, evpath, Config, Syn_in,
                              arrayfolder)
                else:
                    # was missing: `A` was undefined on this path.
                    A = Xcorr(ev, FilterMeta, evpath, Config, Syn_in,
                              arrayfolder)
                print("run Xcorr")
                phase = phases[0]
                W, triggerobject = A.runXcorr(phase)
                XDict[i] = W
                RefDict[i] = triggerobject.tdiff
                SL[i] = len(network)
                for j in range(0, len(FilterMeta)):
                    refshifts_global.append(triggerobject.tdiff)

            # Cache the computed shifts for later runs.
            if sys.version_info.major >= 3:
                fobjrefshift = open(rp, 'wb')
            else:
                fobjrefshift = open(rp, 'w')
            pickle.dump(RefDict, fobjrefshift)
            fobjrefshift.close()

            if sys.version_info.major >= 3:
                output = open(ps, 'wb')
            else:
                output = open(ps, 'w')
            pickle.dump(XDict, output)
            output.close()
    else:
        # --- xcorr disabled: use zero shifts everywhere ---
        fobjreferenceshiftname = newFreq + '_' + filtername + '.refpkl'
        rp = os.path.join(Folder['semb'], fobjreferenceshiftname)
        fobjreferenceshiftnameemp = newFreq + '_' + filtername + 'emp' + \
            '.refpkl'
        rpe = os.path.join(Folder['semb'], fobjreferenceshiftnameemp)
        fobjpickleshiftname = newFreq + '_' + filtername + '.xcorrpkl'
        ps = os.path.join(Folder['semb'], fobjpickleshiftname)

        refshift = 0
        if (os.path.isfile(rp) and os.path.getsize(rp) != 0
                and os.path.isfile(ps) and os.path.getsize(ps) != 0):
            Logfile.add('Temporay Memory file exits : ' + rp)
            if sys.version_info.major >= 3:
                f = open(rp, 'rb')
            else:
                f = open(rp)
            RefDict = pickle.load(f)
            if sys.version_info.major >= 3:
                x = open(ps, 'rb')
            else:
                x = open(ps)
            XDict = pickle.load(x)

            for i in xcorrnetworks:
                # was: `Config[j]` -- stale loop variable from an earlier
                # loop; must index by the current network name `i`.
                SL[i] = len(Config[i].split('|'))
                network = cfg.String(i).split('|')
                FilterMeta = ttt.filterStations(Meta, Config, Origin,
                                                network)
                RefDict[i] = refshift
                for j in range(0, len(FilterMeta)):
                    refshifts_global.append(refshift)
        else:
            SL = {}
            for i in xcorrnetworks:
                W = {}
                refshift = 0
                network = cfg.String(i).split('|')
                FilterMeta = ttt.filterStations(Meta, Config, Origin,
                                                network)
                arrayfolder = os.path.join(Folder['semb'], i)
                if os.access(arrayfolder, os.F_OK) is False:
                    os.makedirs(arrayfolder)
                if cfg.pyrocko_download() is True:
                    # TODO check seperate xcoor nescessity
                    A = Xcorr(ev, FilterMeta, evpath, Config, Syn_in,
                              arrayfolder)
                else:
                    A = Xcorr(ev, FilterMeta, evpath, Config, Syn_in,
                              arrayfolder)
                print("run Xcorr")
                phase = phases[0]
                W, triggerobject = A.runXcorr_dummy(phase)
                # was: XDict[j]/RefDict[j]/SL[j] -- `j` is stale here;
                # the loop variable is `i`.
                XDict[i] = W
                RefDict[i] = refshift
                SL[i] = len(network)
                for j in range(0, len(FilterMeta)):
                    refshifts_global.append(refshift)

            if sys.version_info.major >= 3:
                fobjrefshift = open(rp, 'wb')
            else:
                fobjrefshift = open(rp, 'w')
            pickle.dump(RefDict, fobjrefshift)
            fobjrefshift.close()

            if sys.version_info.major >= 3:
                output = open(ps, 'wb')
            else:
                output = open(ps, 'w')
            pickle.dump(XDict, output)
            output.close()

    # Report remaining stations per array (same on py2 and py3; the two
    # duplicated version branches were merged).
    for j in sorted(XDict.keys()):
        Logfile.red('Array %s has %3d of %3d Stations left' %
                    (j, len(XDict[j]), SL[j]))

    # Interactive network selection (empty = all, 'quit', 'rerun', or a
    # comma-separated list of array names).
    while True:
        if sys.version_info.major >= 3:
            nnl = input("please enter your choice: ")
        else:
            nnl = raw_input("please enter your choice: ")

        if len(nnl) == 0:
            if not Basic.question('Process all networks ?'):
                continue
            Logfile.red('This networks will be used for processing: %s' %
                        (Config['networks']))
            break
        elif str(nnl) == 'quit':
            sys.exit()
        elif str(nnl) == 'rerun':
            event = os.path.join(*evpath.split('/')[-1:])
            try:
                os.remove(rp)
                os.remove(ps)
            except Exception:
                pass
            mainfolder = os.path.join(os.path.sep,
                                      *evpath.split('/')[:-2])
            os.chdir(mainfolder)
            cmd = ('%s arraytool.py process %s') % (sys.executable, event)
            Logfile.add('cmd = ' + cmd)
            os.system(cmd)
            sys.exit()
        else:
            names = nnl.split(',')
            isOk = True
            for array in names:
                arrayfolder = os.path.join(Folder['semb'], array)
                if not os.path.isdir(arrayfolder):
                    Logfile.error('Illegal network name ' + str(array))
                    isOk = False
                    break
            if not isOk:
                continue
            Logfile.add('This networks will be used for processing: %s' %
                        (nnl))
            Config['networks'] = nnl
            break

    for j in range(3, 0, -1):
        time.sleep(1)
        Logfile.red('Start processing in %d seconds ' % (j))

    wd = Origin['depth']
    start, stop, step = cfg.String('depths').split(',')
    start = int(start)
    stop = int(stop) + 1
    step_depth = int(step)
    filters = cfg.String('filters')
    filters = int(filters)
    Logfile.add('working on ' + Config['networks'])

    if cfg.Bool('correct_shifts_empirical') is True:
        emp_loop = True
    else:
        emp_loop = False

    # ============================ loop over phases ======================
    for phase in phases:
        # was: `phase is 'P'` / `phase is 'S'` -- identity comparison on
        # strings produced by split(); use equality.
        if phase == 'P':
            desired = 'Z'
        if phase == 'S':
            desired = 'T'
        # ======================= loop over filter setups ================
        for filterindex in xrange(0, filters):
            # ========================= loop over depth ==================
            for depthindex in xrange(start, stop, step_depth):
                workdepth = float(wd) + depthindex
                Origin['depth'] = workdepth
                ev = Event(Origin['lat'], Origin['lon'], Origin['depth'],
                           Origin['time'], strike=strike, dip=dip,
                           rake=rake)
                Logfile.add('WORKDEPTH: ' + str(Origin['depth']))
                networks = Config['networks'].split(',')
                ASL = []
                weights = []
                array_centers = []
                counter = 1
                stations_per_array = []
                Wdfs = []
                Wdfs_emp = []
                FilterMetas = []
                TTTgrids = OrderedDict()
                mints = []
                maxts = []
                refshifts = []
                for i in networks:
                    arrayname = i
                    arrayfolder = os.path.join(Folder['semb'], arrayname)
                    network = Config[i].split('|')
                    Logfile.add('network: ' + str(network))
                    FilterMeta = ttt.filterStations(Meta, Config, Origin,
                                                    network)
                    W = XDict[i]
                    refshift = RefDict[i]
                    for j in range(0, len(FilterMeta)):
                        if cfg.correct_shifts() is False:
                            refshift = refshift * 0.
                        refshifts.append(refshift)
                    FilterMeta = cmpFilterMetavsXCORR(W, FilterMeta)
                    Logfile.add(
                        'BOUNDING BOX DIMX: %s DIMY: %s GRIDSPACING: %s \n'
                        % (Config['dimx'], Config['dimy'],
                           Config['gridspacing']))
                    Logfile.red('Calculating Traveltime Grid')
                    t1 = time.time()

                    isParallel = False
                    TTTGridMap = []
                    mint = []
                    maxt = []
                    ttt_model = cfg.Str('traveltime_model')
                    # Try to reuse a previously pickled travel-time grid;
                    # compute and cache it on any failure.
                    try:
                        if cfg.Bool('correct_shifts_empirical') is True:
                            f = open(
                                '../tttgrid/tttgrid%s_%s_%s_%s_%s_emp.pkl'
                                % (phase, ttt_model, ev_emp.time,
                                   arrayname, workdepth), 'rb')
                            print(
                                "loading travel time grid%s_%s_%s_%s_%s_emp.pkl"
                                % (phase, ttt_model, ev_emp.time,
                                   arrayname, workdepth))
                            TTTGridMap_emp, mint_emp, maxt_emp = \
                                pickle.load(f)
                            f.close()
                        f = open(
                            '../tttgrid/tttgrid%s_%s_%s_%s_%s.pkl'
                            % (phase, ttt_model, ev.time, arrayname,
                               workdepth), 'rb')
                        print(
                            "loading travel time grid%s_%s_%s_%s_%s.pkl"
                            % (phase, ttt_model, ev.time, arrayname,
                               workdepth))
                        TTTGridMap, mint, maxt = pickle.load(f)
                        f.close()
                        print("loading of travel time grid sucessful")
                    except Exception:
                        print("loading of travel time grid unsucessful,\n"
                              "will now calculate the grid:")
                        if isParallel:
                            maxp = 6
                            po = multiprocessing.Pool(maxp)
                            for i in xrange(len(FilterMeta)):
                                po.apply_async(ttt.calcTTTAdv,
                                               (Config, FilterMeta[i],
                                                Origin, i, arrayname, W,
                                                refshift))
                            po.close()
                            po.join()
                        else:
                            for i in xrange(len(FilterMeta)):
                                t1 = time.time()
                                ttt.calcTTTAdv(Config, FilterMeta[i],
                                               Origin, i, arrayname, W,
                                               refshift, phase)
                                Logfile.add('ttt.calcTTTAdv : ' +
                                            str(time.time() - t1) +
                                            ' sec.')
                            assert len(FilterMeta) > 0
                            TTTGridMap = deserializer.deserializeTTT(
                                len(FilterMeta))
                            mint, maxt = \
                                deserializer.deserializeMinTMaxT(
                                    len(FilterMeta))
                            f = open(
                                '../tttgrid/tttgrid%s_%s_%s_%s_%s.pkl'
                                % (phase, ttt_model, ev.time, arrayname,
                                   workdepth), 'wb')
                            print("dumping the traveltime grid for this"
                                  " array")
                            pickle.dump([TTTGridMap, mint, maxt], f)
                            f.close()
                            if cfg.Bool('correct_shifts_empirical') \
                                    is True:
                                ttt.calcTTTAdv(Config, FilterMeta[i],
                                               Origin_emp, i, arrayname,
                                               W, refshift, phase)
                                assert len(FilterMeta) > 0
                                TTTGridMap_emp = \
                                    deserializer.deserializeTTT(
                                        len(FilterMeta))
                                mint_emp, maxt_emp = \
                                    deserializer.deserializeMinTMaxT(
                                        len(FilterMeta))
                                f = open(
                                    '../tttgrid/tttgrid%s_%s_%s_%s_%s_emp.pkl'
                                    % (phase, ttt_model, ev_emp.time,
                                       arrayname, workdepth), 'wb')
                                print("dumping the traveltime grid for"
                                      " this array")
                                pickle.dump([TTTGridMap_emp, mint_emp,
                                             maxt_emp], f)
                                f.close()
                    t2 = time.time()
                    Logfile.red('%s took %0.3f s' % ('TTT', (t2 - t1)))

                    switch = filterindex
                    tw = times.calculateTimeWindows(mint, maxt, Config,
                                                    ev, switch)
                    if cfg.Bool('correct_shifts_empirical') is True:
                        tw_emp = times.calculateTimeWindows(
                            mint_emp, maxt_emp, Config, ev_emp, switch)
                        if cfg.pyrocko_download() is True:
                            if cfg.quantity() == 'displacement':
                                Wd_emp = \
                                    waveform.readWaveformsPyrocko_restituted(
                                        FilterMeta, tw, evpath, ev_emp,
                                        desired)
                            elif cfg.Bool('synthetic_test') is True:
                                Wd_emp = \
                                    waveform.readWaveformsPyrockodummy(
                                        FilterMeta, tw_emp, evpath_emp,
                                        ev_emp)
                            else:
                                Wd_emp = waveform.readWaveformsPyrocko(
                                    FilterMeta, tw_emp, evpath_emp,
                                    ev_emp, desired)
                        elif cfg.colesseo_input() is True:
                            Wd_emp = waveform.readWaveforms_colesseo(
                                FilterMeta, tw_emp, evpath_emp, ev_emp, C)
                        else:
                            Wd_emp = waveform.readWaveforms(
                                FilterMeta, tw_emp, evpath_emp, ev_emp)
                        if cfg.Bool('synthetic_test') is True \
                                or cfg.Bool('dynamic_filter') is True:
                            Wdf_emp = waveform.processdummyWaveforms(
                                Wd_emp, Config, Folder, arrayname,
                                FilterMeta, ev_emp, switch, W)
                            Wdfs_emp.extend(Wdf_emp)
                        else:
                            Wdf_emp = waveform.processWaveforms(
                                Wd_emp, Config, Folder, arrayname,
                                FilterMeta, ev_emp, switch, W)
                            Wdfs_emp.extend(Wdf_emp)

                    if cfg.pyrocko_download() is True:
                        if cfg.quantity() == 'displacement':
                            Wd = \
                                waveform.readWaveformsPyrocko_restituted(
                                    FilterMeta, tw, evpath, ev, desired)
                        elif cfg.Bool('synthetic_test') is True:
                            Wd = waveform.readWaveformsPyrockodummy(
                                FilterMeta, tw, evpath, ev)
                        else:
                            Wd = waveform.readWaveformsPyrocko(
                                FilterMeta, tw, evpath, ev, desired)
                    elif cfg.colesseo_input() is True:
                        Wd = waveform.readWaveforms_colesseo(
                            FilterMeta, tw, evpath, ev, C)
                    else:
                        Wd = waveform.readWaveforms(FilterMeta, tw,
                                                    evpath, ev)
                    if cfg.Bool('synthetic_test') is True \
                            or cfg.Bool('dynamic_filter') is True:
                        Wdf = waveform.processdummyWaveforms(
                            Wd, Config, Folder, arrayname, FilterMeta,
                            ev, switch, W)
                        Wdfs.extend(Wdf)
                    else:
                        Wdf = waveform.processWaveforms(
                            Wd, Config, Folder, arrayname, FilterMeta,
                            ev, switch, W)
                        Wdfs.extend(Wdf)

                    C.writeStationFile(FilterMeta, Folder, counter)
                    Logfile.red('%d Streams added for Processing' %
                                (len(Wd)))

                    t1 = time.time()
                    f = open('../tttgrid/tttgrid%s_%s_%s_%s_%s.pkl'
                             % (phase, ttt_model, ev.time, arrayname,
                                workdepth), 'rb')
                    TTTGridMap, mint, maxt = pickle.load(f)
                    f.close()
                    if switch == 0:
                        step = cfg.step()
                    if switch == 1:
                        step = cfg.step_f2()
                    if cfg.UInt('forerun') > 0:
                        ntimes = int((cfg.UInt('forerun') +
                                      cfg.UInt('duration')) / step)
                    else:
                        ntimes = int((cfg.UInt('duration')) / step)

                    if cfg.Bool('combine_all') is False:
                        if cfg.optimize() is True:
                            optim.solve(counter, Config, Wdf, FilterMeta,
                                        mint, maxt, TTTGridMap, Folder,
                                        Origin, ntimes, switch, ev,
                                        arrayfolder, syn_in, refshifts,
                                        phase, rpe + str(arrayname),
                                        flag_rpe)
                        else:
                            if cfg.Bool('correct_shifts_empirical') \
                                    is True:
                                if cfg.Bool(
                                        'correct_shifts_empirical_run') \
                                        is True:
                                    f = open(
                                        '../tttgrid/tttgrid%s_%s_%s_%s_%s_emp.pkl'
                                        % (phase, ttt_model, ev_emp.time,
                                           arrayname, workdepth), 'rb')
                                    TTTGridMap_emp, mint_emp, maxt_emp = \
                                        pickle.load(f)
                                    f.close()
                                    flag_rpe = True
                                    arraySemb, weight, array_center = \
                                        sembCalc.doCalc(
                                            counter, Config, Wdf_emp,
                                            FilterMeta, mint, maxt,
                                            TTTGridMap_emp, Folder,
                                            Origin, ntimes, switch,
                                            ev_emp, arrayfolder, syn_in,
                                            refshifts, phase,
                                            rpe + str(arrayname),
                                            flag_rpe)
                                    if sys.version_info.major >= 3:
                                        f = open(rpe + str(arrayname),
                                                 'rb')
                                    else:
                                        f = open(rpe + str(arrayname))
                                    RefDict_empirical = pickle.load(f)
                                    refshifts = RefDict_empirical
                                    for j in range(0, len(FilterMeta)):
                                        if cfg.correct_shifts() is False:
                                            refshifts[j] = \
                                                refshifts[j] * 0.
                            flag_rpe = False
                            arraySemb, weight, array_center = \
                                sembCalc.doCalc(
                                    counter, Config, Wdf, FilterMeta,
                                    mint, maxt, TTTGridMap, Folder,
                                    Origin, ntimes, switch, ev,
                                    arrayfolder, syn_in, refshifts,
                                    phase, rpe + str(arrayname),
                                    flag_rpe)
                            weights.append(weight)
                            array_centers.append(array_center)
                            ASL.append(arraySemb)
                            sembCalc.writeSembMatricesSingleArray(
                                arraySemb, Config, Origin, arrayfolder,
                                ntimes, switch, phase)

                    fileName = os.path.join(arrayfolder, 'stations.txt')
                    Logfile.add('Write to file ' + fileName)
                    fobjarraynetwork = open(fileName, 'w')
                    for i in FilterMeta:
                        fobjarraynetwork.write(
                            ('%s %s %s\n') % (i.getName(), i.lat, i.lon))
                    fobjarraynetwork.close()
                    t2 = time.time()
                    Logfile.add('CALC took %0.3f sec' % (t2 - t1))
                    counter += 1
                    stations_per_array.append(len(FilterMeta))
                    TTTgrids.update(TTTGridMap)
                    mints.append(mint)
                    maxts.append(maxt)
                    FilterMetas[len(FilterMetas):] = FilterMeta
                    TTTGridMap = []

                if cfg.Bool('combine_all') is True:
                    # Treat all arrays as one big virtual array.
                    if cfg.pyrocko_download() is True:
                        if cfg.Bool('synthetic_test') is True:
                            Wd = waveform.readWaveformsPyrockodummy(
                                FilterMetas, tw, evpath, ev)
                        else:
                            if cfg.quantity() == 'displacement':
                                Wd = \
                                    waveform.readWaveformsPyrocko_restituted(
                                        FilterMetas, tw, evpath, ev,
                                        desired)
                            else:
                                Wd = waveform.readWaveformsPyrocko(
                                    FilterMetas, tw, evpath, ev, desired)
                    elif cfg.colesseo_input() is True:
                        Wd = waveform.readWaveforms_colesseo(
                            FilterMetas, tw, evpath, ev, C)
                    else:
                        Wd = waveform.readWaveforms(FilterMetas, tw,
                                                    evpath, ev)
                    if cfg.Bool('synthetic_test') is True:
                        Wdf = waveform.processdummyWaveforms(
                            Wd, Config, Folder, arrayname, FilterMetas,
                            ev, switch, W)
                    else:
                        Wdf = waveform.processWaveforms(
                            Wd, Config, Folder, arrayname, FilterMetas,
                            ev, switch, W)

                    mint = num.min(mints)
                    maxt = num.max(maxts)
                    flag_rpe = False
                    if cfg.Bool('bootstrap_array_weights') is False:
                        arraySemb, weight, array_center = \
                            sembCalc.doCalc(
                                counter, Config, Wdf, FilterMetas, mint,
                                maxt, TTTgrids, Folder, Origin, ntimes,
                                switch, ev, arrayfolder, syn_in,
                                refshifts_global, phase,
                                rpe + str(arrayname), flag_rpe)
                        ASL.append(arraySemb)
                        weights.append(weight)
                        array_centers.append(array_center)
                        sembCalc.writeSembMatricesSingleArray(
                            arraySemb, Config, Origin, arrayfolder,
                            ntimes, switch, phase)
                    else:
                        nboot = cfg.Int('n_bootstrap')
                        tmp_general = 1
                        for ibootstrap in range(nboot):
                            # Draw random array weights that sum to ~1 by
                            # differencing sorted uniforms; each station
                            # of array k gets that array's weight g[k].
                            f = rstate.uniform(0., 1., size=counter + 1)
                            f = num.sort(f)
                            g = f[1:] - f[:-1]
                            k = 0
                            ws = []
                            for wss in range(0, counter - 1):
                                for stats in range(
                                        0, stations_per_array[k]):
                                    ws.append(g[k])
                                # was: `k = +1` (assigns the constant 1
                                # every pass) -- advance to next array.
                                k += 1
                            ws = num.asarray(ws)
                            arraySemb, weight, array_center = \
                                sembCalc.doCalc(
                                    counter, Config, Wdf, FilterMetas,
                                    mint, maxt, TTTgrids, Folder,
                                    Origin, ntimes, switch, ev,
                                    arrayfolder, syn_in,
                                    refshifts_global, phase,
                                    rpe + str(arrayname), flag_rpe,
                                    bs_weights=ws)
                            ASL.append(arraySemb)
                            weights.append(weight)
                            array_centers.append(array_center)
                            sembCalc.writeSembMatricesSingleArray(
                                arraySemb, Config, Origin, arrayfolder,
                                ntimes, switch, phase,
                                bootstrap=ibootstrap)
                            if ASL:
                                Logfile.red('collect semblance matrices'
                                            ' from all arrays')
                                sembmax, tmp = sembCalc.collectSemb(
                                    ASL, Config, Origin, Folder, ntimes,
                                    len(networks), switch, array_centers,
                                    phase, cboot=ibootstrap)
                                tmp_general *= tmp
                                ASL = []
                        sembmax, tmp = sembCalc.collectSemb(
                            ASL, Config, Origin, Folder, ntimes,
                            len(networks), switch, array_centers, phase,
                            cboot=None, temp_comb=tmp_general)

                    if cfg.optimize_all() is True:
                        import optim_csemb
                        sembmax, tmp = sembCalc.collectSemb(
                            ASL, Config, Origin, Folder, ntimes,
                            len(networks), switch)
                        optim_csemb.solve(counter, Config, Wdf,
                                          FilterMeta, mint, maxt,
                                          TTTGridMap, Folder, Origin,
                                          ntimes, switch, ev,
                                          arrayfolder, syn_in, ASL,
                                          sembmax, evpath, XDict,
                                          RefDict, workdepth,
                                          filterindex, Wdfs)

                if ASL and cfg.Bool('bootstrap_array_weights') is False:
                    Logfile.red('collect semblance matrices from all'
                                ' arrays')
                    sembmax, tmp = sembCalc.collectSemb(
                        ASL, Config, Origin, Folder, ntimes,
                        len(networks), switch, array_centers, phase)
                    if cfg.Bool('weight_by_noise') is True:
                        sembCalc.collectSembweighted(
                            ASL, Config, Origin, Folder, ntimes,
                            len(networks), switch, weights)
                else:
                    Logfile.red('Nothing to do -> Finish')
    print("last work depth:")
    print(workdepth)
def writeSembMatricesSingleArray(SembList, Config, Origin, arrayfolder, ntimes,
                                 switch):
    '''
    Write the semblance matrices of a single array to disk, one ASC file
    per time step.

    Each file carries a small commented header (origin time, step/winlen
    and the grid geometry) followed by one ``lat lon depth semblance``
    row per grid point.
    '''
    logger.info('start write semblance matrices')

    cfg = ConfigObj(dict=Config)
    origin = OriginCfg(Origin)

    dimX = cfg.dimX()  # ('dimx')
    dimY = cfg.dimY()  # ('dimy')
    winlen = cfg.winlen()  # ('winlen')
    step = cfg.step()  # ('step')

    gridspacing = cfg.Float('gridspacing')
    migpoints = dimX * dimY

    o_lat = origin.lat()  # float (Origin['lat'])
    o_lon = origin.lon()  # float (Origin['lon'])

    # Build the flattened lat/lon grid centred on the origin; remember the
    # south-west corner (first lat/lon computed) for the file headers.
    latv = []
    lonv = []
    for row in xrange(dimX):
        lat_here = o_lat - ((dimX - 1) / 2) * gridspacing + row * gridspacing
        if row == 0:
            Latul = lat_here
        for col in xrange(dimY):
            lon_here = o_lon - ((dimY - 1) / 2) * gridspacing \
                + col * gridspacing
            if col == 0:
                Lonul = lon_here
            latv.append(lat_here)
            lonv.append(lon_here)

    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d' % (rc.day, rc.month, rc.year,
                                       rc.hour, rc.minute, rc.second)
    d = rc.timestamp

    # One output file per time step of this array's semblance.
    for timestep, sembrow in enumerate(SembList):
        fname = os.path.join(
            arrayfolder,
            '%s-%s_%03d.ASC' % (switch, Origin['depth'], timestep))
        fobj = open(fname, 'w')

        fobj.write('# %s , %s\n' % (d, rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n'
                   % (step, ntimes, winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n'
                   % (Latul, gridspacing, dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlon: %f \n'
                   % (Lonul, gridspacing, dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')

        for idx in range(migpoints):
            fobj.write('%.2f %.2f %.2f %.20f\n'
                       % (latv[idx], lonv[idx], origin.depth(),
                          sembrow[idx]))

        fobj.close()
def collectSembweighted(SembList, Config, Origin, Folder, ntimes, arrays,
                        switch, weights):
    '''
    Collect the semblance matrices of all processes (arrays), combine them
    and write one ASC grid file per time step plus a summary
    ``sembmax_<switch>.txt`` with the semblance maximum of each step.

    :param SembList: per-array semblance matrices (iterable of 2-D arrays,
        one row per time step).
    :param weights:  per-array weights.
        NOTE(review): the weight ``w`` unpacked from
        ``zip(SembList, weights)`` below is never used -- the combination
        is a plain (unweighted) product.  Despite the function name, the
        weighting appears to be missing; confirm intended formula before
        changing.
    '''
    Logfile.add('start collect in collectSemb')

    cfg = ConfigObj(dict=Config)
    origin = ConfigObj(dict=Origin)

    dimX = cfg.dimX()  # ('dimx')
    dimY = cfg.dimY()  # ('dimy')
    winlen = cfg.winlen()  # ('winlen')
    step = cfg.step()  # ('step')

    latv = []
    lonv = []

    gridspacing = cfg.Float('gridspacing')
    migpoints = dimX * dimY
    o_lat = origin.lat()  # float (Origin['lat'])
    o_lon = origin.lon()  # float (Origin['lon'])
    oLatul = 0
    oLonul = 0
    z = 0

    # Build the flattened lat/lon grid centred on the origin; Latul/Lonul
    # capture the south-west corner for the file headers.
    for i in xrange(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

        if z == 0 and i == 0:
            Latul = oLatul
        o = 0

        for j in xrange(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing

            if o == 0 and j == 0:
                Lonul = oLonul

            latv.append(oLatul)
            lonv.append(oLonul)

    # Combine arrays by element-wise product (see NOTE above: `w` unused).
    tmp = 1
    for a, w in zip(SembList, weights):
        tmp *= a
    #sys.exit()

    # Per-time-step maxima (value and position).
    sembmaxvaluev = num.ndarray(ntimes, dtype=float)
    sembmaxlatv = num.ndarray(ntimes, dtype=float)
    sembmaxlonv = num.ndarray(ntimes, dtype=float)

    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d' % (rc.day, rc.month, rc.year,
                                       rc.hour, rc.minute, rc.second)
    d = rc.timestamp
    # NOTE(review): hard-coded array count written to the summary file;
    # the `arrays` parameter is ignored -- confirm which is intended.
    usedarrays = 5
    folder = Folder['semb']
    fobjsembmax = open(os.path.join(folder, 'sembmax_%s.txt' % (switch)), 'w')

    for a, i in enumerate(tmp):
        logger.info('timestep %d' % a)

        fobj = open(
            os.path.join(
                folder, '%s-%s_%03d._weighted_semblance.ASC'
                % (switch, Origin['depth'], a)), 'w')
        #fobj = open (os.path.join (folder, '%03d.ASC' % a),'w')

        fobj.write('# %s , %s\n' % (d, rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n'
                   % (step, ntimes, winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n'
                   % (Latul, gridspacing, dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlon: %f \n'
                   % (Lonul, gridspacing, dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')

        sembmax = 0
        sembmaxX = 0
        sembmaxY = 0

        origin = DataTypes.dictToLocation(Origin)
        uncert = num.std(i)  # maybe not std?
        for j in range(migpoints):
            x = latv[j]
            y = lonv[j]
            semb = i[j]

            fobj.write('%.2f %.2f %.20f\n' % (x, y, semb))

            if semb > sembmax:
                sembmax = semb  # search for maximum and position of maximum on semblance grid for given time step
                sembmaxX = x
                sembmaxY = y

        # Distance (deg) and azimuth of this step's maximum from origin.
        delta = loc2degrees(Location(sembmaxX, sembmaxY), origin)
        azi = toAzimuth(float(Origin['lat']), float(Origin['lon']),
                        float(sembmaxX), float(sembmaxY))

        sembmaxvaluev[a] = sembmax
        sembmaxlatv[a] = sembmaxX
        sembmaxlonv[a] = sembmaxY

        # NOTE(review): 119.19 looks like a degrees->km factor (cf. the
        # usual ~111.19 km/deg) -- possible typo; verify downstream usage.
        fobjsembmax.write('%d %.2f %.2f %.20f %.20f %d %03f %f %03f\n'
                          % (a * step, sembmaxX, sembmaxY, sembmax, uncert,
                             usedarrays, delta, float(azi), delta * 119.19))
        fobj.close()

    fobjsembmax.close()

    durationpath = os.path.join(folder, "duration.txt")
    trigger.writeSembMaxValue(sembmaxvaluev, sembmaxlatv, sembmaxlonv,
                              ntimes, Config, Folder)
    print 'DD2: ', durationpath
    trigger.semblancestalta(sembmaxvaluev, sembmaxlatv, sembmaxlonv)
def optimization(*params, **args):
    """Objective function for the semblance-based source optimisation.

    Called by an external optimizer with a fixed positional layout: the
    candidate model is ``params[0]`` (used via ``parameter[0]`` below) and
    ``params[1:22]`` carry the processing context (counter, Config, ...,
    Wdfs).  Recomputes synthetic semblance for every network with the
    candidate parameters, collects it, and returns a single normalized
    L2 misfit between observed (``data``) and synthetic semblance maxima.

    :returns: float, the global normalized misfit (smaller is better).
    """
    # Unpack the fixed positional context supplied by the optimizer.
    counter = params[1]
    Config = params[2]
    Wdf = params[3]
    FilterMeta = params[4]
    mint = params[5]
    maxt = params[6]
    TTTGridMap = params[7]
    Folder = params[8]
    Origin = params[9]
    ntimes = params[10]
    switch = params[11]
    ev = params[12]
    arrayfolder = params[13]
    syn_in = params[14]
    data = params[15]
    evpath = params[16]
    XDict = params[17]
    RefDict = params[18]
    workdepth = params[19]
    filterindex = params[20]
    Wdfs = params[21]

    networks = Config['networks'].split(',')
    params = num.asarray(params)
    # parameter[0] is the candidate source-model vector being optimised.
    parameter = num.ndarray.tolist(params)
    ASL_syn = []

    C = config.Config(evpath)
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    # Station metadata source depends on the configured input backend.
    if cfg.pyrocko_download() == True:
        Meta = C.readpyrockostations()  #
    elif cfg.colesseo_input() == True:
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_path = cfg.colosseo_scenario_yml()[:-12]
        Meta = C.readcolosseostations(scenario_path)
    else:
        Meta = C.readMetaInfoFile()
    l = 0
    for i in networks:
        arrayname = i
        arrayfolder = os.path.join(Folder['semb'], arrayname)

        network = Config[i].split('|')

        FilterMeta = ttt.filterStations(Meta, Config, Origin, network)

        # Arrays with fewer than 3 stations cannot beamform usefully.
        if len(FilterMeta) < 3:
            continue

        W = XDict[i]
        refshift = RefDict[i]

        FilterMeta = cmpFilterMetavsXCORR(W, FilterMeta)

        Logfile.add('BOUNDING BOX DIMX: %s DIMY: %s GRIDSPACING: %s \n'
                    % (Config['dimx'], Config['dimy'],
                       Config['gridspacing']))

        # Reuse the cached travel-time grid for this array/depth.
        f = open('../tttgrid/tttgrid_%s_%s_%s.pkl'
                 % (ev.time, arrayname, workdepth), 'rb')
        TTTGridMap, mint, maxt = pickle.load(f)
        f.close()

        switch = filterindex

        tw = times.calculateTimeWindows(mint, maxt, Config, ev, switch)
        Wdf = Wdfs[l]
        # Synthetic semblance for this array with the candidate model.
        semb_syn = doCalc_syn(counter, Config, Wdf, FilterMeta, mint,
                              maxt, TTTGridMap, Folder, Origin, ntimes,
                              switch, ev, arrayfolder, syn_in,
                              parameter[0])
        ASL_syn.append(semb_syn)
        counter += 1
        l += 1

    sembmax_syn = sembCalc.collectSemb(ASL_syn, Config, Origin, Folder,
                                       ntimes, len(networks), switch)

    misfit_list = []  # init a list for all the singular misfits
    norm_list = []  # init a list for all the singular normalizations
    taper = trace.CosFader(
        xfade=2.0)  # Cosine taper with fade in and out of 2s.
    bw_filter = trace.ButterworthResponse(
        corner=0.000055,  # in Hz
        order=4,
        type='high')  # "low"pass or "high"pass
    setup = trace.MisfitSetup(
        description='Misfit Setup',
        norm=2,  # L1 or L2 norm
        taper=taper,
        filter=bw_filter,
        domain='time_domain')
    nsamples = len(data)
    # Fixed synthetic reference start time so observed and synthetic
    # traces are aligned for the misfit computation.
    tmin = util.str_to_time('2010-02-20 15:15:30.100')
    tr = trace.Trace(station='TEST', channel='Z', deltat=0.5, tmin=tmin,
                     ydata=data)
    syn = trace.Trace(station='TEST', channel='Z', deltat=0.5, tmin=tmin,
                      ydata=sembmax_syn)
    misfit, norm = tr.misfit(
        candidate=syn, setup=setup
    )  # calculate the misfit of a single observed trace with its synthetics
    # with the setup from above
    misfit_list.append(misfit), norm_list.append(
        norm)  # append the misfit into a list
    # Combine all misfits into one normalized scalar value.
    global_misfit_normed = num.sqrt(
        num.nansum((num.asarray(misfit_list))**2) /
        # sum all the misfits and normalize to get a single minimizable value
        num.nansum((num.asarray(norm_list))**2))
    return global_misfit_normed
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
           syn_in):
    '''
    method for calculating semblance of one station array

    Reads the array's waveforms, optionally replaces them with synthetics
    (and noise), optionally phase-aligns them, maps each trace onto its
    traveltime grid, and runs the semblance stack.

    :param flag: process id, used only for log messages
    :param Config: raw configuration dict
    :param WaveformDict: mapping trace-name -> obspy trace for this array
    :param FilterMetaData: station metadata matching WaveformDict
    :param Gmint,Gmaxt: min/max traveltime bounds (logging / C-code path)
    :param TTTGridMap: mapping trace-name -> traveltime grid object
    :param Folder: output folder dict (key 'semb' used)
    :param Origin: origin dict (key 'depth' used)
    :param ntimes: number of time steps (recomputed below from config)
    :param switch: filter index (0 or 1) selecting bandpass corners
    :param ev: event object (.time, .lat, .lon, .depth)
    :param arrayfolder: per-array output directory
    :param syn_in: synthetic-test parameter accessor
    :returns: (partSemb, weight, array_center) — semblance array of shape
              (ntimes, dimX*dimY), noise weight (float), [mean lat, mean lon]
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT : %f MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)
    cfg_f = FilterCfg(Config)

    timeev = util.str_to_time(ev.time)
    # Grid and windowing parameters from config.
    dimX = cfg.dimX()                     # ('dimx')
    dimY = cfg.dimY()                     # ('dimy')
    winlen = cfg.winlen()                 # ('winlen')
    step = cfg.step()                     # ('step')
    new_frequence = cfg.newFrequency()    # ('new_frequence')
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999            # reduced to the shortest trace below

    # Incoming ntimes parameter is overridden by the configured window.
    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    # Collect pyrocko stations/coordinates matching the waveform keys.
    # NOTE(review): the loop variable 'trace' shadows the pyrocko 'trace'
    # module imported further down — works only because of statement order.
    stations = []
    py_trs = []
    lats = []
    lons = []
    for trace in calcStreamMap.iterkeys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=float(il.lat), lon=float(il.lon),
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)
                lats.append(float(il.lat))
                lons.append(float(il.lon))
    array_center = [num.mean(lats), num.mean(lons)]

    #==================================synthetic BeamForming======================
    # Replace observed data with forward-modelled synthetics when the
    # 'synthetic_test' flag is set.
    if cfg.Bool('synthetic_test') is True:
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        recordstarttimes = []
        for tracex in calcStreamMap.iterkeys():
            recordstarttimes.append(
                calcStreamMap[tracex].stats.starttime.timestamp)
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMap[tracex])
            tmin = tr_org.tmin    # tmin of the last trace wins
        #tmin= num.min(recordstarttimes)
        targets = []
        sources = []
        for st in stations:
            target = Target(lat=st.lat, lon=st.lon, store_id=store_id,
                            codes=(st.network, st.station, st.location,
                                   'BHZ'),
                            tmin=-6900, tmax=6900,
                            interpolation='multilinear',
                            quantity=cfg.quantity())
            targets.append(target)

        if syn_in.nsources() == 1:
            # Single-source case: parameters from the *_0() accessors.
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                # NOTE(review): exec of config-provided code — trusted input
                # only; rebinds 'stf' as a side effect.
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(
                    RectangularSource(
                        lat=float(syn_in.lat_0()),
                        lon=float(syn_in.lon_0()),
                        east_shift=float(syn_in.east_shift_0()) * 1000.,
                        north_shift=float(syn_in.north_shift_0()) * 1000.,
                        depth=syn_in.depth_syn_0() * 1000.,
                        strike=syn_in.strike_0(),
                        dip=syn_in.dip_0(),
                        rake=syn_in.rake_0(),
                        width=syn_in.width_0() * 1000.,
                        length=syn_in.length_0() * 1000.,
                        nucleation_x=syn_in.nucleation_x_0(),
                        slip=syn_in.slip_0(),
                        nucleation_y=syn_in.nucleation_y_0(),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_0())))
            if syn_in.source() == 'DCSource':
                sources.append(
                    DCSource(lat=float(syn_in.lat_0()),
                             lon=float(syn_in.lon_0()),
                             east_shift=float(syn_in.east_shift_0()) * 1000.,
                             north_shift=float(syn_in.north_shift_0()) * 1000.,
                             depth=syn_in.depth_syn_0() * 1000.,
                             strike=syn_in.strike_0(),
                             dip=syn_in.dip_0(),
                             rake=syn_in.rake_0(),
                             stf=stf,
                             time=util.str_to_time(syn_in.time_0()),
                             magnitude=syn_in.magnitude_0()))
        else:
            # Multi-source case: parameters from the *_1(i) accessors.
            for i in range(syn_in.nsources()):
                if syn_in.use_specific_stf() is True:
                    stf = syn_in.stf()
                    exec(stf)
                else:
                    stf = STF()
                if syn_in.source() == 'RectangularSource':
                    sources.append(
                        RectangularSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            width=syn_in.width_1(i) * 1000.,
                            length=syn_in.length_1(i) * 1000.,
                            nucleation_x=syn_in.nucleation_x_1(i),
                            slip=syn_in.slip_1(i),
                            nucleation_y=syn_in.nucleation_y_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i))))
                if syn_in.source() == 'DCSource':
                    sources.append(
                        DCSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i)),
                            magnitude=syn_in.magnitude_1(i)))
        #source = CombiSource(subsources=sources)

        # Sum the responses of all sources sample-wise into one set of
        # synthetic traces.
        synthetic_traces = []
        for source in sources:
            response = engine.process(source, targets)
            synthetic_traces_source = response.pyrocko_traces()
            if not synthetic_traces:
                synthetic_traces = synthetic_traces_source
            else:
                for trsource, tr in zip(synthetic_traces_source,
                                        synthetic_traces):
                    tr.add(trsource)
        from pyrocko import trace as trld
        #trld.snuffle(synthetic_traces)
        timeev = util.str_to_time(syn_in.time_0())

        # Optionally superpose modelled noise on the synthetics.
        if cfg.Bool('synthetic_test_add_noise') is True:
            from noise_addition import add_noise
            trs_orgs = []
            calcStreamMapsyn = calcStreamMap.copy()
            #from pyrocko import trace
            for tracex in calcStreamMapsyn.iterkeys():
                for trl in synthetic_traces:
                    # Heuristic name-matching between synthetic and observed
                    # trace ids (several slice offsets tried).
                    if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                            trl.name()[3:13]) == str(tracex[3:]) or str(
                            trl.name()[3:11]) == str(tracex[3:]) or str(
                            trl.name()[3:14]) == str(tracex[3:]):
                        tr_org = obspy_compat.to_pyrocko_trace(
                            calcStreamMapsyn[tracex])
                        tr_org.downsample_to(2.0)
                        trs_orgs.append(tr_org)
            store_id = syn_in.store()
            engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
            # 'source' here is the last source from the loop above.
            synthetic_traces = add_noise(trs_orgs, engine,
                                         source.pyrocko_event(),
                                         stations,
                                         store_id, phase_def='P')

        # Chop synthetics to the observed record windows, bandpass the
        # observed copies, and swap the synthetics into the stream map.
        trs_org = []
        trs_orgs = []
        from pyrocko import trace
        fobj = os.path.join(arrayfolder, 'shift.dat')  # path only; never opened
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.iterkeys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                        trl.name()[3:13]) == str(tracex[3:]) or str(
                        trl.name()[3:11]) == str(tracex[3:]) or str(
                        trl.name()[3:14]) == str(tracex[3:]):
                    mod = trl
                    recordstarttime = calcStreamMapsyn[
                        tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapsyn[
                        tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    # switch selects which bandpass corner pair applies.
                    if switch == 0:
                        tr_org.bandpass(4, cfg_f.flo(), cfg_f.fhi())
                    elif switch == 1:
                        tr_org.bandpass(4, cfg_f.flo2(), cfg_f.fhi2())
                    trs_orgs.append(tr_org)
                    tr_org_add = mod.chop(recordstarttime, recordendtime,
                                          inplace=False)
                    synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapsyn[tracex] = synthetic_obs_tr
                    trs_org.append(tr_org_add)
        calcStreamMap = calcStreamMapsyn

    # --- optional pre-alignment of the stream map ---------------------------
    if cfg.Bool('shift_by_phase_pws') == True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.iterkeys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2,
                              normalize=True)
        # NOTE(review): every key ends up mapped to the same (last) stacked
        # trace — confirm this is intended.
        for tr in pws_stack:
            for trace in calcStreamMapshifted.iterkeys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_cc') is True:
        from stacking import align_traces
        calcStreamMapshifted = calcStreamMap.copy()
        list_tr = []
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = calcStreamMapshifted[trace]
            list_tr.append(tr_org)
        shifts, ccs = align_traces(list_tr, 10, master=False)
        # NOTE(review): each shift is applied to *every* trace in turn, so
        # only the final shift value persists per trace — verify.
        for shift in shifts:
            for trace in calcStreamMapshifted.iterkeys():
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapshifted[trace])
                tr_org.shift(shift)
                shifted = obspy_compat.to_obspy_trace(tr_org)
                calcStreamMapshifted[trace] = shifted
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') is True:
        # Beamform and chop each trace to its record window using a cake
        # first-arrival timing.
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for tracex in calcStreamMapshifted.iterkeys():
            for trl in shifted_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                        trl.name()[3:13]) == str(tracex[3:]) or str(
                        trl.name()[3:11]) == str(tracex[3:]) or str(
                        trl.name()[3:14]) == str(tracex[3:]):
                    mod = trl
                    recordstarttime = calcStreamMapshifted[
                        tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapshifted[
                        tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapshifted[tracex])
                    tr_org_add = mod.chop(recordstarttime, recordendtime,
                                          inplace=False)
                    shifted_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapshifted[tracex] = shifted_obs_tr
        calcStreamMap = calcStreamMapshifted

    # --- noise-based array weight -------------------------------------------
    weight = 1.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.iterkeys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')

    # --- optional obspy f-k array response analysis -------------------------
    if cfg.Bool('array_response') is True:
        from obspy.signal import array_analysis
        from obspy.core import stream
        ntimesr = int((forerun + duration) / step)
        nsampr = int(winlen)
        nstepr = int(step)
        sll_x = -3.0
        slm_x = 3.0
        sll_y = -3.0
        slm_y = 3.0
        # NOTE(review): trailing commas make sl_s and frqlow tuples, not
        # floats — looks like a typo.
        sl_s = 0.03,
        # sliding window properties
        # frequency properties
        frqlow = 1.0,
        frqhigh = 8.0
        prewhiten = 0
        # restrict output
        semb_thres = -1e9
        vel_thres = -1e9
        # NOTE(review): stime/etime are never defined before these
        # self-assignments — this branch raises NameError when enabled.
        stime = stime
        etime = etime
        stream_arr = stream.Stream()
        # NOTE(review): relies on calcStreamMapshifted from a previous
        # shift branch; undefined if no shift option was enabled.
        for trace in calcStreamMapshifted.iterkeys():
            stream_arr.append(calcStreamMapshifted[trace])
        results = array_analysis.array_processing(stream_arr, nsamp, nstep,
                                                  sll_x, slm_x, sll_y, slm_y,
                                                  sl_s, semb_thres, vel_thres,
                                                  frqlow, frqhigh, stime,
                                                  etime, prewhiten)
        timestemp = results[0]
        relative_relpow = results[1]
        absolute_relpow = results[2]

    # Shortest trace length bounds the sample matrix below.
    for trace in calcStreamMap.iterkeys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp
        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ###########################################################################
    # Flat working arrays: one row per station for samples and traveltimes,
    # plus flattened lat/lon grids of the migration points.
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ###########################################################################

    c = 0
    streamCounter = 0
    for key in calcStreamMap.iterkeys():
        streamID = key
        c2 = 0
        # Copy samples up to the common minimum length.
        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1

        # Match this stream to its traveltime grid.
        # NOTE(review): inner loop rebinds 'key'; the else-branch is a no-op
        # expression statement ("NEIN", ...) — dead debug code.
        for key in TTTGridMap.iterkeys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                "NEIN", streamID, key

        if not streamCounter in traveltimes:
            continue                       #hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        gridElem = g.GridArray

        # Flatten the (x, y) grid into row-major traveltime/lat/lon vectors.
        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1
    #endfor

    ################ CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ########
    nsamp = winlen * new_frequence
    nstep = step * new_frequence
    migpoints = dimX * dimY
    dimZ = 0
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d NTIMES: %d' % (flag, ntimes))

    # Debug dump of all semblance input parameters (disabled).
    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))

    #===================compressed sensing=================================
    try:
        cs = cfg.cs()
    except:
        cs = 0
    if cs == 1:
        csmaxvaluev = num.ndarray(ntimes, dtype=float)
        csmaxlatv = num.ndarray(ntimes, dtype=float)
        csmaxlonv = num.ndarray(ntimes, dtype=float)
        folder = Folder['semb']
        fobjcsmax = open(os.path.join(folder, 'csmax_%s.txt' % (switch)), 'w')
        traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
        traveltime2 = toMatrix(traveltimes, dimX * dimY)  # for relstart
        traveltime = traveltime.reshape(dimX * dimY, nostat)
        import matplotlib as mpl
        import scipy.optimize as spopt
        import scipy.fftpack as spfft
        import scipy.ndimage as spimg
        import cvxpy as cvx
        import matplotlib.pyplot as plt
        A = spfft.idct(traveltime, norm='ortho', axis=0)
        n = (nostat * dimX * dimY)
        vx = cvx.Variable(dimX * dimY)
        res = cvx.Variable(1)
        objective = cvx.Minimize(cvx.norm(res, 1))
        back2 = num.zeros([dimX, dimY])
        l = int(nsamp)
        fobj = open(os.path.join(
            folder, '%s-%s_%03d.cs' % (switch, Origin['depth'], l)), 'w')
        # Per time step: build the data vector, solve the L1 problem, record
        # the location of the sparse-reconstruction maximum.
        # NOTE(review): fobj/fobjcsmax are closed inside the try of the first
        # successful iteration; later writes are swallowed by the bare
        # except — best-effort by design, apparently.
        for i in range(ntimes):
            ydata = []
            try:
                for tr in traces:
                    relstart = int((dimX * dimY - mint) *
                                   new_frequence + 0.5) + i * nstep
                    tr = spfft.idct(tr[relstart + i:relstart + i +
                                       dimX * dimY],
                                    norm='ortho', axis=0)
                    ydata.append(tr)
                ydata = num.asarray(ydata)
                ydata = ydata.reshape(dimX * dimY, nostat)

                constraints = [
                    res == cvx.sum_entries(0 + num.sum([
                        ydata[:, x] - A[:, x] * vx for x in range(nostat)
                    ]))
                ]

                prob = cvx.Problem(objective, constraints)
                result = prob.solve(verbose=False, max_iters=200)

                x = num.array(vx.value)
                x = num.squeeze(x)
                back1 = x.reshape(dimX, dimY)
                sig = spfft.idct(x, norm='ortho', axis=0)
                back2 = back2 + back1
                xs = num.array(res.value)
                xs = num.squeeze(xs)
                max_cs = num.max(back1)
                idx = num.where(back1 == back1.max())
                csmaxvaluev[i] = max_cs
                csmaxlatv[i] = latv[idx[0]]
                csmaxlonv[i] = lonv[idx[1]]
                fobj.write('%.5f %.5f %.20f\n' %
                           (latv[idx[0]], lonv[idx[1]], max_cs))
                fobjcsmax.write('%.5f %.5f %.20f\n' %
                                (latv[idx[0]], lonv[idx[1]], max_cs))
                fobj.close()
                fobjcsmax.close()
            except:
                pass

    #==================================semblance calculation========================================
    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)
    traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = False
    #try:
    if USE_C_CODE:
        # C extension path (disabled above).
        import Cm
        import CTrig
        start_time = time.time()
        k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                     Gmint, new_frequence, minSampleCount, latv, lonv,
                     traveltimes, traces)
        print("--- %s seconds ---" % (time.time() - start_time))
    else:
        # Python path: note nsamp/nstep are redefined in *seconds* here
        # (not samples) and Gmint is replaced by the forerun — the otest
        # implementation apparently expects these units.
        start_time = time.time()
        ntimes = int((forerun + duration) / step)
        nsamp = int(winlen)
        nstep = int(step)
        Gmint = cfg.Int('forerun')
        k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                  Gmint, new_frequence, minSampleCount, latv, lonv,
                  traveltimes, traces, calcStreamMap, timeev)
        print("--- %s seconds ---" % (time.time() - start_time))
    #except ValueError:
    #        k  = Cm.otest(maxp,nostat,nsamp,ntimes,nstep,dimX,dimY,Gmint,new_frequence,
    #                      minSampleCount,latv,lonv,traveltimes,traces)
    #        print "loaded tttgrid has probably wrong dimensions or stations,\
    #        delete ttgrid or exchange is recommended"

    t2 = time.time()
    Logfile.add('%s took %0.3f s' % ('CALC:', (t2 - t1)))

    partSemb = k
    partSemb = partSemb.reshape(ntimes, migpoints)

    return partSemb, weight, array_center