def filterclusterStationMinimumNumber(CentroidList, StationclusterList, Config):
    """Discard centroids whose clusters have too few member stations.

    A centroid survives only if at least Config['minclusterstation']
    entries of StationclusterList reference its rank.

    :param CentroidList: candidate cluster centroids (need .rank/.lat/.lon)
    :param StationclusterList: stations with a .member rank assignment
    :param Config: dict-like config; 'minclusterstation' is the threshold
    :returns: (stations belonging to surviving centroids, surviving centroids)
    """
    newCentroidList = []
    newStationclusterList = []

    for centroid in CentroidList:
        # count stations assigned to this centroid
        counter = 0
        for station in StationclusterList:
            if centroid.rank == station.member:
                counter += 1
                # NOTE: the original also built an unused streamID string
                # here (net.sta.loc.comp); that dead code is removed.
        if counter < int(Config['minclusterstation']):
            s1 = 'OUT'
        else:
            s1 = 'IN '
            # keep only sufficiently populated centroids
            newCentroidList.append(centroid)
        Logfile.red('Centroid %s %d %s %.2f %5.2f'
                    % (centroid.rank, counter, s1, centroid.lat, centroid.lon))

    # collect the member stations of every surviving centroid
    for centroid in newCentroidList:
        for station in StationclusterList:
            if centroid.rank == station.member:
                newStationclusterList.append(station)

    return newStationclusterList, newCentroidList
def filterWaveform(self, Waveform, cfg):
    """Resample each trace to the configured frequency and bandpass it.

    Filter corner frequencies and order are taken from the filter bank
    selected by cfg.config_filter.filterswitch (1-based index).

    :param Waveform: iterable of obspy-like traces
    :param cfg: configuration object with a config_filter section
    :returns: Stream of resampled, bandpass-filtered traces
    """
    Logfile.red('Filter Waveform: ')
    target_rate = (cfg.config_filter.newFrequency)
    filtered = Stream()

    for trace in Waveform:
        Logfile.red('Downsampling to %s: from %d'
                    % (target_rate, trace.stats.sampling_rate))
        resampled = trace.resample(target_rate)
        band = cfg.config_filter.filterswitch
        Logfile.add('bandpass filtered \ stream for station %s ' % (trace))
        # filterswitch is 1-based, the parameter lists are 0-based
        resampled.filter('bandpass',
                         freqmin=cfg.config_filter.flo[band - 1],
                         freqmax=cfg.config_filter.fhi[band - 1],
                         corners=cfg.config_filter.ns[band - 1],
                         zerophase=False)
        filtered.append(resampled)

    return filtered
def createFolder(EventPath):
    """Create the per-event working directory layout.

    :param EventPath: root directory of the event
    :returns: dict mapping logical names ('cluster', 'base', 'semb', 'asc',
              'mseed', 'event', 'config') to absolute paths
    :aborts: via Logfile.abort when the current directory is not writable
    """
    Folder = {}
    Logfile.red('Create working environment')

    Folder['cluster'] = os.path.join(EventPath, 'cluster')
    # exist_ok avoids the check-then-create race of the original
    # os.access(..., os.F_OK) test
    os.makedirs(Folder['cluster'], exist_ok=True)

    if os.access(os.getcwd(), os.W_OK):
        basedir = os.path.join(EventPath, 'work')
        Folder['base'] = basedir
        Folder['semb'] = os.path.join(basedir, 'semblance')
        Folder['asc'] = os.path.join(basedir, 'asc')
        Folder['mseed'] = os.path.join(basedir, 'mseed')
        Folder['event'] = EventPath
    else:
        Logfile.abort('create Folder: No write permissions for ' + os.getcwd())

    # template/skeleton files live next to the current working directory
    Folder['config'] = os.path.join(os.getcwd(), 'skeleton')
    return Folder
def filterStations(StationList, Config, Origin):
    """Keep stations strictly inside the configured distance ring.

    :param StationList: stations with net/sta/loc/comp and geometry fields
    :param Config: dict consumed through ConfigObj ('mindist'/'maxdist')
    :param Origin: origin dict converted to a Location
    :returns: list of fresh Station objects within (mindist, maxdist) degrees
    """
    cfg = ConfigObj(dict=Config)
    minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
    origin = DataTypes.dictToLocation(Origin)
    Logfile.red('Filter stations with configured parameters')

    kept = [
        Station(s.net, s.sta, s.loc, s.comp, s.lat, s.lon,
                s.ele, s.dip, s.azi, s.gain)
        for s in StationList
        if minDist < loc2degrees(origin, s) < maxDist
    ]

    Logfile.red('%d STATIONS LEFT IN LIST' % len(kept))
    return kept
def filterStations(StationList, cfg, Origin):
    """Keep stations strictly inside the configured distance ring.

    Same contract as the Config-dict variant, but reads minDist/maxDist
    from cfg.config_cluster directly.

    :returns: list of fresh Station objects within (minDist, maxDist) degrees
    """
    minDist = cfg.config_cluster.minDist
    maxDist = cfg.config_cluster.maxDist
    origin = DataTypes.dictToLocation(Origin)
    Logfile.red('Filter stations with configured parameters')

    selected = []
    for station in StationList:
        distance = loc2degrees(origin, station)
        if not (minDist < distance < maxDist):
            continue
        selected.append(Station(station.net, station.sta, station.loc,
                                station.comp, station.lat, station.lon,
                                station.ele, station.dip, station.azi,
                                station.gain))

    Logfile.red('%d STATIONS LEFT IN LIST' % len(selected))
    return selected
def readWaveforms(stationList, tw, EventPath, Origin):
    """Read SDS-archive day files for all stations within a time window.

    Streams with gaps are merged by interpolation; a stream is accepted
    only when its trace length matches the requested window to within
    one second.

    :param stationList: stations providing getName()/net/sta/comp
    :param tw: dict with UTCDateTime 'start' and 'end'
    :param EventPath: event root; data live under data/<year>/NET/STA/...
    :param Origin: origin with a .time attribute
    :returns: OrderedDict station-name -> Stream
    """
    t2 = UTCDateTime(Origin.time)
    sdspath = os.path.join(EventPath, 'data', str(t2.year))
    Wdict = OrderedDict()

    for i in stationList:
        # SDS naming: NET.STA.LOC.CHA.D.YEAR.JULDAY
        streamData = i.getName() + '.D.' + str(t2.year)\
            + '.' + str("%03d" % t2.julday)
        entry = os.path.join(sdspath, i.net, i.sta, i.comp + '.D', streamData)
        tdiff = tw['end'] - tw['start']
        try:
            st = read(entry, format="MSEED", starttime=tw['start'],
                      endtime=tw['end'], nearest_sample=True)
        except Exception:
            Logfile.error('readWaveforms: File not found', entry)
            # BUG FIX: the original fell through with `pass`, leaving `st`
            # unbound (first station -> NameError) or stale data from the
            # previous station; skip this station instead.
            continue

        if len(st.get_gaps()) > 0:
            st.merge(method=0, fill_value='interpolate',
                     interpolation_samples=0)

        if len(st) > 0:
            trdiff = st[0].stats.endtime - st[0].stats.starttime
            totaldiff = abs(trdiff - tdiff)
            # accept only streams covering (almost) the full window
            if totaldiff < 1:
                Wdict[i.getName()] = st
                Logfile.add(i.getName() + ' added to StreamList ')
            else:
                print(' OUT ', streamData)

    Logfile.red('%d Streams added with available Data' % len(Wdict))
    return Wdict
def cmpFilterMetavsXCORR(XcorrMeta, StationMetaList):
    """Keep only stations whose name appears among the Xcorr results.

    The original branched on sys.version_info and iterated sorted() vs
    unsorted keys, but a pure membership test is order-independent, so
    the branch (and the O(n*m) key scan) is unnecessary; dict keys are
    unique, so at most one match per station — behavior is unchanged.

    :param XcorrMeta: dict keyed by station names
    :param StationMetaList: stations providing getName()
    :returns: stations from StationMetaList present in XcorrMeta (original order)
    """
    FilterList = [i for i in StationMetaList if i.getName() in XcorrMeta]

    n1 = len(FilterList)
    n2 = len(StationMetaList)
    Logfile.red('Xcorr Procedure finished %d of %d stations \ left for processing' % (n1, n2))
    return FilterList
def filterStations(StationList, Config, Origin, network, cfg_yaml):
    """Keep stations that match a requested network entry and lie inside
    the configured distance ring; duplicates are dropped.

    :param StationList: candidate stations
    :param Config: legacy config dict (wrapped but not read here)
    :param Origin: dict with 'lat'/'lon'
    :param network: iterable of station/network name patterns to match
    :param cfg_yaml: yaml config providing config_cluster.minDist/maxDist
    :returns: de-duplicated list of Station objects
    """
    cfg = ConfigObj(dict=Config)  # NOTE(review): unused here; kept as in original
    minDist = cfg_yaml.config_cluster.minDist
    maxDist = cfg_yaml.config_cluster.maxDist
    origin = Location(Origin['lat'], Origin['lon'])
    Logfile.red('Filter stations with configured parameters...')

    selected = []
    for wanted in network:
        for cand in StationList:
            name = cand.getcmpName()
            # match either the name without its 2-char component suffix
            # or the full name
            if str(name[:-2]) != str(wanted) and str(name[:]) != str(wanted):
                continue
            delta = loc2degrees(origin, Location(cand.lat, cand.lon))
            if not (minDist < delta < maxDist):
                continue
            entry = Station(cand.net, cand.sta, cand.loc, cand.comp,
                            cand.lat, cand.lon, cand.ele, cand.dip,
                            cand.azi, cand.gain)
            if entry not in selected:
                selected.append(entry)

    Logfile.red('%d STATIONS LEFT IN LIST' % len(selected))
    return selected
def filterWaveform(self, Waveform):
    """Resample each trace to the configured frequency and bandpass it.

    Filter parameters come from filter bank 1 or 2 of FilterCfg, selected
    by cfg.filterswitch(); any other switch value leaves the trace
    unfiltered (resampled only).

    :param Waveform: iterable of obspy-like traces
    :returns: Stream of processed traces
    """
    Logfile.red('Filter Waveform: ')
    cfg = FilterCfg(self.Config)
    target_rate = (cfg.newFrequency())
    out = Stream()

    for trace in Waveform:
        Logfile.red('Downsampling to %s: from %d'
                    % (target_rate, trace.stats.sampling_rate))
        resampled = trace.resample(target_rate)
        band = cfg.filterswitch()
        if band in (1, 2):
            Logfile.add('bandpass filtered \ stream for station %s ' % (trace))
            # pick the parameter set of the selected filter bank
            if band == 1:
                lo, hi, corners = cfg.flo(), cfg.fhi(), cfg.ns()
            else:
                lo, hi, corners = cfg.flo2(), cfg.fhi2(), cfg.ns2()
            resampled.filter('bandpass', freqmin=lo, freqmax=hi,
                             corners=corners,
                             zerophase=bool(self.Config['zph']))
        out.append(resampled)

    return out
def calculateTimeWindows(mint, maxt, Config, Origin, switch):
    """Derive the absolute data time window around the origin time.

    start = origin + mint - forerun; end = start + duration.

    NOTE: `maxt` and `switch` are currently unused — the switch-dependent
    winlen / winlen_f2 lookups only fed commented-out dead code (and left
    winlen unbound for switch values other than 0/1); that dead code is
    removed. Both parameters are kept for caller compatibility.

    :param mint: minimum travel time (seconds) relative to origin
    :param Config: config providing config_filter.forerun/duration
    :param Origin: origin with a .time attribute
    :returns: dict with UTCDateTime 'start' and 'end'
    """
    tw = {}
    # drop the trailing character of the time string
    # (presumably a 'Z' suffix — TODO confirm against Origin.time format)
    st = str(Origin.time)[:-1]

    tw['start'] = UTCDateTime(UTCDateTime(st)
                              + (mint - float(Config.config_filter.forerun)))
    tw['end'] = tw['start'] + float(Config.config_filter.duration)
    timespan = tw['end'] - tw['start']

    Logfile.red('ORIGIN TIME %s ' % UTCDateTime(st))
    Logfile.red('TIME WINDOW: %s - %s ' % (tw['start'], tw['end']))
    Logfile.red('TIME SPAN: %s Minutes ' % (timespan / 60))
    return tw
def doXcorr(self, phase, traces):
    """Cross-correlate every station stream against a reference station.

    The reference defaults to the best-SNR station (self.f6) and can be
    overridden by the '<array>refstation' config entry.

    :param phase: seismic phase name forwarded to self.traveltimes
    :param traces: raw traces forwarded to self.traveltimes
    :returns: (corrDict name->Corr, reference stream, StreamDict)
    """
    StreamDict, SNRDict = self.traveltimes(phase, traces)
    t = self.f6(SNRDict)
    Logfile.add('doXcorr: REFERENCE: ' + t)

    for i in SNRDict.keys():
        Logfile.add('doXcorr: STREAM: ' + i + ' SNR: ' + str(SNRDict[i]))

    # config key is '<last path component of AF>refstation'
    alternativeref = os.path.join(
        *self.AF.split(os.sep)[-1:]) + 'refstation'
    # use the configured reference if one is set
    # (the original had a no-op 't = t' branch here)
    if self.Config[alternativeref] != '':
        t = self.Config[alternativeref]

    corrDict = {}
    # StreamDict entries may be Stream (indexable) or bare Trace objects
    try:
        ref = StreamDict[t][0].data
    except Exception:
        ref = StreamDict[t].data

    Logfile.red('Reference Station of %s for Xcorr Procedure %s'
                % (os.path.basename(self.AF), t))
    Logfile.red('Enter Xcorr Procedure ')

    for stream in StreamDict.keys():
        a, b = obspy.signal.cross_correlation.xcorr(
            ref, StreamDict[stream][0], 0)
        # sample shift -> time shift
        shift = a / StreamDict[stream][0].stats.sampling_rate
        corrDict[stream] = Corr(shift, b, a)
        corrDict[stream].value = abs(corrDict[stream].value)
        msg = 'Index: ' + str(a) + ' Value: ' + str(b) + ' ----> '
        msg += (str(stream) + str(StreamDict[stream][0].stats.sampling_rate)
                + ' SHIFT IN TIME: ' + str(shift))
        Logfile.add(msg)

    Logfile.red('Finish Xcorr Procedure ')
    return corrDict, StreamDict[t], StreamDict
def readMetaInfoFile(EventPath):
    """Parse metainfo-*.meta station files found in EventPath.

    Each whitespace-separated line holds: net sta loc comp lat lon ele
    dip azi gain. Only vertical ('Z') components are kept.

    :param EventPath: directory scanned for metainfo-*.meta files
    :returns: list of Station objects (empty when nothing readable)
    """
    Logfile.red('Parsing MetaInfoFile')
    # BUG FIX: MetaL was only assigned inside the loop in the original,
    # so the function raised NameError at `return` when no metafile
    # matched or when an exception fired before the first assignment.
    MetaL = []
    try:
        for fname in os.listdir(EventPath):
            if fnmatch.fnmatch(fname, 'metainfo-*.meta'):
                evfile = os.path.join(EventPath, fname)
                MetaL = []
                Logfile.add(evfile)
                # `with` closes the file (the original leaked the handle)
                with open(evfile, 'r') as fobj:
                    for raw in fobj:
                        line = raw.split()
                        net = line[0]
                        sta = line[1]
                        loc = line[2]
                        comp = line[3]
                        lat = line[4]
                        lon = line[5]
                        ele = line[6]
                        dip = line[7]
                        azi = line[8]
                        gain = line[9]
                        # keep vertical components only
                        if fnmatch.fnmatch(comp, 'Z'):
                            MetaL.append(
                                Station(net, sta, loc, comp, lat,
                                        lon, ele, dip, azi, gain))
                Logfile.red('%d ENTRIES IN METAFILE FOUND' % (len(MetaL)))
    except Exception:
        Logfile.red('METAFILE NOT READABLE')
    return MetaL
def traveltimes(self, phase, traces, cfg_yaml):
    """Compute theoretical arrival times and load matching waveforms.

    For every station in self.StationMeta: compute the cake travel time
    of *phase*, derive the time window, and read waveform + SNR via the
    configured input backend (pyrocko / colesseo / default).

    :param phase: phase name for cake.PhaseDef
    :param traces: raw traces (pyrocko backend only)
    :param cfg_yaml: yaml config (traveltime model, input switches)
    :returns: (OrderedDict name->waveform, OrderedDict name->SNR)
    :raises Exception: when no arrival exists for a station
    """
    Logfile.red('Enter AUTOMATIC CROSSCORRELATION ')
    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n ')

    T = []
    Wdict = OrderedDict()
    SNR = OrderedDict()
    Config = self.Config
    cfg = ConfigObj(dict=Config)

    # PERF: phase definition and velocity model are loop-invariant; the
    # original re-read the model file once per station.
    Phase = cake.PhaseDef(phase)
    traveltime_model = cfg_yaml.config.traveltime_model
    path = palantiri.__path__
    model = cake.load_model(path[0] + '/data/' + traveltime_model)

    for i in self.StationMeta:
        Logfile.red('read in %s ' % (i))
        de = loc2degrees(self.Origin, i)
        # colesseo input stores depth in meters already; otherwise km
        if cfg_yaml.config_data.colesseo_input is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth*km, zstop=0.)
        try:
            ptime = arrivals[0].t
        except Exception:
            # retry slightly above the source depth before giving up
            try:
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=self.Origin.depth*km-2.1)
                ptime = arrivals[0].t
            except Exception:
                ptime = 0
        T.append(ptime)

        if ptime == 0:
            Logfile.red('Available phases for station %s in\ range %f deegree' % (i, de))
            Logfile.red('you tried phase %s' % (phase))
            raise Exception("ILLEGAL: phase definition")
        else:
            tw = self.calculateTimeWindows(ptime)
            if cfg_yaml.config_data.pyrocko_download is True:
                w, snr, found = self.readWaveformsCross_pyrocko(
                    i, tw, ptime, traces, cfg_yaml)
            elif cfg_yaml.config_data.colesseo_input is True:
                w, snr = self.readWaveformsCross_colesseo(
                    i, tw, ptime, cfg_yaml)
            else:
                w, snr = self.readWaveformsCross(i, tw, ptime, cfg_yaml)
            Wdict[i.getName()] = w
            SNR[i.getName()] = snr

    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++ ')
    Logfile.red('Exit AUTOMATIC FILTER ')
    return Wdict, SNR
def processWaveforms_obspy(WaveformDict, Config, Folder, network, MetaDict,
                           Event, switch, Xcorr):
    """Gain-correct, optionally bandpass-filter, and resample all waveforms.

    Optionally exports the unfiltered / filtered / resampled stages when
    the corresponding Config flags are 'True'.

    :param WaveformDict: mapping station-name -> Stream (mutated in place)
    :param switch: 0 -> filter bank 1, 1 -> filter bank 2, else no filter
    :returns: WaveformDict with each entry replaced by its resampled trace
    """
    Logfile.red('Start Processing')
    cfg = FilterCfg(Config)
    new_frequence = cfg.newFrequency()

    for index, i in enumerate(WaveformDict):
        Logfile.add('%s:%s ---------------------' % (index, i))

        # BUG FIX: the original compared with `is`, which tests object
        # identity and is effectively always False for computed strings;
        # use '==' so the export flags actually work.
        if Config['export_unfiltered'].capitalize() == 'True':
            writeWaveform(Folder, i, WaveformDict[i], 'U', network)

        station = getStation(i, MetaDict)

        if cfg.Int('fm') == 1:
            azi = ttt.bearing(Event, station)
            bazi = ttt.backazi(station, Event)
            msg = 'Takeoff ' + str(station.takeoff) + ' Azi ' + str(azi) +\
                'Bazi ' + str(bazi)
            Logfile.add(msg)

        gain = float(station.gain)
        if gain == 0.0 or gain == -1.0:
            gain = 1  # treat missing/sentinel gain as unity

        # BUG FIX: the original computed the gain correction and discarded
        # the result; assign it back so the trace is actually normalized.
        WaveformDict[i][0].data = WaveformDict[i][0].data * (1.0 / gain)

        if switch == 0:  # '==' instead of int identity 'is'
            Logfile.add('bandpass filtered stream for station %s ' % (i))
            WaveformDict[i].filter('bandpass',
                                   freqmin=cfg.flo(),
                                   freqmax=cfg.fhi(),
                                   corners=cfg.ns(),
                                   zerophase=bool(Config['zph']))
        elif switch == 1:
            Logfile.add('bandpass filtered stream for station %s ' % (i))
            WaveformDict[i].filter('bandpass',
                                   freqmin=cfg.flo2(),
                                   freqmax=cfg.fhi2(),
                                   corners=cfg.ns2(),
                                   zerophase=bool(Config['zph2']))
        else:
            Logfile.add('no filter set for station %s ' % (i))

        if Config['export_filtered'].capitalize() == 'True':
            writeWaveform(Folder, i, WaveformDict[i], 'F', network)

        j = resampleWaveform(WaveformDict[i][0], new_frequence)
        WaveformDict[i] = j

        if Config['export_resampled'].capitalize() == 'True':
            writeWaveform(Folder, i, WaveformDict[i], 'R', network)

    return WaveformDict
def createRandomInitialCentroids(Config, StationMetaList):
    """Randomly draw initial cluster centroids from the station list.

    Draws station indices at random, rejecting already-used indices, and
    accepts a station as centroid when addOK() approves it and it is far
    enough (Config['centroidmindistance'] degrees) from previously used
    draws. Gives up after MAX_TIME_ALLOWED seconds without progress.

    :param Config: dict with 'maxcluster', 'centroidmindistance',
                   'initialstationdistance'
    :param StationMetaList: candidate stations
    :returns: list of chosen centroid stations (possibly fewer than
              requested when the time budget runs out)
    """
    Logfile.red('Begin initial centroid search')
    cfg = ConfigObj(dict=Config)
    initialCentroids = []
    usedIndexes = []
    # BUG FIX: time.clock() was removed in Python 3.8; seed from the
    # wall clock instead.
    random.seed(time.time())

    if len(StationMetaList) == 0:
        Logfile.red('Empty station list')
        return initialCentroids

    MAX_TIME_ALLOWED = 350  # seconds of search before giving up
    start = time.time()

    # maxcluster == 0 means "as many as possible"
    if int(Config['maxcluster']) == 0:
        to = len(StationMetaList) - 1
    else:
        to = int(Config['maxcluster'])

    while len(initialCentroids) < to:
        dist_centroids = float(Config['centroidmindistance'])
        randomIndex = random.randint(0, len(StationMetaList) - 1)

        # redraw until we hit an unused index that is sufficiently far
        # from at least one earlier draw
        # NOTE(review): this accepts when ANY earlier draw is far enough,
        # not when ALL are — looks suspicious but preserved as-is; confirm
        # against the clustering intent before changing.
        redraw = True
        while redraw:
            if randomIndex in usedIndexes:
                randomIndex = random.randint(0, len(StationMetaList) - 1)
            else:
                if len(usedIndexes) > 2:
                    for rdx in usedIndexes:
                        s1 = StationMetaList[randomIndex]
                        s2 = StationMetaList[rdx]
                        delta = loc2degrees(s1, s2)
                        if delta >= dist_centroids:
                            redraw = False
                else:
                    redraw = False
        usedIndexes.append(randomIndex)

        around = checkStationAroundInitialCentroid(
            StationMetaList[randomIndex], Config, StationMetaList)
        found = False

        if len(initialCentroids) == 0:
            # first centroid is accepted unconditionally
            initialCentroids.append(StationMetaList[randomIndex])
            found = True
            start = time.time()
        else:
            t = addOK(StationMetaList[randomIndex], initialCentroids,
                      Config, StationMetaList)
            if (time.time() - start) > MAX_TIME_ALLOWED:
                break
            if t == 1:
                if len(usedIndexes) > 1:
                    # NOTE(review): may append the same station once per
                    # far-enough earlier draw — preserved as-is; confirm.
                    for rdx in usedIndexes:
                        s1 = StationMetaList[randomIndex]
                        s2 = StationMetaList[rdx]
                        delta = loc2degrees(s1, s2)
                        if delta >= dist_centroids:
                            initialCentroids.append(
                                StationMetaList[randomIndex])
                            found = True
                else:
                    initialCentroids.append(StationMetaList[randomIndex])
                    found = True
            else:
                continue

        if found:
            initDist = cfg.Float('initialstationdistance')
            Logfile.red('found initial cluster %d'
                        % (len(initialCentroids)))
            Logfile.red('centroid %s with %d stations around %s deegree'
                        % (StationMetaList[randomIndex], around, initDist))

    Logfile.red('Initial centroid search finished')
    return initialCentroids