def test_to_phase_defs(self):
    pdefs = cake.to_phase_defs(['p,P', cake.PhaseDef('PP')])
    assert len(pdefs) == 3
    for pdef in pdefs:
        assert isinstance(pdef, cake.PhaseDef)

    pdefs = cake.to_phase_defs(cake.PhaseDef('PP'))
    assert len(pdefs) == 1
    for pdef in pdefs:
        assert isinstance(pdef, cake.PhaseDef)

    pdefs = cake.to_phase_defs('P,p')
    assert len(pdefs) == 2
    for pdef in pdefs:
        assert isinstance(pdef, cake.PhaseDef)
def test_path(self):
    mod = cake.load_model()
    phase = cake.PhaseDef('P')
    ray = mod.arrivals(phases=[phase], distances=[70.], zstart=100.)
    z, x, t = ray[0].zxt_path_subdivided()
    assert z[0].size == 681
def test_angles(self):
    mod = cake.load_model()
    data = [
        [1.0*km, 1.0*km, 1.0*km, 90., 90., 'P'],
        [1.0*km, 2.0*km, 1.0*km, 45., 135., 'P\\'],
        [2.0*km, 1.0*km, 1.0*km, 135., 45., 'p'],
        [1.0*km, 2.0*km, math.sqrt(3.)*km, 60., 120., 'P\\'],
        [2.0*km, 1.0*km, math.sqrt(3.)*km, 120., 60., 'p']]

    for (zstart, zstop, dist, takeoff_want, incidence_want, pdef_want) \
            in data:
        rays = mod.arrivals(
            zstart=zstart,
            zstop=zstop,
            phases=[
                cake.PhaseDef(sphase)
                for sphase in 'P,p,P\\,p\\'.split(',')],
            distances=[dist*cake.m2d])

        for ray in rays:
            takeoff = round(ray.takeoff_angle())
            incidence = round(ray.incidence_angle())
            pdef = ray.used_phase().definition()

            assert takeoff == takeoff_want
            assert incidence == incidence_want
            assert pdef == pdef_want
def t(self, phase_selection, z_dist):
    ''':param phase_selection: phase names separated by vertical bars
    :param z_dist: tuple with (depth, distance)
    '''
    if 'first' in phase_selection:
        self.which = 'first'
    if 'last' in phase_selection:
        self.which = 'last'
    if self.which:
        phase_selection = self.strip(phase_selection)

    z, dist = z_dist
    if (phase_selection, dist, z) in self.arrivals.keys():
        return self.arrivals[(phase_selection, dist, z)]

    phases = [cake.PhaseDef(pid) for pid in phase_selection.split('|')]
    arrivals = self.model.arrivals(
        distances=[dist * cake.m2d], phases=phases, zstart=z)
    if arrivals == []:
        logger.info('none of defined phases at d=%s, z=%s' % (dist, z))
        return
    else:
        want = self.phase_selector(arrivals)
        self.arrivals[(phase_selection, dist, z)] = want
        return want
def convert_phase(phase):
    if phase == "Pg":
        phase = "P<(moho)"
    if phase == "pg":
        phase = "p<(moho)"
    if phase == "Sg":
        phase = "S<(moho)"
    if phase == "sg":
        phase = "s<(moho)"
    if phase == "PG":
        phase = "P>(moho)"
    if phase == "pG":
        phase = "p>(moho)"
    if phase == "SG":
        phase = "S>(moho)"
    if phase == "sG":
        phase = "s>(moho)"
    if phase == "P*":
        phase = "P"
    if phase == "S*":
        phase = "S"
    if phase == "SmS":
        phase = 'Sv(moho)s'
    if phase == "PmP":
        phase = 'Pv(moho)p'
    if phase == "Pn":
        phase = 'Pv_(moho)p'
    if phase == "Sn":
        phase = 'Sv_(moho)s'
    cake_phase = cake.PhaseDef(phase)
    return cake_phase
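# The chain of if-statements in convert_phase() above can equivalently be
# written as a lookup table. A minimal sketch: the mapping values are copied
# from the function above; the dict-based helper itself is hypothetical.
from pyrocko import cake

CLASSIC_TO_CAKE = {
    "Pg": "P<(moho)", "pg": "p<(moho)", "Sg": "S<(moho)", "sg": "s<(moho)",
    "PG": "P>(moho)", "pG": "p>(moho)", "SG": "S>(moho)", "sG": "s>(moho)",
    "P*": "P", "S*": "S",
    "SmS": "Sv(moho)s", "PmP": "Pv(moho)p",
    "Pn": "Pv_(moho)p", "Sn": "Sv_(moho)s",
}


def convert_phase_dict(phase):
    # Names not in the table are passed to cake.PhaseDef() unchanged.
    return cake.PhaseDef(CLASSIC_TO_CAKE.get(phase, phase))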
def calctakeoff(Station, Event, Config):
    de = loc2degrees(Event, Station)
    Phase = cake.PhaseDef('P')
    # 'model' is expected to be a cake earth model available at module
    # level, e.g. model = cake.load_model()
    arrivals = model.arrivals([de, de], phases=Phase,
                              zstart=Event.depth * km)
    return arrivals[0].takeoff_angle()
def traveltimes(self, phase, traces):
    Logfile.red('Enter AUTOMATIC CROSSCORRELATION ')
    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n ')

    T = []
    Wdict = OrderedDict()
    SNR = OrderedDict()
    Config = self.Config
    cfg = ConfigObj(dict=Config)

    for i in self.StationMeta:
        Logfile.red('read in %s ' % (i))
        de = loc2degrees(self.Origin, i)
        Phase = cake.PhaseDef(phase)
        traveltime_model = cfg.Str('traveltime_model')
        path = palantiri.__path__
        model = cake.load_model(path[0] + '/data/' + traveltime_model)

        if cfg.colesseo_input() is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth * km,
                                      zstop=0.)
        try:
            ptime = arrivals[0].t
        except Exception:
            try:
                arrivals = model.arrivals(
                    [de, de], phases=Phase,
                    zstart=self.Origin.depth * km - 2.1)
                ptime = arrivals[0].t
            except Exception:
                ptime = 0

        T.append(ptime)
        if ptime == 0:
            Logfile.red('Available phases for station %s in range %f degree'
                        % (i, de))
            Logfile.red('you tried phase %s' % (phase))
            raise Exception("ILLEGAL: phase definition")
        else:
            tw = self.calculateTimeWindows(ptime)
            if cfg.pyrocko_download() is True:
                w, snr, found = self.readWaveformsCross_pyrocko(
                    i, tw, ptime, traces)
            elif cfg.colesseo_input() is True:
                w, snr = self.readWaveformsCross_colesseo(i, tw, ptime)
            else:
                w, snr = self.readWaveformsCross(i, tw, ptime)

            Wdict[i.getName()] = w
            SNR[i.getName()] = snr

    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++ ')
    Logfile.red('Exit AUTOMATIC FILTER ')

    return Wdict, SNR
def traveltime(station):
    dist = event.distance_to(station)
    arrivals = mod.arrivals(
        zstart=event.depth, zstop=0., distances=[dist * cake.m2d],
        phases=[cake.PhaseDef(self.phasename)])
    if not arrivals:
        raise NoArrival()

    return arrivals[0].t
def make_reference_markers_cake(source, targets, model):
    assert len(source) == 1

    ref_marker = defaultdict(dict)
    phases_start = ['p', 'P']
    phases_start = [cake.PhaseDef(ph) for ph in phases_start]

    phases_end = ['s', 'S']
    phases_end = [cake.PhaseDef(ph) for ph in phases_end]

    for s in source:
        for target in targets:
            dist = orthodrome.distance_accurate50m(s, target) * cake.m2d
            tmin = min(model.arrivals([dist],
                                      phases_start,
                                      zstart=s.depth,
                                      zstop=s.depth), key=lambda x: x.t).t

            tmax = min(model.arrivals([dist],
                                      phases_end,
                                      zstart=s.depth,
                                      zstop=s.depth), key=lambda x: x.t).t

            tmin += s.time
            tmax += s.time
            assert tmin != tmax

            m = gui_util.PhaseMarker(nslc_ids=target.codes,
                                     tmin=tmin,
                                     tmax=tmax,
                                     kind=1,
                                     event=source,
                                     phasename='range')
            ref_marker[s][target] = m

    return ref_marker
def prep_data_batch(data_dir, store_id, stations=None, pre=0.5, post=3,
                    reference_event=None, min_len=420, pick_sigma=0.02):

    engine = LocalEngine(store_superdirs=['/home/asteinbe/gf_stores'])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    gf_freq = store.config.sample_rate
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    events = []
    waveforms = []
    waveforms_shifted = []
    events = scedc_util.scedc_fm_to_pyrocko(file)
    labels = labels_from_events(events)
    pathlist = Path(data_dir).glob('ev_0/')
    for path in sorted(pathlist):
        try:
            targets = []
            path = str(path)+"/"
            event = model.load_events(path+"event.txt")[0]
            traces_loaded = io.load(path+"traces.mseed")
            stations_unsorted = model.load_stations(data_dir+"stations.pf")
            for st in stations_unsorted:
                st.dist = orthodrome.distance_accurate50m(
                    st.lat, st.lon, event.lat, event.lon)
                st.azi = orthodrome.azimuth(st.lat, st.lon,
                                            event.lat, event.lon)
            stations = sorted(stations_unsorted, key=lambda x: x.dist,
                              reverse=True)
            traces_processed = []
            traces = check_traces(traces_loaded, stations, min_len=min_len)
            traces_processed, nsamples = wp.process_loaded_waveforms(
                traces, stations, event, gf_freq, mod, pre, post)
            events.append(event)
            waveforms.append(traces_processed)
        except Exception:
            pass

    return waveforms, nsamples, events, waveforms_shifted
def print_arrivals(model, distances=[], phases=cake.PhaseDef('P'),
                   zstart=0.0, zstop=0.0, as_degrees=False):

    headers = 'slow dist time take inci effi spre phase used'.split()
    space = (7, 5, 6, 4, 4, 4, 4, 17, 17)
    if as_degrees:
        units = 's/deg deg s deg deg % %'.split()
    else:
        units = 's/km km s deg deg % %'.split()

    hline = ' '.join(x.ljust(s) for (x, s) in zip(headers, space))
    uline = ' '.join(('%s' % x).ljust(s) for (x, s) in zip(units, space))

    print(hline)
    print(uline)
    print('-' * len(hline))

    for ray in model.arrivals(
            distances=distances, phases=phases, zstart=zstart, zstop=zstop):

        if as_degrees:
            sd = ray.x
            slow = ray.p / cake.r2d
        else:
            sd = ray.x * (cake.d2r * cake.earthradius / cake.km)
            slow = ray.p / (cake.r2d * cake.d2m / cake.km)

        su = '(%s)' % ray.path.used_phase(p=ray.p, eps=1.0).used_repr()

        print(' '.join(
            tuple(
                mini_fmt(x, s).rjust(s) for (x, s) in zip(
                    (slow, sd, ray.t, ray.takeoff_angle(),
                     ray.incidence_angle(),
                     100 * ray.efficiency(),
                     100 * ray.spreading() * ray.surface_sphere()),
                    space)) +
            tuple(x.ljust(17) for x in (ray.path.phase.definition(), su))))
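# A minimal, hypothetical call of print_arrivals() defined above; the model
# name, distances and source depth are illustrative choices.
from pyrocko import cake

mod = cake.load_model('ak135-f-continental.m')
print_arrivals(
    mod,
    distances=[1., 5., 10.],   # epicentral distances [deg]
    phases=cake.PhaseDef('P'),
    zstart=10. * 1000.,        # source depth [m]
    as_degrees=True)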
def t(self, mod, z_dist, get_ray=False):
    """:param mod: cake earth model used for ray tracing
    :param z_dist: tuple with (depth, distance)
    """
    z, dist = z_dist
    if (dist, z) in self.arrivals.keys():
        return self.return_time(self.arrivals[(dist, z)])

    phases = [cake.PhaseDef(pid) for pid in self.phases]
    arrivals = mod.arrivals(
        distances=[dist * cake.m2d], phases=phases, zstart=z)
    if not arrivals:
        logger.warning(
            "no phase at d=%s, z=%s. (return fallback time)" % (dist, z))
        want = None
    else:
        want = self.phase_selector(arrivals)

    self.arrivals[(dist, z)] = want
    if get_ray:
        return want
    else:
        return self.return_time(want)
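# The cache-dict pattern used in t() above can also be expressed with
# functools.lru_cache in a standalone helper. A sketch, assuming distances
# and depths in meters; the function name and default model are hypothetical.
import functools

from pyrocko import cake

mod = cake.load_model()
PHASES = tuple(cake.PhaseDef(p) for p in ('P', 'p'))


@functools.lru_cache(maxsize=None)
def first_arrival_time(dist_m, depth_m):
    # Earliest P/p travel time [s], or None if no ray arrives.
    arrivals = mod.arrivals(
        distances=[dist_m * cake.m2d], phases=list(PHASES), zstart=depth_m)
    if not arrivals:
        return None
    return min(arrivals, key=lambda a: a.t).t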
def wanted_phases(self):
    try:
        wanted = []
        for iphase, name in enumerate(self._phase_names):
            if getattr(self, 'wantphase_%i' % iphase):
                if name in self._phases:
                    phases = self._phases[name]
                else:
                    if name.startswith('~'):
                        phases = [cake.PhaseDef(name[1:])]
                    else:
                        phases = cake.PhaseDef.classic(name)

                    self._phases[name] = phases
                    for pha in phases:
                        pha.name = name

                wanted.extend(phases)
    except (cake.UnknownClassicPhase, cake.PhaseDefParseError) as e:
        self.fail(str(e))

    return wanted
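# PhaseDef.classic(), used above for names without the '~' prefix, expands a
# classic phase name into one or more cake phase definitions, while the '~'
# branch parses cake syntax directly. A small illustrative sketch:
from pyrocko import cake

print(cake.PhaseDef.classic('Pg'))     # classic name -> list of PhaseDef
print([cake.PhaseDef('P<(moho)')])     # explicit cake-syntax definition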
def phasedef_or_horvel(x):
    try:
        return float(x)
    except ValueError:
        return cake.PhaseDef(x)
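# phasedef_or_horvel() above returns a float for numeric input (interpreted
# as a horizontal velocity) and a cake.PhaseDef otherwise. Usage sketch:
from pyrocko import cake

assert phasedef_or_horvel('8.0') == 8.0
assert isinstance(phasedef_or_horvel('Pv_(moho)p'), cake.PhaseDef)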
def calcTTTAdv(Config, station, Origin, flag, arrayname, Xcorrshift=None, Refshift=None): phasename = ('%sphase') % (os.path.basename(arrayname)) cfg = ConfigObj(dict=Config) dimX = cfg.Int('dimx') dimY = cfg.Int('dimy') gridspacing = cfg.Float('gridspacing') o_lat = float(Origin['lat']) o_lon = float(Origin['lon']) o_depth = float(Origin['depth']) oLator = o_lat + dimX / 2 oLonor = o_lon + dimY / 2 oLatul = 0 oLonul = 0 o_dip = 80. plane = False TTTGridMap = {} LMINMAX = [] GridArray = {} locStation = Location(station.lat, station.lon) sdelta = loc2degrees(Location(o_lat, o_lon), locStation) Phase = cake.PhaseDef(Config[phasename]) model = cake.load_model() z = 0 if plane is True: depth = np.linspace(0., 40., num=dimY) for i in xrange(70): oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing if z == 0 and i == 0: Latul = oLatul o = 0 start_time = time.clock() for j in xrange(40): oLonul = o_lon - ( (dimY - 1) / 2) * gridspacing + j * gridspacing / np.cos(o_dip) if o == 0 and j == 0: Lonul = oLonul de = loc2degrees(Location(oLatul, oLonul), locStation) arrivals = model.arrivals([de, de], phases=Phase, zstart=depth[j] * km, zstop=0.) try: ttime = arrivals[0].t except: try: arrivals = model.arrivals([de, de], phases=Phase, zstart=depth[j] * km - 2.5, zstop=depth[j] * km + 2.5, refine=True) ttime = arrivals[0].t except: tt = obs_TravelTimes(de, o_depth) for k in tt: if k['phase_name'] == 'P' or k['phase_name'] == ( '%sdiff') % (Config[phasename]): ttime = k['time'] print "Something wrong with phase arrival too large\ distances choosen?" GridArray[(i, j)] = GridElem(oLatul, oLonul, depth[j], ttime, de) LMINMAX.append(ttime) if int(Config['xcorr']) == 1: ttime = ttime-float(Xcorrshift[station.getName()].shift)\ - Refshift GridArray[(i, j)] = GridElem(oLatul, oLonul, o_depth, ttime, de) LMINMAX.append(ttime) if ttime == 0: print '\033[31mAvailable phases for station %s in range %f deegree\033[0m' % ( station, de) print '\033[31m' + '|'.join( [str(item['phase_name']) for item in tt]) + '\033[0m' print '\033[31myou tried phase %s\033[0m' % ( Config[phasename]) raise Exception("\033[31mILLEGAL: phase definition\033[0m") else: for i in xrange(dimX): oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing if z == 0 and i == 0: Latul = oLatul o = 0 for j in xrange(dimY): oLonul = o_lon - ( (dimY - 1) / 2) * gridspacing + j * gridspacing if o == 0 and j == 0: Lonul = oLonul de = loc2degrees(Location(oLatul, oLonul), locStation) arrivals = model.arrivals([de, de], phases=Phase, zstart=o_depth * km) try: ttime = arrivals[0].t except: try: arrivals = model.arrivals([de, de], phases=Phase, zstart=o_depth * km - 2.5, zstop=o_depth * km - 2.5, refine=True) ttime = arrivals[0].t except: arrivals = model.arrivals([de, de], phases=Phase, zstart=o_depth * km - 2.5, zstop=0., refine=True) ttime = arrivals[0].t GridArray[(i, j)] = GridElem(oLatul, oLonul, o_depth, ttime, de) LMINMAX.append(ttime) if int(Config['xcorr']) == 1: ttime = ttime-float(Xcorrshift[station.getName()].shift)\ - Refshift GridArray[(i, j)] = GridElem(oLatul, oLonul, o_depth, ttime, de) LMINMAX.append(ttime) if ttime == 0: print '\033[31mAvailable phases for station %s in range %f deegree\033[0m' % ( station, de) print '\033[31m' + '|'.join( [str(item['phase_name']) for item in tt]) + '\033[0m' print '\033[31myou tried phase %s\033[0m' % ( Config[phasename]) raise Exception("\033[31mILLEGAL: phase definition\033[0m") mint = min(LMINMAX) maxt = max(LMINMAX) TTTGridMap[station.getName()] = TTTGrid(o_depth, mint, maxt, 
Latul, Lonul, oLator, oLonor, GridArray) k = MinTMaxT(mint, maxt) Basic.dumpToFile(str(flag) + '-ttt.pkl', TTTGridMap) Basic.dumpToFile('minmax-' + str(flag) + '.pkl', k) Basic.dumpToFile('station-' + str(flag) + '.pkl', station)
def plot(settings, show=False): #align_phase = 'P(cmb)P<(icb)(cmb)p' with_onset_line = False fill = True align_phase = 'P' zoom_window = settings.zoom ampl_scaler = '4*standard deviation' quantity = settings.quantity zstart, zstop, inkr = settings.depths.split(':') test_depths = num.arange( float(zstart) * km, float(zstop) * km, float(inkr) * km) try: traces = io.load(settings.trace_filename) except FileLoadError as e: logger.info(e) return event = model.load_events(settings.event_filename) assert len(event) == 1 event = event[0] event.depth = float(settings.depth) * 1000. base_source = MTSource.from_pyrocko_event(event) test_sources = [] for d in test_depths: s = base_source.clone() s.depth = float(d) test_sources.append(s) if settings.store_superdirs: engine = LocalEngine(store_superdirs=settings.store_superdirs) else: engine = LocalEngine(use_config=True) try: store = engine.get_store(settings.store_id) except seismosizer.NoSuchStore as e: logger.info('%s ... skipping.' % e) return stations = model.load_stations(settings.station_filename) station = filter( lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id), stations) assert len(station) == 1 station = station[0] targets = [ station_to_target(station, quantity=quantity, store_id=settings.store_id) ] try: request = engine.process(targets=targets, sources=test_sources) except seismosizer.NoSuchStore as e: logger.info('%s ... skipping.' % e) return except meta.OutOfBounds as error: if settings.force_nearest_neighbor: logger.warning('%s Using nearest neighbor instead.' % error) mod_targets = [] for t in targets: closest_source = min(test_sources, key=lambda s: s.distance_to(t)) farthest_source = max(test_sources, key=lambda s: s.distance_to(t)) min_dist_delta = store.config.distance_min - closest_source.distance_to( t) max_dist_delta = store.config.distance_max - farthest_source.distance_to( t) if min_dist_delta < 0: azi, bazi = closest_source.azibazi_to(t) newlat, newlon = ortho.azidist_to_latlon( t.lat, t.lon, azi, min_dist_delta * cake.m2d) elif max_dist_delta < 0: azi, bazi = farthest_source.azibazi_to(t) newlat, newlon = ortho.azidist_to_latlon( t.lat, t.lon, azi, max_dist_delta * cake.m2d) t.lat, t.lon = newlat, newlon mod_targets.append(t) request = engine.process(targets=mod_targets, sources=test_sources) else: logger.error("%s: %s" % (error, ".".join(station.nsl()))) return alldepths = list(test_depths) depth_count = dict(zip(sorted(alldepths), range(len(alldepths)))) target_count = dict( zip([t.codes[:3] for t in targets], range(len(targets)))) fig = plt.figure() ax = fig.add_subplot(111) maxz = max(test_depths) minz = min(test_depths) relative_scale = (maxz - minz) * 0.02 for s, t, tr in request.iter_results(): if quantity == 'velocity': tr = integrate_differentiate(tr, 'differentiate') onset = engine.get_store(t.store_id).t('begin', (s.depth, s.distance_to(t))) tr = settings.do_filter(tr) if settings.normalize: tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata()))) ax.tick_params(axis='y', which='both', left='off', right='off', labelleft='off') y_pos = s.depth xdata = tr.get_xdata() - onset - s.time tr_ydata = tr.get_ydata() * -1 visible = tr.chop(tmin=event.time + onset + zoom_window[0], tmax=event.time + onset + zoom_window[1]) if ampl_scaler == 'trace min/max': ampl_scale = float(max(abs(visible.get_ydata()))) elif ampl_scaler == '4*standard deviation': ampl_scale = 4 * float(num.std(visible.get_ydata())) else: ampl_scale = 1. 
ampl_scale /= settings.gain ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.) if False: ax.fill_between(xdata, y_pos, ydata, where=ydata < y_pos, color='black', alpha=0.5) ax.text(zoom_window[0] * 1.09, y_pos, '%1.1f' % (s.depth / 1000.), horizontalalignment='right') #, fontsize=12.) if False: mod = store.config.earthmodel_1d label = 'pP' arrivals = mod.arrivals(phases=[cake.PhaseDef(label)], distances=[s.distance_to(t) * cake.m2d], zstart=s.depth) try: t = arrivals[0].t ydata_absmax = num.max(num.abs(tr.get_ydata())) marker_length = 0.5 x_marker = [t - onset] * 2 y = [ y_pos - (maxz - minz) * 0.025, y_pos + (maxz - minz) * 0.025 ] ax.plot(x_marker, y, linewidth=1, c='blue') ax.text( x_marker[1] - x_marker[1] * 0.005, y[1], label, #fontsize=12, color='black', verticalalignment='top', horizontalalignment='right') except IndexError: logger.warning( 'no pP phase at d=%s z=%s stat=%s' % (s.distance_to(t) * cake.m2d, s.depth, station.station)) pass if len(traces) == 0: raise Exception('No Trace found!') if len(traces) > 1: raise Exception('More then one trace provided!') else: onset = 0 tr = traces[0] correction = float(settings.correction) if quantity == 'displacement': tr = integrate_differentiate(tr, 'integrate') tr = settings.do_filter(tr) onset = engine.get_store(targets[0].store_id).t( 'begin', (event.depth, s.distance_to(targets[0]))) + event.time if settings.normalize: tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata()))) ax.tick_params(axis='y', which='both', left='off', right='off', labelleft='off') y_pos = event.depth xdata = tr.get_xdata() - onset + correction tr_ydata = tr.get_ydata() * -1 visible = tr.chop(tmin=onset + zoom_window[0] + correction, tmax=onset + zoom_window[1] + correction) if ampl_scaler == 'trace min/max': ampl_scale = float(max(abs(visible.get_ydata()))) elif ampl_scaler == '4*standard deviation': ampl_scale = 4 * float(num.std(visible.get_ydata())) else: ampl_scale = 1. ydata = (tr_ydata / ampl_scale * settings.gain * settings.gain_record) * relative_scale + y_pos ax.plot(xdata, ydata, c=settings.color, linewidth=1.) ax.set_xlim(zoom_window) zmax = max(test_depths) zmin = min(test_depths) zrange = zmax - zmin ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2)) ax.set_xlabel('Time [s]') ax.text(0.0, 0.6, 'Source depth [km]', rotation=90, horizontalalignment='left', transform=fig.transFigure) #, fontsize=12.) if fill: ax.fill_between(xdata, y_pos, ydata, where=ydata < y_pos, color=settings.color, alpha=0.5) if with_onset_line: ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14) vline = ax.axvline(0., c='black') vline.set_linestyle('--') if settings.title: params = { 'array-id': ''.join(station.nsl()), 'event_name': event.name, 'event_time': time_to_str(event.time) } ax.text(0.5, 1.05, settings.title % params, horizontalalignment='center', transform=ax.transAxes) if settings.auto_caption: cax = fig.add_axes([0., 0., 1, 0.05], label='caption') cax.axis('off') cax.xaxis.set_visible(False) cax.yaxis.set_visible(False) if settings.quantity == 'displacement': quantity_info = 'integrated velocity trace. ' if settings.quantity == 'velocity': quantity_info = 'differentiated synthetic traces. ' if settings.quantity == 'restituted': quantity_info = 'restituted traces. ' captions = {'filters': ''} for f in settings.filters: captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % ( f.type, f.order, f.corner) captions['quantity_info'] = quantity_info captions['store_sampling'] = 1. 
/ store.config.deltat cax.text( 0, 0, 'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s' % captions, fontsize=12, transform=cax.transAxes) plt.subplots_adjust(hspace=.4, bottom=0.15) else: plt.subplots_adjust(bottom=0.1) ax.invert_yaxis() if settings.save_as: logger.info('save as: %s ' % settings.save_as) options = settings.__dict__ options.update({'array-id': ''.join(station.nsl())}) fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight') if show: plt.show()
# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['/home/asteinbe/gf_stores'])

from silvertine import scenario
from pyrocko import model, cake, orthodrome
from silvertine.util.ref_mods import landau_layered_model
from silvertine.locate.locate1D import get_phases_list

mod = landau_layered_model()
scale = 2e-14
cake_phase = cake.PhaseDef("P")
phase_list = [cake_phase]
waveforms_events = []
waveforms_noise = []
stations = model.load_stations("stations.raw.txt")
nstations = len(stations)*3
noised = True
nevents = 1200
targets = []
for st in stations:
    for cha in st.channels:
        target = Target(
            lat=st.lat,
            lon=st.lon,
            store_id=store_id,
            interpolation='multilinear',
def call(self): self.cleanup() viewer = self.get_viewer() events = [m.get_event() for m in self.get_selected_event_markers()] for iev, ev in enumerate(events): ev.name = '%05i' % iev show_arrivals = False filters = [] for ident in ['high', 'low']: val = getattr(self, ident) if val != None: filters.append(trace.ButterworthResponse(corner=float(val), order=4, type=ident)) stations = self.get_stations() traces = list(self.chopper_selected_traces(fallback=True, trace_selector= viewer.trace_selector, load_data=False)) traces = [tr for trs in traces for tr in trs ] visible_nslcs = [tr.nslc_id for tr in traces] stations = [x for x in stations if util.match_nslcs( "%s.%s.%s.*" % x.nsl(), visible_nslcs)] # TODO option to choose other models mod = cake.load_model() nevents = len(events) pile = self.get_pile() targets = make_targets(pile, stations) if len(targets)==0: self.fail("No station available") ntargets = len(targets) self.cc = num.zeros((ntargets, nevents, nevents), dtype=num.float) self.similarity_matrix = SimilarityMatrix(targets=targets, events=events, filters=filters, padding=float(self.tpad), windowing_method=self.time_window_choice, vmax=float(self.vmax), vmin=float(self.vmin)) similarities = [] if self.save_traces : figure_dir = self.input_directory(caption='Select directory to store images') for itarget, target in enumerate(targets): print((itarget+1.)/float(ntargets)) ok_filtered = [] markers = [] for iev, ev in enumerate(events): dist = target.distance_to(ev) if self.time_window_choice=='vmin/vmax': tmin = ev.time + dist / self.vmax - self.tpad tmax = ev.time + dist / self.vmin + self.tpad elif self.time_window_choice=='P-phase': d = dist*cake.m2d z = ev.depth t = self.phase_cache.get((mod, d, z), False) if not t: rays = mod.arrivals( phases=[cake.PhaseDef(x) for x in 'p P'.split()], distances=[d], zstart=z) t = rays[0].t self.phase_cache[(mod, d, z)] = t tmin = ev.time + t - self.tpad * 0.1 tmax = ev.time + t + self.tpad * 0.9 trs = pile.chopper(tmin=tmin, tmax=tmax, trace_selector=viewer.trace_selector, want_incomplete=False) tr = [t for trss in trs for t in trss if t.nslc_id==target.codes] if len(tr)==0: continue elif len(tr)==1: tr = tr[0] else: self.fail('Something went wrong') if self.dt_wanted: tr.downsample_to(self.dt_wanted) tr2 = tr.copy() for f in filters: tr2 = tr2.transfer(transfer_function=f) tr2.chop(tmin, tmax) tr2.set_codes(location=ev.name+'f') tr.chop(tmin, tmax) tr.set_codes(location=ev.name+'r') ok_filtered.append((iev, ev, tr2)) ok = ok_filtered while ok: (ia, a_ev, a_tr) = ok.pop() for (ib, b_ev, b_tr) in ok: relamp = 0.0 if a_tr is not None and b_tr is not None: c_tr = trace.correlate(a_tr, b_tr, mode='full', normalization='normal') t_center = c_tr.tmin+(c_tr.tmax-c_tr.tmin)/2. 
c_tr_chopped = c_tr.chop(t_center-self.tdist, t_center+self.tdist, inplace=False) t_mini, v_mini = c_tr_chopped.min() t_maxi, v_maxi = c_tr_chopped.max() b_tr_shifted = b_tr.copy() if abs(v_mini) > abs(v_maxi): v_cc = v_mini time_lag = -t_mini else: time_lag = -t_maxi v_cc = v_maxi self.cc[itarget, ia, ib] = v_cc b_tr_shifted.shift(time_lag) if self.cc[itarget, ia, ib] != 0.0: tmin = max(a_tr.tmin, b_tr_shifted.tmin) tmax = min(a_tr.tmax, b_tr_shifted.tmax) try: a_tr_chopped = a_tr.chop(tmin, tmax, inplace=False) b_tr_chopped = b_tr_shifted.chop(tmin, tmax) except trace.NoData: logger.warn('NoData %s'%a_tr_chopped) continue ya = a_tr_chopped.ydata yb = b_tr_chopped.ydata relamp = num.sum(ya*yb) / num.sum(ya**2) if self.save_traces: fig, axes = plt.subplots(3,1) fig.suptitle('.'.join(target.codes)) axes[0].plot(a_tr_chopped.get_xdata(), a_tr_chopped.get_ydata()) axes[0].text(0, 1, "id: %s, time: %s" %(a_ev.name, util.time_to_str(a_ev.time)), transform=axes[0].transAxes, verticalalignment='top', horizontalalignment='left') axes[1].plot(b_tr_chopped.get_xdata(), b_tr_chopped.get_ydata()) axes[1].text(0, 1, "id: %s, time: %s" %(b_ev.name, util.time_to_str(b_ev.time)), transform=axes[1].transAxes, verticalalignment='top', horizontalalignment='left') axes[2].plot(c_tr.get_xdata(), c_tr.get_ydata()) axes[2].text(0, 1, 'cc_max: %1.4f' % v_cc, transform=axes[2].transAxes, verticalalignment='top', horizontalalignment='left') fn = op.join(figure_dir, 'cc_T%s.E%s.E%s.png' % (itarget, ia, ib)) fig.savefig(fn, pad_inches=0.1, bbox_inches='tight', tight_layout=True) sim = Similarity( ievent=ia, jevent=ib, itarget=itarget, cross_correlation=float(self.cc[itarget, ia, ib]), relative_amplitude=float(relamp), time_lag=float(-time_lag)) similarities.append(sim) if self.show_results: for itarget, target in enumerate(targets): if not num.any(self.cc[itarget]): continue fig = self.pylab(get='figure') fig.suptitle('.'.join(target.codes)) axes = fig.add_subplot(111) axes.set_xlabel('Event number') axes.set_ylabel('Event number') mesh = axes.pcolormesh(self.cc[itarget,:,:], cmap='RdBu', vmin=-1.0, vmax=1.0) cb = fig.colorbar(mesh, ax=axes) cb.set_label('Max correlation coefficient') fig.canvas.draw() self.similarity_matrix.similarities = similarities self.similarity_matrix.validate()
from pyrocko import cake
from numpy import arange, array

km = 1000.

# Load builtin 'prem-no-ocean' model ('.m': medium resolution variant)
# model = cake.load_model('prem-no-ocean.m')
model = cake.load_model(
    '/Users/dmelgar/code/pyrocko/build/lib.macosx-10.6-x86_64-2.7/pyrocko/data/earthmodels/Nocal.nd')

# Source depth [m].
# source_depth = 300. * km
source_depth = 5.8

# Distances as a numpy array [deg].
distances = array([5, 10, 20, 50, 100]) * km * cake.m2d

# Define the phase to use.
Phase = cake.PhaseDef('P')

# Calculate distances and arrivals and print them:
print('distance [km]      time [s]')
for arrival in model.arrivals(distances, phases=Phase, zstart=source_depth):
    print('%13g %13g' % (arrival.x * cake.d2m / km, arrival.t))
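# The same computation works with one of pyrocko's builtin earth models
# instead of the user-specific absolute path above; the model name and
# source depth below are illustrative choices.
from numpy import array
from pyrocko import cake

km = 1000.
model = cake.load_model('prem-no-ocean.m')
source_depth = 10. * km                                    # [m]
distances = array([5, 10, 20, 50, 100]) * km * cake.m2d    # [deg]

print('distance [km]      time [s]')
for arrival in model.arrivals(distances, phases=cake.PhaseDef('P'),
                              zstart=source_depth):
    print('%13g %13g' % (arrival.x * cake.d2m / km, arrival.t))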
def generate_test_data_grid(store_id, store_dirs, coordinates,
                            geometry_params, pre=0.5, post=3,
                            stations_input=None, batch_loading=256,
                            paths_disks=None):
    engine = LocalEngine(store_superdirs=[store_dirs])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    waveforms_events = []
    waveforms_events_uncut = []
    waveforms_noise = []
    sources = []
    lats = coordinates[0]
    lons = coordinates[1]
    depths = coordinates[2]
    if stations_input is None:
        stations_unsorted = model.load_stations("data/stations.pf")
    else:
        stations_unsorted = model.load_stations(stations_input)
    for st in stations_unsorted:
        st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                  lats[0], lons[0])
        st.azi = orthodrome.azimuth(st.lat, st.lon, lats[0], lons[0])
    stations = sorted(stations_unsorted, key=lambda x: x.dist, reverse=True)

    targets = []
    events = []
    mean_lat = []
    mean_lon = []
    max_rho = 0.
    for st in stations:
        mean_lat.append(st.lat)
        mean_lon.append(st.lon)
        for cha in st.channels:
            # compare channel names by value, not identity
            if cha.name != "R" and cha.name != "T" and cha.name != "Z":
                target = Target(lat=st.lat,
                                lon=st.lon,
                                store_id=store_id,
                                interpolation='multilinear',
                                quantity='displacement',
                                codes=st.nsl() + (cha.name, ))
                targets.append(target)

    strikes = geometry_params[0]
    dips = geometry_params[1]
    rakes = geometry_params[2]
    vs = geometry_params[3]
    ws = geometry_params[4]

    grid_points = []
    for lat in lats:
        for lon in lons:
            for depth in depths:
                grid_points.append([lat, lon, depth])

    ray.init(num_cpus=num_cpus - 1)
    npm = len(lats) * len(lons) * len(depths)
    npm_geom = len(strikes) * len(dips) * len(rakes)

    results = ray.get([
        get_parallel_mtqt.remote(
            i, targets, store_id, post, pre, stations, mod, grid_points[i],
            strikes, dips, rakes, vs, ws, store_dirs,
            batch_loading=batch_loading, npm=npm_geom,
            paths_disks=paths_disks)
        for i in range(len(grid_points))])
    ray.shutdown()

    return waveforms_events
def load_data(data_dir, store_id, stations=None, pre=0.5, post=3,
              reference_event=None, min_len=420, error_t=None,
              lat=None, lon=None, depth=None, engine=None, ev_id=None):
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    gf_freq = store.config.sample_rate
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    events = []
    waveforms = []
    waveforms_shifted = []
    if ev_id is None:
        pathlist = Path(data_dir).glob('ev_*/')
    else:
        pathlist = Path(data_dir).glob('ev_%s/' % ev_id)
    for path in sorted(pathlist):
        targets = []
        path = str(path) + "/"
        event = model.load_events(path + "event.txt")[0]
        traces_loaded = io.load(path + "waveforms/rest/traces.mseed")
        stations_unsorted = model.load_stations(data_dir + "stations.pf")
        for st in stations_unsorted:
            st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                      event.lat, event.lon)
            st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat,
                                        event.lon)
        stations = sorted(stations_unsorted, key=lambda x: x.dist,
                          reverse=True)
        traces_processed = []
        if lat is not None:
            event.lat = lat
            event.lon = lon
            event.depth = depth
        traces = check_traces(traces_loaded, stations, min_len=min_len,
                              event=event)
        if error_t is not None:
            traces_shift = copy.deepcopy(traces)
        traces_processed, nsamples = process_loaded_waveforms(
            traces, stations, event, gf_freq, mod, pre, post)
        if error_t is not None:
            traces_processed_shifted = process_loaded_waveforms_shift(
                traces_shift, stations, event, gf_freq, mod, pre, post,
                shift_max=error_t)
            waveforms_shifted.append(traces_processed_shifted)
        events.append(event)
        waveforms.append(traces_processed)

    return waveforms, nsamples, events, waveforms_shifted
def calcTTTAdv_cube(Config, station, Origin, flag, arrayname, Xcorrshift, Refshift, phase, flag_rpe=False): cfg = ConfigObj(dict=Config) if flag_rpe is True: dimX = cfg.Int('dimx_emp') dimY = cfg.Int('dimy_emp') dimZ = cfg.Int('dimz_emp') else: dimX = cfg.Int('dimx') dimY = cfg.Int('dimy') dimZ = cfg.Int('dimz') orig_depth = float(Origin['depth']) start, stop, step = cfg.String('depths').split(',') start = orig_depth + float(start) stop = orig_depth + float(stop) depths = np.linspace(start, stop, num=dimZ) gridspacing = cfg.config_geometry.gridspacing traveltime_model = cfg_yaml.config.traveltime_model o_lat = float(Origin['lat']) o_lon = float(Origin['lon']) oLator = o_lat + dimX / 2 oLonor = o_lon + dimY / 2 oLatul = 0 oLonul = 0 o_dip = 80. plane = False TTTGridMap = {} LMINMAX = [] GridArray = {} locStation = Location(station.lat, station.lon) sdelta = loc2degrees(Location(o_lat, o_lon), locStation) Phase = cake.PhaseDef(phase) path = palantiri.__path__ model = cake.load_model(path[0] + '/data/' + traveltime_model) for depth in depths: o_depth = depth for i in xrange(dimX): oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing if i == 0: Latul = oLatul o = 0 for j in xrange(dimY): oLonul = o_lon - ( (dimY - 1) / 2) * gridspacing + j * gridspacing if o == 0 and j == 0: Lonul = oLonul de = loc2degrees(Location(oLatul, oLonul), locStation) arrivals = model.arrivals([de, de], phases=Phase, zstart=o_depth * km) try: ttime = arrivals[0].t except Exception: try: arrivals = model.arrivals([de, de], phases=Phase, zstart=o_depth * km, zstop=o_depth * km, refine=True) ttime = arrivals[0].t except Exception: arrivals = model.arrivals([de, de], phases=Phase, zstart=o_depth * km - 2.5, zstop=o_depth * km + 2.5, refine=True) ttime = arrivals[0].t GridArray[(i, j, depth)] = GridElem(oLatul, oLonul, o_depth, ttime, de) LMINMAX.append(ttime) if ttime == 0: raise Exception("\033[31mILLEGAL: phase definition\033[0m") mint = min(LMINMAX) maxt = max(LMINMAX) TTTGridMap[station.getName()] = TTTGrid(o_depth, mint, maxt, Latul, Lonul, oLator, oLonor, GridArray) k = MinTMaxT(mint, maxt) if flag_rpe is True: Basic.dumpToFile(str(flag) + '-ttt_emp.pkl', TTTGridMap) Basic.dumpToFile('minmax-emp' + str(flag) + '.pkl', k) Basic.dumpToFile('station-emp' + str(flag) + '.pkl', station) else: Basic.dumpToFile(str(flag) + '-ttt.pkl', TTTGridMap) Basic.dumpToFile('minmax-' + str(flag) + '.pkl', k) Basic.dumpToFile('station-' + str(flag) + '.pkl', station)
def call(self): self.cleanup() viewer = self.get_viewer() master = viewer.get_active_event() if master is None: self.fail('no master event selected') stations = list(viewer.stations.values()) stations.sort(key=lambda s: (s.network, s.station)) if not stations: self.fail('no station information available') # gather events to be processed events = [] for m in viewer.markers: if isinstance(m, EventMarker): if m.kind == 0: events.append(m.get_event()) events.sort(key=lambda ev: ev.time) event_to_number = {} for iev, ev in enumerate(events): event_to_number[ev] = iev if self.model_select.startswith('Global'): model_key = 'global' else: model_key = master.lat, master.lon if model_key != self.model_key: if self.model_select.startswith('Global'): self.model = cake.load_model() else: latlon = master.lat, master.lon profile = crust2x2.get_profile(*latlon) profile.set_layer_thickness(crust2x2.LWATER, 0.0) self.model = cake.LayeredModel.from_scanlines( cake.from_crust2x2_profile(profile)) self.model_key = model_key phases = { 'P': ([cake.PhaseDef(x) for x in 'P p'.split()], 'Z'), 'S': ([cake.PhaseDef(x) for x in 'S s'.split()], 'NE'), } phasenames = list(phases.keys()) phasenames.sort() # synthetic arrivals and ray geometry for master event master_depth = master.depth if self.master_depth_km is not None: master_depth = self.master_depth_km * km tt = {} g = {} for iphase, phasename in enumerate(phasenames): for istation, station in enumerate(stations): dist = orthodrome.distance_accurate50m(master, station) azi = orthodrome.azimuth(master, station) arrivals = self.model.arrivals(phases=phases[phasename][0], distances=[dist * cake.m2d], zstart=master_depth, zstop=0.0) if arrivals: first = arrivals[0] tt[station.network, station.station, phasename] = first.t takeoff = first.takeoff_angle() u = first.path.first_straight().u_in(first.endgaps) g[iphase, istation] = num.array([ math.cos(azi * d2r) * math.sin(takeoff * d2r) * u, math.sin(azi * d2r) * math.sin(takeoff * d2r) * u, math.cos(takeoff * d2r) * u ]) # gather picks for each event for ev in events: picks = {} for m2 in viewer.markers: if isinstance(m2, PhaseMarker) and m2.kind == 0: if m2.get_event() == ev: net, sta, _, _ = m2.one_nslc() picks[net, sta, m2.get_phasename()] = (m2.tmax + m2.tmin) / 2.0 ev.picks = picks # time corrections for extraction windows dataobs = [] datasyn = [] for phasename in phasenames: for station in stations: nsp = station.network, station.station, phasename datasyn.append(tt.get(nsp, None)) for ev in events: if nsp in ev.picks: ttobs = ev.picks[nsp] - ev.time else: ttobs = None dataobs.append(ttobs) ttsyn = num.array(datasyn, dtype=num.float).reshape( (len(phasenames), len(stations))) ttobs = num.array(dataobs, dtype=num.float).reshape( (len(phasenames), len(stations), len(events))) ttres = ttobs - ttsyn[:, :, num.newaxis] tt_corr_event = num.nansum( ttres, axis=1) / \ num.nansum( num.isfinite(ttres), axis=1 ) tt_corr_event = num.where(num.isfinite(tt_corr_event), tt_corr_event, 0.) ttres -= tt_corr_event[:, num.newaxis, :] tt_corr_station = num.nansum( ttres, axis=2) / \ num.nansum( num.isfinite(ttres), axis=2 ) tt_corr_station = num.where(num.isfinite(tt_corr_station), tt_corr_station, 0.) 
ttres -= tt_corr_station[:, :, num.newaxis] tevents_raw = num.array([ev.time for ev in events]) tevents_corr = tevents_raw + num.mean(tt_corr_event, axis=0) # print timing information print('timing stats') for iphasename, phasename in enumerate(phasenames): data = [] for ev in events: iev = event_to_number[ev] for istation, station in enumerate(stations): nsp = station.network, station.station, phasename if nsp in tt and nsp in ev.picks: tarr = ev.time + tt[nsp] tarr_ec = tarr + tt_corr_event[iphasename, iev] tarr_ec_sc = tarr_ec + tt_corr_station[iphasename, istation] tobs = ev.picks[nsp] data.append( (tobs - tarr, tobs - tarr_ec, tobs - tarr_ec_sc)) if data: data = num.array(data, dtype=num.float).T print('event %10s %3s %3i %15.2g %15.2g %15.2g' % ((ev.name, phasename, data.shape[1]) + tuple(num.mean(num.abs(x)) for x in data))) else: print('event %10s %3s no picks' % (ev.name, phasename)) # extract and preprocess waveforms tpad = 0.0 for f in self.corner_highpass, self.corner_lowpass: if f is not None: tpad = max(tpad, 1.0 / f) pile = self.get_pile() waveforms = {} for ev in events: iev = event_to_number[ev] markers = [] for iphasename, phasename in enumerate(phasenames): for istation, station in enumerate(stations): nsp = station.network, station.station, phasename if nsp in tt: tarr = ev.time + tt[nsp] nslcs = [(station.network, station.station, '*', '*')] marker = PhaseMarker(nslcs, tarr, tarr, 1, event=ev, phasename=phasename) markers.append(marker) tarr2 = tarr + tt_corr_station[iphasename, istation] + \ tt_corr_event[iphasename, iev] marker = PhaseMarker(nslcs, tarr2, tarr2, 2, event=ev, phasename=phasename) markers.append(marker) tmin = tarr2 + self.tstart tmax = tarr2 + self.tend marker = PhaseMarker(nslcs, tmin, tmax, 3, event=ev, phasename=phasename) markers.append(marker) trs = pile.all(tmin, tmax, tpad=tpad, trace_selector=lambda tr: tr.nslc_id[:2] == nsp[:2], want_incomplete=False) trok = [] for tr in trs: if num.all(tr.ydata[0] == tr.ydata): continue if self.corner_highpass: tr.highpass(4, self.corner_highpass) if self.corner_lowpass: tr.lowpass(4, self.corner_lowpass) tr.chop(tmin, tmax) tr.set_location(ev.name) #tr.shift( - (tmin - master.time) ) if num.all(num.isfinite(tr.ydata)): trok.append(tr) waveforms[nsp + (iev, )] = trok self.add_markers(markers) def get_channel(trs, cha): for tr in trs: if tr.channel == cha: return tr return None nevents = len(events) nstations = len(stations) nphases = len(phasenames) # correlate waveforms coefs = num.zeros((nphases, nstations, nevents, nevents)) coefs.fill(num.nan) tshifts = coefs.copy() tshifts_picked = coefs.copy() for iphase, phasename in enumerate(phasenames): for istation, station in enumerate(stations): nsp = station.network, station.station, phasename for a in events: ia = event_to_number[a] for b in events: ib = event_to_number[b] if ia == ib: continue if nsp in a.picks and nsp in b.picks: tshifts_picked[iphase,istation,ia,ib] = \ b.picks[nsp] - a.picks[nsp] wa = waveforms[nsp + (ia, )] wb = waveforms[nsp + (ib, )] channels = list(set([tr.channel for tr in wa + wb])) channels.sort() tccs = [] for cha in channels: if cha[-1] not in phases[phasename][1]: continue ta = get_channel(wa, cha) tb = get_channel(wb, cha) if ta is None or tb is None: continue tcc = trace.correlate(ta, tb, mode='full', normalization='normal', use_fft=True) tccs.append(tcc) if not tccs: continue tc = None for tcc in tccs: if tc is None: tc = tcc else: tc.add(tcc) tc.ydata *= 1. 
/ len(tccs) tmid = tc.tmin * 0.5 + tc.tmax * 0.5 tlen = (tc.tmax - tc.tmin) * 0.5 tc_cut = tc.chop(tmid - tlen * 0.5, tmid + tlen * 0.5, inplace=False) tshift, coef = tc_cut.max() if (tshift < tc.tmin + 0.5 * tc.deltat or tc.tmax - 0.5 * tc.deltat < tshift): continue coefs[iphase, istation, ia, ib] = coef tshifts[iphase, istation, ia, ib] = tshift if self.show_correlation_traces: tc.shift(master.time - (tc.tmax + tc.tmin) / 2.) self.add_trace(tc) #tshifts = tshifts_picked coefssum_sta = num.nansum(coefs, axis=2) / num.sum(num.isfinite(coefs), axis=2) csum_sta = num.nansum(coefssum_sta, axis=2) / num.sum( num.isfinite(coefssum_sta), axis=2) for iphase, phasename in enumerate(phasenames): for istation, station in enumerate(stations): print('station %-5s %s %15.2g' % (station.station, phasename, csum_sta[iphase, istation])) coefssum = num.nansum(coefs, axis=1) / num.sum(num.isfinite(coefs), axis=1) csumevent = num.nansum(coefssum, axis=2) / num.sum( num.isfinite(coefssum), axis=2) above = num.where(num.isfinite(coefs), coefs >= self.min_corr, 0) csumabove = num.sum(num.sum(above, axis=1), axis=2) coefssum = num.ma.masked_invalid(coefssum) print('correlation stats') for iphase, phasename in enumerate(phasenames): for ievent, event in enumerate(events): print('event %10s %3s %8i %15.2g' % (event.name, phasename, csumabove[iphase, ievent], csumevent[iphase, ievent])) # plot event correlation matrix fframe = self.figure_frame() fig = fframe.gcf() for iphase, phasename in enumerate(phasenames): p = fig.add_subplot(1, nphases, iphase + 1) p.set_xlabel('Event number') p.set_ylabel('Event number') mesh = p.pcolormesh(coefssum[iphase]) cb = fig.colorbar(mesh, ax=p) cb.set_label('Max correlation coefficient') if self.save: fig.savefig(self.output_filename(dir='correlation.pdf')) fig.canvas.draw() # setup and solve linear system data = [] rows = [] weights = [] for iphase in range(nphases): for istation in range(nstations): for ia in range(nevents): for ib in range(ia + 1, nevents): k = iphase, istation, ia, ib w = coefs[k] if not num.isfinite(tshifts[k]) \ or not num.isfinite(w) or w < self.min_corr: continue row = num.zeros(nevents * 4) row[ia * 4:ia * 4 + 3] = g[iphase, istation] row[ia * 4 + 3] = -1.0 row[ib * 4:ib * 4 + 3] = -g[iphase, istation] row[ib * 4 + 3] = 1.0 weights.append(w) rows.append(row) data.append(tshifts[iphase, istation, ia, ib]) nsamp = len(data) for i in range(4): row = num.zeros(nevents * 4) row[i::4] = 1. 
rows.append(row) data.append(0.0) if self.fix_depth: for ievent in range(nevents): row = num.zeros(nevents * 4) row[ievent * 4 + 2] = 1.0 rows.append(row) data.append(0.0) a = num.array(rows, dtype=num.float) d = num.array(data, dtype=num.float) w = num.array(weights, dtype=num.float) if self.weighting == 'equal': w[:nsamp] = 1.0 elif self.weighting == 'linear': pass elif self.weighting == 'quadratic': w[:nsamp] = w[:nsamp]**2 a[:nsamp, :] *= w[:, num.newaxis] d[:nsamp] *= w[:nsamp] x, residuals, rank, singular = num.linalg.lstsq(a, d) x0 = num.zeros(nevents * 4) x0[3::4] = tevents_corr mean_abs_residual0 = num.mean( num.abs((num.dot(a[:nsamp], x0) - d[:nsamp]) / w[:nsamp])) mean_abs_residual = num.mean( num.abs((num.dot(a[:nsamp], x) - d[:nsamp]) / w[:nsamp])) print(mean_abs_residual0, mean_abs_residual) # distorted solutions npermutations = 100 noiseamount = mean_abs_residual xdistorteds = [] for i in range(npermutations): dnoisy = d.copy() dnoisy[:nsamp] += num.random.normal( size=nsamp) * noiseamount * w[:nsamp] xdistorted, residuals, rank, singular = num.linalg.lstsq(a, dnoisy) xdistorteds.append(xdistorted) mean_abs_residual = num.mean( num.abs(num.dot(a, xdistorted)[:nsamp] - dnoisy[:nsamp])) tmean = num.mean([e.time for e in events]) north = x[0::4] east = x[1::4] down = x[2::4] etime = x[3::4] + tmean def plot_range(x): mi, ma = num.percentile(x, [10., 90.]) ext = (ma - mi) / 5. mi -= ext ma += ext return mi, ma lat, lon = orthodrome.ne_to_latlon(master.lat, master.lon, north, east) events_out = [] for ievent, event in enumerate(events): event_out = model.Event(time=etime[ievent], lat=lat[ievent], lon=lon[ievent], depth=down[ievent] + master_depth, name=event.name) mark = EventMarker(event_out, kind=4) self.add_marker(mark) events_out.append(event_out) model.Event.dump_catalog(events_out, 'events.relocated.txt') # plot results ned_orig = [] for event in events: n, e = orthodrome.latlon_to_ne(master, event) d = event.depth ned_orig.append((n, e, d)) ned_orig = num.array(ned_orig) ned_orig[:, 0] -= num.mean(ned_orig[:, 0]) ned_orig[:, 1] -= num.mean(ned_orig[:, 1]) ned_orig[:, 2] -= num.mean(ned_orig[:, 2]) north0, east0, down0 = ned_orig.T north2, east2, down2, time2 = num.hstack(xdistorteds).reshape( (-1, 4)).T fframe = self.figure_frame() fig = fframe.gcf() color_sym = (0.1, 0.1, 0.0) color_scat = (0.3, 0.5, 1.0, 0.2) d = u'\u0394 ' if not self.fix_depth: p = fig.add_subplot(2, 2, 1, aspect=1.0) else: p = fig.add_subplot(1, 1, 1, aspect=1.0) mi_north, ma_north = plot_range(north) mi_east, ma_east = plot_range(east) mi_down, ma_down = plot_range(down) p.set_xlabel(d + 'East [km]') p.set_ylabel(d + 'North [km]') p.plot(east2 / km, north2 / km, '.', color=color_scat, markersize=2) p.plot(east / km, north / km, '+', color=color_sym) p.plot(east0 / km, north0 / km, 'x', color=color_sym) p0 = p for i, ev in enumerate(events): p.text(east[i] / km, north[i] / km, ev.name, clip_on=True) if not self.fix_depth: p = fig.add_subplot(2, 2, 2, sharey=p0, aspect=1.0) p.set_xlabel(d + 'Depth [km]') p.set_ylabel(d + 'North [km]') p.plot(down2 / km, north2 / km, '.', color=color_scat, markersize=2) p.plot(down / km, north / km, '+', color=color_sym) for i, ev in enumerate(events): p.text(down[i] / km, north[i] / km, ev.name, clip_on=True) p1 = p p = fig.add_subplot(2, 2, 3, sharex=p0, aspect=1.0) p.set_xlabel(d + 'East [km]') p.set_ylabel(d + 'Depth [km]') p.plot(east2 / km, down2 / km, '.', color=color_scat, markersize=2) p.plot(east / km, down / km, '+', color=color_sym) for i, ev in 
enumerate(events): p.text(east[i] / km, down[i] / km, ev.name, clip_on=True) p.invert_yaxis() p2 = p p0.set_xlim(mi_east / km, ma_east / km) p0.set_ylim(mi_north / km, ma_north / km) if not self.fix_depth: p1.set_xlim(mi_down / km, ma_down / km) p2.set_ylim(mi_down / km, ma_down / km) if self.save: fig.savefig(self.output_filename(dir='locations.pdf')) fig.canvas.draw()
def plot(settings, show=False): # align_phase = 'P(cmb)P<(icb)(cmb)p' with_onset_line = False fill = True align_phase = "P" zoom_window = list(settings.zoom) ampl_scaler = "4*standard deviation" quantity = settings.quantity zstart, zstop, inkr = settings.depths.split(":") test_depths = num.arange( float(zstart) * km, float(zstop) * km, float(inkr) * km) try: traces = io.load(settings.trace_filename) except FileLoadError as e: logger.info(e) return event = model.load_events(settings.event_filename) assert len(event) == 1 event = event[0] event.depth = float(settings.depth) * 1000.0 base_source = MTSource.from_pyrocko_event(event) test_sources = [] for d in test_depths: s = base_source.clone() s.depth = float(d) test_sources.append(s) if settings.store_superdirs: engine = LocalEngine(store_superdirs=settings.store_superdirs) else: engine = LocalEngine(use_config=True) try: store = engine.get_store(settings.store_id) except seismosizer.NoSuchStore as e: logger.info("%s ... skipping." % e) return stations = model.load_stations(settings.station_filename) station = list( filter(lambda s: match_nslc("%s.%s.%s.*" % s.nsl(), traces[0].nslc_id), stations)) assert len(station) == 1 station = station[0] targets = [ station_to_target(station, quantity=quantity, store_id=settings.store_id) ] try: request = engine.process(targets=targets, sources=test_sources) except seismosizer.NoSuchStore as e: logger.info("%s ... skipping." % e) return except meta.OutOfBounds as error: if settings.force_nearest_neighbor: logger.warning("%s Using nearest neighbor instead." % error) mod_targets = [] for t in targets: closest_source = min(test_sources, key=lambda s: s.distance_to(t)) farthest_source = max(test_sources, key=lambda s: s.distance_to(t)) min_dist_delta = store.config.distance_min - closest_source.distance_to( t) max_dist_delta = (store.config.distance_max - farthest_source.distance_to(t)) if min_dist_delta < 0: azi, bazi = closest_source.azibazi_to(t) newlat, newlon = ortho.azidist_to_latlon( t.lat, t.lon, azi, min_dist_delta * cake.m2d) elif max_dist_delta < 0: azi, bazi = farthest_source.azibazi_to(t) newlat, newlon = ortho.azidist_to_latlon( t.lat, t.lon, azi, max_dist_delta * cake.m2d) t.lat, t.lon = newlat, newlon mod_targets.append(t) request = engine.process(targets=mod_targets, sources=test_sources) else: logger.error("%s: %s" % (error, ".".join(station.nsl()))) return alldepths = list(test_depths) fig = plt.figure() ax = fig.add_subplot(111) maxz = max(test_depths) minz = min(test_depths) relative_scale = (maxz - minz) * 0.02 for s, t, tr in request.iter_results(): if quantity == "velocity": tr = integrate_differentiate(tr, "differentiate") onset = engine.get_store(t.store_id).t("begin", (s.depth, s.distance_to(t))) tr = settings.do_filter(tr) if settings.normalize: tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata()))) ax.tick_params(axis="y", which="both", left="off", right="off", labelleft="off") y_pos = s.depth xdata = tr.get_xdata() - onset - s.time tr_ydata = tr.get_ydata() * -1 visible = tr.chop( tmin=event.time + onset + zoom_window[0], tmax=event.time + onset + zoom_window[1], ) if ampl_scaler == "trace min/max": ampl_scale = float(max(abs(visible.get_ydata()))) elif ampl_scaler == "4*standard deviation": ampl_scale = 4 * float(num.std(visible.get_ydata())) else: ampl_scale = 1.0 ampl_scale /= settings.gain ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos ax.plot(xdata, ydata, c="black", linewidth=1.0, alpha=1.0) if False: ax.fill_between(xdata, y_pos, ydata, where=ydata < 
y_pos, color="black", alpha=0.5) ax.text( zoom_window[0] * 1.09, y_pos, "%1.1f" % (s.depth / 1000.0), horizontalalignment="right", ) # , fontsize=12.) if False: mod = store.config.earthmodel_1d label = "pP" arrivals = mod.arrivals( phases=[cake.PhaseDef(label)], distances=[s.distance_to(t) * cake.m2d], zstart=s.depth, ) try: t = arrivals[0].t ydata_absmax = num.max(num.abs(tr.get_ydata())) marker_length = 0.5 x_marker = [t - onset] * 2 y = [ y_pos - (maxz - minz) * 0.025, y_pos + (maxz - minz) * 0.025 ] ax.plot(x_marker, y, linewidth=1, c="blue") ax.text( x_marker[1] - x_marker[1] * 0.005, y[1], label, # fontsize=12, color="black", verticalalignment="top", horizontalalignment="right", ) except IndexError: logger.warning( "no pP phase at d=%s z=%s stat=%s" % (s.distance_to(t) * cake.m2d, s.depth, station.station)) pass if len(traces) == 0: raise Exception("No Trace found!") if len(traces) > 1: raise Exception("More then one trace provided!") else: tr = traces[0] correction = float(settings.correction) if quantity == "displacement": tr = integrate_differentiate(tr, "integrate") tr = settings.do_filter(tr) onset = (engine.get_store(targets[0].store_id).t( "begin", (event.depth, s.distance_to(targets[0]))) + event.time) if settings.normalize: tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata()))) ax.tick_params(axis="y", which="both", left="off", right="off", labelleft="off") y_pos = event.depth xdata = tr.get_xdata() - onset + correction tr_ydata = tr.get_ydata() * -1 visible = tr.chop( tmin=onset + zoom_window[0] + correction, tmax=onset + zoom_window[1] + correction, ) if ampl_scaler == "trace min/max": ampl_scale = float(max(abs(visible.get_ydata()))) elif ampl_scaler == "4*standard deviation": ampl_scale = 4 * float(num.std(visible.get_ydata())) else: ampl_scale = 1.0 ydata = (tr_ydata / ampl_scale * settings.gain * settings.gain_record) * relative_scale + y_pos ax.plot(xdata, ydata, c=settings.color, linewidth=1.0) ax.set_xlim(zoom_window) zmax = max(test_depths) zmin = min(test_depths) zrange = zmax - zmin ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2)) ax.set_xlabel("Time [s]") ax.text( 0.0, 0.6, "Source depth [km]", rotation=90, horizontalalignment="left", transform=fig.transFigure, ) # , fontsize=12.) if fill: ax.fill_between(xdata, y_pos, ydata, where=ydata < y_pos, color=settings.color, alpha=0.5) if with_onset_line: ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14) vline = ax.axvline(0.0, c="black") vline.set_linestyle("--") if settings.title: params = { "array-id": "".join(station.nsl()), "event_name": event.name, "event_time": time_to_str(event.time), } ax.text( 0.5, 1.05, settings.title % params, horizontalalignment="center", transform=ax.transAxes, ) if settings.auto_caption: cax = fig.add_axes([0.0, 0.0, 1, 0.05], label="caption") cax.axis("off") cax.xaxis.set_visible(False) cax.yaxis.set_visible(False) if settings.quantity == "displacement": quantity_info = "integrated velocity trace. " if settings.quantity == "velocity": quantity_info = "differentiated synthetic traces. " if settings.quantity == "restituted": quantity_info = "restituted traces. " captions = {"filters": ""} for f in settings.filters: captions["filters"] += "%s-pass, order %s, f$_c$=%s Hz. 
" % ( f.type, f.order, f.corner, ) captions["quantity_info"] = quantity_info captions["store_sampling"] = 1.0 / store.config.deltat cax.text( 0, 0, "Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s" % captions, fontsize=12, transform=cax.transAxes, ) plt.subplots_adjust(hspace=0.4, bottom=0.15) else: plt.subplots_adjust(bottom=0.1) ax.invert_yaxis() if settings.save_as: logger.info("save as: %s " % settings.save_as) options = settings.__dict__ options.update({"array-id": "".join(station.nsl())}) fig.savefig(settings.save_as % options, dpi=160, bbox_inches="tight") if show: plt.show()
def refTrigger(self, RefWaveform):
    Config = self.Config
    cfg = ConfigObj(dict=Config)
    name = '%s.%s.%s.%s' % (
        RefWaveform[0].stats.network, RefWaveform[0].stats.station,
        RefWaveform[0].stats.location, RefWaveform[0].stats.channel)

    i = self.searchMeta(name, self.StationMeta)
    de = loc2degrees(self.Origin, i)

    ptime = 0
    Phase = cake.PhaseDef('P')
    model = cake.load_model()
    if cfg.colesseo_input() is True:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth, zstop=0.)
    else:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth * km, zstop=0.)
    try:
        ptime = arrivals[0].t
    except Exception:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth * km - 0.1)
        ptime = arrivals[0].t

    phasename = '%sphase' % os.path.basename(self.AF)

    if ptime == 0:
        print('\033[31mAvailable phases for reference station %s in range '
              '%f degree\033[0m' % (i, de))
        print('\033[31myou tried phase %s\033[0m' % self.Config[phasename])
        raise Exception('\033[31mILLEGAL: phase definition\033[0m')

    tw = self.calculateTimeWindows(ptime)

    if cfg.pyrocko_download() is True:
        stP = self.readWaveformsPicker_pyrocko(i, tw, self.Origin, ptime)
    elif cfg.colesseo_input() is True:
        stP = self.readWaveformsPicker_colos(i, tw, self.Origin, ptime)
    else:
        stP = self.readWaveformsPicker(i, tw, self.Origin, ptime)

    refuntouchname = os.path.basename(self.AF) + '-refstation-raw.mseed'
    stP.write(os.path.join(self.EventPath, refuntouchname),
              format='MSEED', byteorder='>')
    stP.filter('bandpass',
               freqmin=float(self.Config['refstationfreqmin']),
               freqmax=float(self.Config['refstationfreqmax']))
    stP.trim(tw['xcorrstart'], tw['xcorrend'])
    trP = stP[0]
    trP.stats.starttime = UTCDateTime(3600)
    refname = os.path.basename(self.AF) + '-refstation-filtered.mseed'
    trP.write(os.path.join(self.EventPath, refname),
              format='MSEED', byteorder='>')

    sta = float(self.Config['refsta'])
    lta = float(self.Config['reflta'])
    cft = recSTALTA(trP.data, int(sta * trP.stats.sampling_rate),
                    int(lta * trP.stats.sampling_rate))
    t = triggerOnset(cft, lta, sta)
    try:
        onset = t[0][0] / trP.stats.sampling_rate
        print('ONSET ', onset)
    except Exception:
        onset = self.mintforerun

    trigger = trP.stats.starttime + onset
    print('TRIGGER ', trigger)
    print('THEORETICAL: ', UTCDateTime(3600) + self.mintforerun)
    tdiff = (trP.stats.starttime + onset) - \
            (UTCDateTime(3600) + self.mintforerun)
    print('TDIFF: ', tdiff)

    refp = UTCDateTime(self.Origin.time) + ptime
    reftriggeronset = refp + onset - self.mintforerun

    if int(self.Config['autoxcorrcorrectur']) == 1:
        try:
            refmarkername = os.path.join(
                self.EventPath, '%s-marker' % os.path.basename(self.AF))
            fobjrefmarkername = open(refmarkername, 'w')
            fobjrefmarkername.write(
                '# Snuffler Markers File Version 0.2\n')
            fobjrefmarkername.write(
                'phase: %s 0 %s None None None XWStart None False\n'
                % (tw['xcorrstart'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write(
                'phase: %s 0 %s None None None XWEnd None False\n'
                % (tw['xcorrend'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write(
                'phase: %s 1 %s None None None TheoP None False\n'
                % (refp.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write(
                'phase: %s 3 %s None None None XTrig None False'
                % (reftriggeronset.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.close()

            cmd = 'snuffler %s --markers=%s&' % (
                os.path.join(self.EventPath, refuntouchname), refmarkername)
            os.system(cmd)

            thrOn = float(self.Config['reflta'])   # 4
            thrOff = float(self.Config['refsta'])  # 0.7
            plotTrigger(trP, cft, thrOn, thrOff)

            selection = float(
                input('Enter self picked phase in seconds: '))
            tdiff = selection - self.mintforerun

            refname = os.path.basename(self.AF) + '-shift.mseed'
            trP.stats.starttime = trP.stats.starttime - selection
            trP.write(os.path.join(self.EventPath, refname),
                      format='MSEED')
        except Exception:
            selection = 0.
            refname = os.path.basename(self.AF) + '-shift.mseed'
            trP.stats.starttime = trP.stats.starttime - selection \
                - self.mintforerun
            trP.write(os.path.join(self.EventPath, refname),
                      format='MSEED')

    '''
    tdiff = 0
    trigger = trP.stats.starttime
    '''
    To = Trigger(name, trigger, os.path.basename(self.AF), tdiff)

    return tdiff, To
def traveltimes(self):
    Logfile.red('Enter AUTOMATIC CROSSCORRELATION ')
    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++++++\n ')
    T = []
    Wdict = {}
    SNR = {}
    Config = self.Config
    cfg = ConfigObj(dict=Config)

    for i in self.StationMeta:
        Logfile.red('read in %s ' % (i))
        de = loc2degrees(self.Origin, i)
        Phase = cake.PhaseDef('P')
        model = cake.load_model()
        if cfg.colesseo_input() is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth * km,
                                      zstop=0.)
        try:
            ptime = arrivals[0].t
        except Exception:
            try:
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=self.Origin.depth * km - 2.1)
                ptime = arrivals[0].t
            except Exception:
                ptime = 0

        T.append(ptime)
        if ptime == 0:
            Logfile.red('Available phases for station %s in range %f degree'
                        % (i, de))
            Logfile.red('you tried phase P')
            raise Exception("ILLEGAL: phase definition")

        tw = self.calculateTimeWindows(ptime)

        # try:
        if cfg.pyrocko_download() is True:
            w, snr = self.readWaveformsCross_pyrocko(i, tw, ptime)
        elif cfg.colesseo_input() is True:
            w, snr = self.readWaveformsCross_colesseo(i, tw, ptime)
        else:
            w, snr = self.readWaveformsCross(i, tw, ptime)

        Wdict[i.getName()] = w
        SNR[i.getName()] = snr
        # except Exception:
        #     pass

    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++++++ ')
    Logfile.red('Exit AUTOMATIC FILTER ')

    return Wdict, SNR
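# Both traveltimes() variants above use the same fallback: ask cake for the
# first arrival of the phase, and if none exists at the exact source depth,
# retry once with a slightly perturbed depth before giving up.  A minimal
# standalone sketch of that pattern, assuming `from pyrocko import cake` as
# elsewhere in this file; the helper name first_arrival_time and the 2.1 km
# retry shift are illustrative assumptions, not part of the original code.


def first_arrival_time(model, phase_name, dist_deg, depth_m, retry_shift=2100.):
    phases = [cake.PhaseDef(phase_name)]
    for zstart in (depth_m, depth_m - retry_shift):
        arrivals = model.arrivals([dist_deg], phases=phases,
                                  zstart=zstart, zstop=0.)
        if arrivals:
            return arrivals[0].t
    return None  # no arrival of this phase for the given geometry


# usage sketch:
# model = cake.load_model()
# print(first_arrival_time(model, 'P', 60., 10000.))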
def optparse(required=(), optional=(), args=sys.argv, usage='%prog [options]', descr=None): want = required + optional parser = OptionParser(prog='cake', usage=usage, description=descr.capitalize() + '.', add_help_option=False, formatter=util.BetterHelpFormatter()) parser.add_option('-h', '--help', action='help', help='Show help message and exit.') if 'phases' in want: group = OptionGroup( parser, 'Phases', ''' Seismic phase arrivals may be either specified as traditional phase names (e.g. P, S, PP, PcP, ...) or in Cake's own syntax which is more powerful. Use the --classic option, for traditional phase names. Use the --phase option if you want to define phases in Cake's syntax. ''') group.add_option( '--phase', '--phases', dest='phases', action="append", default=[], metavar='PHASE1,PHASE2,...', help='''Comma separated list of seismic phases in Cake\'s syntax. The definition of a seismic propagation path in Cake's phase syntax is a string consisting of an alternating sequence of "legs" and "knees". A "leg" represents seismic wave propagation without any conversions, encountering only super-critical reflections. Legs are denoted by "P", "p", "S", or "s". The capital letters are used when the take-off of the "leg" is in downward direction, while the lower case letters indicate a take-off in upward direction. A "knee" is an interaction with an interface. It can be a mode conversion, a reflection, or propagation as a headwave or diffracted wave. * conversion is simply denoted as: "(INTERFACE)" or "DEPTH" * upperside reflection: "v(INTERFACE)" or "vDEPTH" * underside reflection: "^(INTERFACE)" or "^DEPTH" * normal kind headwave or diffracted wave: "v_(INTERFACE)" or "v_DEPTH" The interface may be given by name or by depth: INTERFACE is the name of an interface defined in the model, DEPTH is the depth of an interface in [km] (the interface closest to that depth is chosen). If two legs appear consecutively without an explicit "knee", surface interaction is assumed. The preferred standard interface names in cake are "conrad", "moho", "cmb" (core-mantle boundary), and "cb" (inner core boundary). The phase definition may end with a backslash "\\", to indicate that the ray should arrive at the receiver from above instead of from below. It is possible to restrict the maximum and minimum depth of a "leg" by appending "<(INTERFACE)" or "<DEPTH" or ">(INTERFACE)" or ">DEPTH" after the leg character, respectively. When plotting rays or travel-time curves, the color can be set by appending "{COLOR}" to the phase definition, where COLOR is the name of a color or an RGB or RGBA color tuple in the format "R/G/B" or "R/G/B/A", respectively. The values can be normalized to the range [0, 1] or to [0, 255]. The latter is only assumed when any of the values given exceeds 1.0. ''') group.add_option( '--classic', dest='classic_phases', action='append', default=[], metavar='PHASE1,PHASE2,...', help='''Comma separated list of seismic phases in classic nomenclature. Run "cake list-phase-map" for a list of available phase names. When plotting, color can be specified in the same way as in --phases.''') parser.add_option_group(group) if 'model' in want: group = OptionGroup(parser, 'Model') group.add_option( '--model', dest='model_filename', metavar='(NAME or FILENAME)', help='Use builtin model named NAME or user model from file ' 'FILENAME. By default, the "ak135-f-continental.m" model is ' 'used. 
Run "cake list-models" for a list of builtin models.') group.add_option( '--format', dest='model_format', metavar='FORMAT', choices=['nd', 'hyposat'], default='nd', help='Set model file format (available: nd, hyposat; default: ' 'nd).') group.add_option( '--crust2loc', dest='crust2loc', metavar='LAT,LON', help='Set model from CRUST2.0 profile at location (LAT,LON).') group.add_option( '--crust2profile', dest='crust2profile', metavar='KEY', help='Set model from CRUST2.0 profile with given KEY.') parser.add_option_group(group) if any(x in want for x in ('zstart', 'zstop', 'distances', 'sloc', 'rloc')): group = OptionGroup(parser, 'Source-receiver geometry') if 'zstart' in want: group.add_option('--sdepth', dest='sdepth', type='float', default=0.0, metavar='FLOAT', help='Source depth [km] (default: 0)') if 'zstop' in want: group.add_option('--rdepth', dest='rdepth', type='float', default=0.0, metavar='FLOAT', help='Receiver depth [km] (default: 0)') if 'distances' in want: group.add_option('--distances', dest='sdist', metavar='DISTANCES', help='Surface distances as "start:stop:n" or ' '"dist1,dist2,..." [km]') group.add_option('--sloc', dest='sloc', metavar='LAT,LON', help='Source location (LAT,LON).') group.add_option('--rloc', dest='rloc', metavar='LAT,LON', help='Receiver location (LAT,LON).') parser.add_option_group(group) if 'material' in want: group = OptionGroup( parser, 'Material', 'An isotropic elastic material may be specified by giving ' 'a combination of some of the following options. ') group.add_option('--vp', dest='vp', default=None, type='float', metavar='FLOAT', help='P-wave velocity [km/s]') group.add_option('--vs', dest='vs', default=None, type='float', metavar='FLOAT', help='S-wave velocity [km/s]') group.add_option('--rho', dest='rho', default=None, type='float', metavar='FLOAT', help='density [g/cm**3]') group.add_option('--qp', dest='qp', default=None, type='float', metavar='FLOAT', help='P-wave attenuation Qp (default: 1456)') group.add_option('--qs', dest='qs', default=None, type='float', metavar='FLOAT', help='S-wave attenuation Qs (default: 600)') group.add_option('--poisson', dest='poisson', default=None, type='float', metavar='FLOAT', help='Poisson ratio') group.add_option('--lambda', dest='lame_lambda', default=None, type='float', metavar='FLOAT', help='Lame parameter lambda [GPa]') group.add_option('--mu', dest='lame_mu', default=None, type='float', metavar='FLOAT', help='Shear modulus [GPa]') group.add_option('--qk', dest='qk', default=None, type='float', metavar='FLOAT', help='Bulk attenuation Qk') group.add_option('--qmu', dest='qmu', default=None, type='float', metavar='FLOAT', help='Shear attenuation Qmu') parser.add_option_group(group) if any(x in want for x in ('vred', 'as_degrees', 'accuracy', 'slowness', 'interface', 'aspect', 'shade_model')): group = OptionGroup(parser, 'General') if 'vred' in want: group.add_option('--vred', dest='vred', type='float', metavar='FLOAT', help='Velocity for time reduction in plot [km/s]') if 'as_degrees' in want: group.add_option( '--degrees', dest='as_degrees', action='store_true', default=False, help='Distances are in [deg] instead of [km], velocities in ' '[deg/s] instead of [km/s], slownesses in [s/deg] ' 'instead of [s/km].') if 'accuracy' in want: group.add_option('--accuracy', dest='accuracy', type='float', metavar='MAXIMUM_RELATIVE_RMS', default=0.002, help='Set accuracy for model simplification.') if 'slowness' in want: group.add_option( '--slowness', dest='slowness', type='float', metavar='FLOAT', default=0.0, 
help='Select surface slowness [s/km] (default: 0)') if 'interface' in want: group.add_option('--interface', dest='interface', metavar='(NAME or DEPTH)', help='Name or depth [km] of interface to select') if 'aspect' in want: group.add_option('--aspect', dest='aspect', type='float', metavar='FLOAT', help='Aspect ratio for plot') if 'shade_model' in want: group.add_option('--no-shade-model', dest='shade_model', action='store_false', default=True, help='Suppress shading of earth model layers') parser.add_option_group(group) if any(x in want for x in ('output_format', )): group = OptionGroup(parser, 'Output') if 'output_format' in want: group.add_option( '--output-format', dest='output_format', metavar='FORMAT', default='textual', choices=('textual', 'nd'), help='Set model output format (available: textual, nd, ' 'default: textual)') parser.add_option_group(group) if usage == 'cake help-options': parser.print_help() (options, args) = parser.parse_args(args) if len(args) != 2: parser.error( 'Cake arguments should look like "--option" or "--option=...".') d = {} as_degrees = False if 'as_degrees' in want: as_degrees = options.as_degrees d['as_degrees'] = as_degrees if 'accuracy' in want: d['accuracy'] = options.accuracy if 'output_format' in want: d['output_format'] = options.output_format if 'aspect' in want: d['aspect'] = options.aspect if 'shade_model' in want: d['shade_model'] = options.shade_model if 'phases' in want: phases = [] phase_colors = {} try: for ss in options.phases: for s in ss.split(','): s = process_color(s, phase_colors) phases.append(cake.PhaseDef(s)) for pp in options.classic_phases: for p in pp.split(','): p = process_color(p, phase_colors) phases.extend(cake.PhaseDef.classic(p)) except (cake.PhaseDefParseError, cake.UnknownClassicPhase) as e: parser.error(e) if not phases and 'phases' in required: s = process_color('P', phase_colors) phases.append(cake.PhaseDef(s)) if phases: d['phase_colors'] = phase_colors d['phases'] = phases if 'model' in want: if options.model_filename: d['model'] = cake.load_model(options.model_filename, options.model_format) if options.crust2loc or options.crust2profile: if options.crust2loc: try: args = tuple( [float(x) for x in options.crust2loc.split(',')]) except Exception: parser.error('format for --crust2loc option is ' '"LATITUDE,LONGITUDE"') elif options.crust2profile: args = (options.crust2profile.upper(), ) else: assert False if 'model' in d: d['model'] = d['model'].replaced_crust(args) else: from pyrocko import crust2x2 profile = crust2x2.get_profile(*args) d['model'] = cake.LayeredModel.from_scanlines( cake.from_crust2x2_profile(profile)) if 'vred' in want: d['vred'] = options.vred if d['vred'] is not None: if not as_degrees: d['vred'] *= r2d * cake.km / cake.earthradius if 'distances' in want: distances = None if options.sdist: if options.sdist.find(':') != -1: ssn = options.sdist.split(':') if len(ssn) != 3: parser.error('format for distances is ' '"min_distance:max_distance:n_distances"') distances = num.linspace(*map(float, ssn)) else: distances = num.array(list(map(float, options.sdist.split(','))), dtype=num.float) if not as_degrees: distances *= r2d * cake.km / cake.earthradius if options.sloc and options.rloc: try: slat, slon = tuple([float(x) for x in options.sloc.split(',')]) rlat, rlon = tuple([float(x) for x in options.rloc.split(',')]) except Exception: parser.error('format for --sloc and --rloc options is ' '"LATITUDE,LONGITUDE"') distance_sr = orthodrome.distance_accurate50m_numpy( slat, slon, rlat, rlon) distance_sr *= 
r2d / cake.earthradius if distances is not None: distances = num.concatenate((distances, [distance_sr])) else: distances = num.array([distance_sr], dtype=num.float) if distances is not None: d['distances'] = distances else: if 'distances' not in required: d['distances'] = None if 'slowness' in want: d['slowness'] = options.slowness / cake.d2r if not as_degrees: d['slowness'] /= cake.km * cake.m2d if 'interface' in want: if options.interface: try: d['interface'] = float(options.interface) * cake.km except ValueError: d['interface'] = options.interface else: d['interface'] = None if 'zstart' in want: d['zstart'] = options.sdepth * cake.km if 'zstop' in want: d['zstop'] = options.rdepth * cake.km if 'material' in want: md = {} userfactor = dict(vp=1000., vs=1000., rho=1000., qp=1., qs=1., qmu=1., qk=1., lame_lambda=1.0e9, lame_mu=1.0e9, poisson=1.) for k in userfactor.keys(): if getattr(options, k) is not None: md[k] = getattr(options, k) * userfactor[k] if not (bool('lame_lambda' in md) == bool('lame_mu' in md)): parser.error('lambda and mu must be specified both.') if 'lame_lambda' in md and 'lame_mu' in md: md['lame'] = md.pop('lame_lambda'), md.pop('lame_mu') if md: try: d['material'] = cake.Material(**md) except cake.InvalidArguments as e: parser.error(str(e)) for k in list(d.keys()): if k not in want: del d[k] for k in required: if k not in d: if k == 'model': d['model'] = cake.load_model('ak135-f-continental.m') elif k == 'distances': d['distances'] = num.linspace(10*cake.km, 100*cake.km, 10) \ / cake.earthradius * r2d elif k == 'phases': d['phases'] = list(map(cake.PhaseDef, 'Pp')) else: parser.error('missing %s' % k) return Anon(d)
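# The --phase help text above spells out Cake's phase syntax (legs, knees,
# reflections, conversions, headwaves).  A few concrete definitions written
# in that syntax, collected here purely as an illustration and assuming
# `from pyrocko import cake` as elsewhere in this file; the particular
# phases chosen are examples, not an exhaustive list.


def _phase_syntax_examples():
    examples = [
        'P',            # direct P, take-off downward
        'p',            # direct P, take-off upward
        'Pv(moho)p',    # PmP: P reflected at the upper side of the moho
        'Pv_(moho)p',   # Pn: P headwave/diffraction along the moho
        'P<(moho)',     # Pg: P restricted to stay above the moho
        'P(moho)s',     # P converted to S on transmission through the moho
        'Pv(cmb)p',     # PcP: P reflected at the core-mantle boundary
        'P\\',          # P required to arrive at the receiver from above
    ]
    return [cake.PhaseDef(s) for s in examples]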
axes.set_xlim(-1.1, 1.1) axes.set_ylim(-1.1, 1.1) projection = 'lambert' beachball.plot_beachball_mpl(mt, axes, position=(0., 0.), size=2.0, color_t=(0.3, 0.3, 0.8), projection=projection, size_units='data') for rlat, rlon in rlatlons: distance = orthodrome.distance_accurate50m(slat, slon, rlat, rlon) rays = mod.arrivals(phases=cake.PhaseDef('P'), zstart=sdepth, zstop=rdepth, distances=[distance * cake.m2d]) if not rays: continue takeoff = rays[0].takeoff_angle() azi = orthodrome.azimuth(slat, slon, rlat, rlon) # to spherical coordinates, r, theta, phi in radians rtp = num.array([[1., num.deg2rad(takeoff), num.deg2rad(90. - azi)]]) # to 3D coordinates (x, y, z) points = beachball.numpy_rtp2xyz(rtp)
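# The fragment above converts each ray's take-off angle and station azimuth
# into spherical coordinates (r, theta, phi) and then into Cartesian points
# on the focal sphere via beachball.numpy_rtp2xyz.  A minimal plain-numpy
# sketch of the same conversion for a single ray, kept separate from the
# loop above and using the standard spherical convention
# x = r*sin(theta)*cos(phi), y = r*sin(theta)*sin(phi), z = r*cos(theta);
# the helper name is illustrative and not part of the original code.


def _takeoff_azimuth_to_xyz(takeoff_deg, azimuth_deg):
    theta = num.deg2rad(takeoff_deg)       # take-off angle
    phi = num.deg2rad(90. - azimuth_deg)   # same phi convention as above
    return num.array([num.sin(theta) * num.cos(phi),
                      num.sin(theta) * num.sin(phi),
                      num.cos(theta)])     # unit radius, r = 1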
def refTrigger(self, RefWaveform, phase, cfg_yaml): Config = self.Config cfg = ConfigObj(dict=Config) name = ('%s.%s.%s.%s') % (RefWaveform[0].stats.network, RefWaveform[0].stats.station, RefWaveform[0].stats.location, RefWaveform[0].stats.channel) i = self.searchMeta(name, self.StationMeta) de = loc2degrees(self.Origin, i) ptime = 0 Phase = cake.PhaseDef(phase) model = cake.load_model() if cfg_yaml.config_data.colesseo_input is True: arrivals = model.arrivals([de, de], phases=Phase, zstart=self.Origin.depth, zstop=0.) else: arrivals = model.arrivals([de, de], phases=Phase, zstart=self.Origin.depth*km, zstop=0.) try: ptime = arrivals[0].t except Exception: arrivals = model.arrivals([de, de], phases=Phase, zstart=self.Origin.depth*km-0.1) ptime = arrivals[0].t if ptime == 0: raise Exception("\033[31mILLEGAL: phase definition\033[0m") tw = self.calculateTimeWindows(ptime) if cfg_yaml.config_data.pyrocko_download is True: stP = self.readWaveformsPicker_pyrocko(i, tw, self.Origin, ptime, cfg_yaml) elif cfg_yaml.config_data.colesseo_input is True: stP = self.readWaveformsPicker_colos(i, tw, self.Origin, ptime, cfg_yaml) else: stP = self.readWaveformsPicker(i, tw, self.Origin, ptime, cfg_yaml) refuntouchname = os.path.basename(self.AF)+'-refstation-raw.mseed' stP.write(os.path.join(self.EventPath, refuntouchname), format='MSEED', byteorder='>') stP.filter("bandpass", freqmin=float(cfg_yaml.config_xcorr.refstationfreqmin), freqmax=float(cfg_yaml.config_xcorr.refstationfreqmax)) stP.trim(tw['xcorrstart'], tw['xcorrend']) trP = stP[0] trP.stats.starttime = UTCDateTime(3600) refname = os.path.basename(self.AF)+'-refstation-filtered.mseed' trP.write(os.path.join(self.EventPath, refname), format='MSEED', byteorder='>') sta = float(cfg_yaml.config_xcorr.refsta) lta = float(cfg_yaml.config_xcorr.reflta) cft = recSTALTA(trP.data, int(sta * trP.stats.sampling_rate), int(lta * trP.stats.sampling_rate)) t = triggerOnset(cft, lta, sta) try: onset = t[0][0] / trP.stats.sampling_rate except Exception: onset = self.mintforerun trigger = trP.stats.starttime+onset tdiff = (trP.stats.starttime + onset)-(UTCDateTime(3600) + self.mintforerun) refp = UTCDateTime(self.Origin.time)+ptime reftriggeronset = refp+onset-self.mintforerun if cfg_yaml.config_xcorr.autoxcorrcorrectur is True: refmarkername = os.path.join(self.EventPath, ('%s-marker') % (os.path.basename( self.AF))) fobjrefmarkername = open(refmarkername, 'w') fobjrefmarkername.write('# Snuffler Markers File Version\ 0.2\n') fobjrefmarkername.write(('phase: %s 0 %s None None None XWStart None False\n') % (tw['xcorrstart'].strftime('%Y-%m-%d %H:%M:%S.%f'), name)) fobjrefmarkername.write(('phase: %s 0 %s None None None XWEnd None False\n') % (tw['xcorrend'].strftime('%Y-%m-%d %H:%M:%S.%f'), name)) fobjrefmarkername.write(('phase: %s 1 %s None None None TheoP None False\n') % (refp.strftime('%Y-%m-%d %H:%M:%S.%f'), name)) fobjrefmarkername.write(('phase: %s 3 %s None None None XTrig None False') % (reftriggeronset.strftime('%Y-%m-%d %H:%M:%S.%f'), name)) fobjrefmarkername.close() cmd = 'snuffler %s --markers=%s&' % (os.path.join( self.EventPath, refuntouchname), refmarkername) os.system(cmd) thrOn = float(self.Config['reflta']) thrOff = float(self.Config['refsta']) plotTrigger(trP, cft, thrOn, thrOff) selection = float(input('Enter self picked phase in seconds: ')) tdiff = selection-self.mintforerun refname = os.path.basename(self.AF)+'-shift.mseed' trP.stats.starttime = trP.stats.starttime - selection trP.write(os.path.join(self.EventPath, refname), 
format='MSEED') ''' tdiff = 0 trigger = trP.stats.starttime ''' To = Trigger(name, trigger, os.path.basename(self.AF), tdiff) return tdiff, To
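# refTrigger above feeds the recursive STA/LTA characteristic function into
# triggerOnset with the window lengths reflta/refsta reused as the on/off
# values.  Threshold levels and window lengths are conceptually different
# quantities, so a minimal sketch of the usual separation is given here,
# assuming the same obspy helpers (recSTALTA, triggerOnset) already imported
# for this module; the default numbers are illustrative only (the 4 / 0.7
# pair echoes the inline comments of the legacy version above).


def _sta_lta_onset(tr, sta_s=0.5, lta_s=4.0, thr_on=4.0, thr_off=0.7):
    df = tr.stats.sampling_rate
    cft = recSTALTA(tr.data, int(sta_s * df), int(lta_s * df))
    picks = triggerOnset(cft, thr_on, thr_off)
    if len(picks) == 0:
        return None
    return picks[0][0] / df  # onset in seconds after the trace start time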
def calcTTTAdv(cfg, station, Origin, flag, arrayname, Xcorrshift, Refshift,
               phase, flag_rpe=False):

    if flag_rpe is True:
        dimX = cfg.config_geometry.dimx_emp
        dimY = cfg.config_geometry.dimy_emp
    else:
        dimX = cfg.config_geometry.dimx
        dimY = cfg.config_geometry.dimy
    gridspacing = cfg.config_geometry.gridspacing

    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])
    o_depth = float(Origin['depth'])

    oLator = o_lat + dimX / 2
    oLonor = o_lon + dimY / 2
    oLatul = 0
    oLonul = 0
    o_dip = 80.
    plane = False

    TTTGridMap = {}
    LMINMAX = []
    GridArray = {}
    locStation = Location(station.lat, station.lon)
    sdelta = loc2degrees(Location(o_lat, o_lon), locStation)

    Phase = cake.PhaseDef(phase)
    path = palantiri.__path__
    traveltime_model = cfg.config.traveltime_model
    model = cake.load_model(path[0] + '/data/' + traveltime_model)

    z = 0
    if plane is True:
        depth = np.linspace(0., 40., num=dimY)
        for i in range(70):
            oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing
            if z == 0 and i == 0:
                Latul = oLatul
            o = 0
            for j in range(40):
                oLonul = o_lon - ((dimY - 1) / 2) * gridspacing \
                    + j * gridspacing / np.cos(o_dip)
                if o == 0 and j == 0:
                    Lonul = oLonul

                de = loc2degrees(Location(oLatul, oLonul), locStation)
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=depth[j] * km, zstop=0.)
                try:
                    ttime = arrivals[0].t
                except Exception:
                    try:
                        arrivals = model.arrivals([de, de], phases=Phase,
                                                  zstart=depth[j] * km - 2.5,
                                                  zstop=depth[j] * km + 2.5,
                                                  refine=True)
                        ttime = arrivals[0].t
                    except Exception:
                        tt = obs_TravelTimes(de, o_depth)
                        for k in tt:
                            if k['phase_name'] == 'P' \
                                    or k['phase_name'] == '%sdiff' % phase:
                                ttime = k['time']
                        print('Something wrong with phase arrival, too large'
                              ' distances chosen?')
                GridArray[(i, j)] = GridElem(oLatul, oLonul, depth[j],
                                             ttime, de)
                LMINMAX.append(ttime)
                if ttime == 0:
                    raise Exception("\033[31mILLEGAL: phase definition\033[0m")
    else:
        for i in range(dimX):
            oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing
            if z == 0 and i == 0:
                Latul = oLatul
            o = 0
            for j in range(dimY):
                oLonul = o_lon - ((dimY - 1) / 2) * gridspacing \
                    + j * gridspacing
                if o == 0 and j == 0:
                    Lonul = oLonul

                de = loc2degrees(Location(oLatul, oLonul), locStation)
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=o_depth * km)
                try:
                    ttime = arrivals[0].t
                except Exception:
                    try:
                        arrivals = model.arrivals([de, de], phases=Phase,
                                                  zstart=o_depth * km,
                                                  zstop=o_depth * km,
                                                  refine=True)
                        ttime = arrivals[0].t
                    except Exception:
                        arrivals = model.arrivals([de, de], phases=Phase,
                                                  zstart=o_depth * km - 2.5,
                                                  zstop=o_depth * km + 2.5,
                                                  refine=True)
                        ttime = arrivals[0].t
                GridArray[(i, j)] = GridElem(oLatul, oLonul, o_depth,
                                             ttime, de)
                LMINMAX.append(ttime)
                if ttime == 0:
                    raise Exception("\033[31mILLEGAL: phase definition\033[0m")

    mint = min(LMINMAX)
    maxt = max(LMINMAX)
    TTTGridMap[station.getName()] = TTTGrid(o_depth, mint, maxt, Latul,
                                            Lonul, oLator, oLonor, GridArray)
    k = MinTMaxT(mint, maxt)

    if flag_rpe is True:
        Basic.dumpToFile(str(flag) + '-ttt_emp.pkl', TTTGridMap)
        Basic.dumpToFile('minmax-emp' + str(flag) + '.pkl', k)
        Basic.dumpToFile('station-emp' + str(flag) + '.pkl', station)
    else:
        Basic.dumpToFile(str(flag) + '-ttt.pkl', TTTGridMap)
        Basic.dumpToFile('minmax-' + str(flag) + '.pkl', k)
        Basic.dumpToFile('station-' + str(flag) + '.pkl', station)
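# calcTTTAdv places its dimX x dimY travel-time grid so that the source
# epicentre sits at the centre node.  A small worked check of that node
# arithmetic, kept separate from any travel-time computation; the function
# name is illustrative and not part of the original code.


def _grid_node(o_lat, o_lon, dim_x, dim_y, gridspacing, i, j):
    # node (i, j); for i == (dim_x - 1) / 2 and j == (dim_y - 1) / 2 this
    # returns (o_lat, o_lon) itself, i.e. the grid is centred on the origin
    lat = o_lat - ((dim_x - 1) / 2) * gridspacing + i * gridspacing
    lon = o_lon - ((dim_y - 1) / 2) * gridspacing + j * gridspacing
    return lat, lon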