def test_writeread(self):
    nslc_ids = [('', 'STA', '', '*')]

    event = model.Event(lat=111., lon=111., depth=111., time=111.)
    _marker = marker.Marker(nslc_ids=nslc_ids, tmin=1., tmax=10.)
    emarker = marker.EventMarker(event=event)
    pmarker = marker.PhaseMarker(nslc_ids=nslc_ids, tmin=1., tmax=10.)
    pmarker.set_event(event)
    emarker.set_alerted(True)

    markers = [_marker, emarker, pmarker]
    fn = tempfile.mkstemp()[1]

    marker.save_markers(markers, fn)
    in_markers = marker.load_markers(fn)
    in__marker, in_emarker, in_pmarker = in_markers

    for m in in_markers:
        if not isinstance(m, marker.EventMarker):
            assert (m.tmax - m.tmin) == 9.
        else:
            # the alerted flag is not persisted to file
            assert not m.is_alerted()

    marker.associate_phases_to_events([in_pmarker, in_emarker])

    in_event = in_pmarker.get_event()

    assert all((in_event.lat == 111., in_event.lon == 111.,
                in_event.depth == 111., in_event.time == 111.))

    assert in_pmarker.get_event_hash() == in_event.get_hash()
    assert in_pmarker.get_event_time() == 111.
def process(self, fband, taper, twd, debug):
    no_events = len(self.events)
    for i_ev, event in enumerate(self.events):
        tr_nslc_ids = []
        self.logs.info('Processing event %s of %s' % (i_ev + 1, no_events))
        section = Section(event, self.stations)
        skipped = 0
        unskipped = 0
        for i_s, s in enumerate(self.stations):
            dist = distance_accurate50m(event, s)
            arrival = self.arrT[i_ev, i_s]
            if num.isnan(arrival):
                skipped += 1
                self.logs.warning('skipped %s.%s %s'
                                  % (s.network, s.station, event.time))
                continue
            else:
                unskipped += 1

            def selector(tr):
                return (s.network, s.station, self.component) == \
                    (tr.network, tr.station, tr.channel)

            tr_generator = self.data_pile.chopper(
                tmin=arrival - twd[0], tmax=arrival + twd[1],
                trace_selector=selector, load_data=True)

            if self.method == 'syn_comp':
                tr_syn_generator = self.syn_data_pile.chopper(
                    tmin=arrival - twd[0], tmax=arrival + twd[1],
                    trace_selector=selector, load_data=True)

            for tr in tr_generator:
                if len(tr) == 1:
                    tr = tr[0]
                    if len(tr.ydata) > 0 and num.max(
                            num.abs(tr.get_ydata())) != 0:
                        dtype = type(tr.ydata[0])
                        tr.ydata -= dtype(tr.get_ydata().mean())

                        # SNR gate: mean(A*A_signal) / mean(A*A_noise),
                        # signal window starts 10 samples before the peak,
                        # noise window is the first 60 samples.
                        st_s = num.argmax(num.abs(tr.ydata)) - 10
                        snr = num.mean([y*y for y in tr.ydata[st_s:st_s+60]]) / \
                            num.mean([y*y for y in tr.ydata[0:60]])
                        if snr < self.snr_thresh:
                            continue

                        tr.highpass(fband['order'], fband['corner_hp'])
                        tr.taper(taper, chop=False)
                        tr.lowpass(fband['order'], fband['corner_lp'])

                        if debug is True:
                            self.logs.debug('SNR %s' % snr)
                            self.logs.debug('arrival time %s'
                                            % util.time_to_str(arrival))
                            trace.snuffle(tr, markers=[
                                pm.Marker(nslc_ids=[tr.nslc_id],
                                          tmin=arrival, tmax=arrival + 3)])

                        if num.max(num.abs(tr.get_ydata())) != 0:
                            section.max_tr[tr.nslc_id] = num.max(
                                num.abs(tr.get_ydata()))
                            tr_nslc_ids.append(tr.nslc_id)
                else:
                    for tt in tr:
                        if len(tt.ydata) > 0 and num.max(
                                num.abs(tt.get_ydata())) != 0:
                            dtype = type(tt.ydata[0])
                            tt.ydata -= dtype(tt.get_ydata().mean())
                            st_s = num.argmax(num.abs(tt.ydata)) - 10
                            snr = num.mean([y*y for y in tt.ydata[st_s:st_s+60]]) / \
                                num.mean([y*y for y in tt.ydata[0:60]])
                            if snr < self.snr_thresh:
                                continue

                            tt.highpass(fband['order'], fband['corner_hp'])
                            tt.taper(taper, chop=False)
                            tt.lowpass(fband['order'], fband['corner_lp'])

                            if debug is True:
                                self.logs.debug('SNR %s' % snr)
                                self.logs.debug('arrival time %s'
                                                % util.time_to_str(arrival))
                                trace.snuffle(tt, markers=[
                                    pm.Marker(nslc_ids=[tt.nslc_id],
                                              tmin=arrival,
                                              tmax=arrival + 3)])

                            if num.max(num.abs(tt.get_ydata())) != 0:
                                section.max_tr[tt.nslc_id] = num.max(
                                    num.abs(tt.get_ydata()))
                                tr_nslc_ids.append(tt.nslc_id)

            if self.method == 'syn_comp':
                for tr in tr_syn_generator:
                    if len(tr) == 1:
                        tr = tr[0]
                        if len(tr.ydata) > 0 and num.max(
                                num.abs(tr.get_ydata())) != 0:
                            dtype = type(tr.ydata[0])
                            tr.ydata -= dtype(tr.get_ydata().mean())
                            st_s = num.argmax(num.abs(tr.ydata)) - 10
                            snr = num.mean([y*y for y in tr.ydata[st_s:st_s+60]]) / \
                                num.mean([y*y for y in tr.ydata[0:60]])

                            tr.highpass(fband['order'], fband['corner_hp'])
                            tr.taper(taper, chop=False)
                            tr.lowpass(fband['order'], fband['corner_lp'])

                            if debug is True:
                                self.logs.debug('SNR %s' % snr)
                                self.logs.debug('arrival time %s'
                                                % util.time_to_str(arrival))
                                trace.snuffle(tr, markers=[
                                    pm.Marker(nslc_ids=[tr.nslc_id],
                                              tmin=arrival,
                                              tmax=arrival + 3)])

                            if num.max(num.abs(tr.get_ydata())) != 0:
                                section.max_tr_syn[tr.nslc_id[0:2]] = num.max(
                                    num.abs(tr.get_ydata()))
                    else:
                        for tt in tr:
                            if len(tt.ydata) > 0 and num.max(
                                    num.abs(tt.get_ydata())) != 0:
                                dtype = type(tt.ydata[0])
                                tt.ydata -= dtype(tt.get_ydata().mean())
                                st_s = num.argmax(num.abs(tt.ydata)) - 10
                                snr = num.mean([y*y for y in tt.ydata[st_s:st_s+60]]) / \
                                    num.mean([y*y for y in tt.ydata[0:60]])
                                if snr < self.snr_thresh:
                                    continue

                                tt.highpass(fband['order'],
                                            fband['corner_hp'])
                                tt.taper(taper, chop=False)
                                tt.lowpass(fband['order'],
                                           fband['corner_lp'])

                                if debug is True:
                                    self.logs.debug('SNR %s' % snr)
                                    self.logs.debug(
                                        'arrival time %s'
                                        % util.time_to_str(arrival))
                                    trace.snuffle(tt, markers=[
                                        pm.Marker(nslc_ids=[tt.nslc_id],
                                                  tmin=arrival,
                                                  tmax=arrival + 3)])

                                if num.max(num.abs(tt.get_ydata())) != 0:
                                    section.max_tr_syn[tt.nslc_id[0:2]] = \
                                        num.max(num.abs(tt.get_ydata()))

        self.logs.debug('skipped %s stations, used %s'
                        % (skipped, unskipped))
        section.finish(self.method, fband, taper, i_ev)
        self.all_nslc_ids.update(tr_nslc_ids)
        gc.collect()
        self.sections.append(section)

        if self.method == 'median_all_avail' and i_ev == no_events - 1:
            self.handle_median_stats_option()
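# Illustrative sketch (not part of the module above): the SNR gate used in
# process(), expressed as a standalone numpy helper. The function name and
# the defaults (10-sample pre-peak offset, 60-sample windows) simply mirror
# the hard-coded values above; treat this as an assumption-laden reference,
# not the project's API.
import numpy as num


def snr_peak_vs_leading_noise(ydata, pre=10, nwin=60):
    """mean(A*A_signal) / mean(A*A_noise): mean power in a window starting
    just before the absolute peak, divided by the mean power of the first
    nwin samples."""
    ydata = num.asarray(ydata, dtype=float)
    i_peak = int(num.argmax(num.abs(ydata)))
    i_start = max(i_peak - pre, 0)  # avoid a negative slice start
    signal = ydata[i_start:i_start + nwin]
    noise = ydata[:nwin]
    return num.mean(signal**2) / num.mean(noise**2)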
def evaluate(self, engine, source, targets,
             dataset=None, trs=None, extra_responses=[], debug=False):

    from ..waveform import target as base

    trs_processed = []
    trs_orig = []
    for itarget, target in enumerate(targets):
        if target.codes[-1] not in self.channels:
            continue

        store = engine.get_store(target.store_id)

        tmin = source.time + store.t(self.timing_tmin, source, target)
        tmax = source.time + store.t(self.timing_tmax, source, target)

        if self.fmin is not None and self.fmax is not None:
            freqlimits = [
                self.fmin / 2.,
                self.fmin,
                self.fmax,
                self.fmax * 2.]
            tfade = 1. / self.fmin
        else:
            freqlimits = None
            tfade = 0.0

        if dataset is not None:
            bazi = base.backazimuth_for_waveform(
                target.azimuth, target.codes)

            tr = dataset.get_waveform(
                target.codes,
                tinc_cache=1.0 / self.fmin,
                quantity=self.quantity,
                tmin=tmin,
                tmax=tmax,
                freqlimits=freqlimits,
                tfade=tfade,
                deltat=store.config.deltat,
                cache=True,
                backazimuth=bazi)
        else:
            tr = trs[itarget]
            tr.extend(tmin - tfade, tmax + tfade, fillmethod='repeat')
            tr = tr.transfer(freqlimits=freqlimits, tfade=tfade)

        trs_orig.append(tr)

        tr = tr.copy()

        responses = []
        responses.extend(extra_responses)

        # number of differentiations needed to get from the target's
        # quantity to the requested quantity (negative: integrations)
        ndiff = \
            WaveformQuantity.choices.index(self.quantity) - \
            WaveformQuantity.choices.index(target.quantity)

        if ndiff > 0:
            responses.append(trace.DifferentiationResponse(ndiff))

        if ndiff < 0:
            responses.append(trace.IntegrationResponse(-ndiff))

        if self.response:
            responses.append(self.response)

        if self.named_response:
            responses.append(NamedResponse.map[self.named_response])

        if responses:
            trans = trace.MultiplyResponse(responses)
            try:
                tr = tr.transfer(transfer_function=trans)
            except trace.TraceTooShort:
                raise FeatureMeasurementFailed('transfer: trace too short')

        if tmin is None or tmax is None:
            raise FeatureMeasurementFailed(
                'timing determination failed (phase unavailable?)')

        tr.chop(tmin, tmax)
        tr.set_location(tr.location + '-' + self.name + '-proc')
        trs_processed.append(tr)

    markers = []
    marker_candidates = []
    if self.method in ['peak_component', 'peak_to_peak_component']:
        component_amp_maxs = []
        for tr in trs_processed:
            y = tr.get_ydata()
            if self.method == 'peak_component':
                yabs = num.abs(y)
                i_at_amax = num.argmax(yabs)
                amax = yabs[i_at_amax]
                if debug:
                    t_at_amax = tr.tmin + i_at_amax * tr.deltat
                    mark = marker.Marker(
                        [tr.nslc_id], t_at_amax, t_at_amax, 0)
                    marker_candidates.append(mark)

                component_amp_maxs.append(amax)
            else:
                i_at_amax = num.argmax(y)
                i_at_amin = num.argmin(y)
                amax = y[i_at_amax]
                amin = y[i_at_amin]
                if debug:
                    t_at_amax = tr.tmin + i_at_amax * tr.deltat
                    t_at_amin = tr.tmin + i_at_amin * tr.deltat
                    ts = sorted([t_at_amax, t_at_amin])
                    mark = marker.Marker(
                        [tr.nslc_id], ts[0], ts[1], 0)
                    marker_candidates.append(mark)

                component_amp_maxs.append(amax - amin)

        i_at_amax = num.argmax(component_amp_maxs)
        if debug:
            markers.append(marker_candidates[i_at_amax])

        amp_max = component_amp_maxs[i_at_amax]

    elif self.method == 'peak_absolute_vector':
        trsum = None
        for tr in trs_processed:
            tr.set_ydata(tr.get_ydata()**2)
            if trsum is None:
                trsum = tr
            else:
                trsum.add(tr)

        # amplitude of the vector sum of the squared components
        trsum.set_ydata(num.sqrt(trsum.get_ydata()))
        trsum.set_codes(channel='SUM')

        yabs = trsum.get_ydata()

        i_at_amax = num.argmax(yabs)
        amax = yabs[i_at_amax]
        t_at_amax = trsum.tmin + i_at_amax * trsum.deltat
        amp_max = amax

        if debug:
            markers.append(marker.Marker(
                [trsum.nslc_id], t_at_amax, t_at_amax, 0))

        trs_processed.append(trsum)

    elif self.method == 'spectral_average':
        component_amp_maxs = []
        for tr in trs_processed:
            freqs, values = tr.spectrum()
            component_amp_maxs.append(num.mean(num.abs(values[
                num.logical_and(self.fmin <= freqs,
                                freqs <= self.fmax)])))

        amp_max = num.mean(component_amp_maxs)

    if debug:
        trs_out = []
        for tr in trs_orig:
            tr_out = tr.copy()
            tr_out.set_location(tr_out.location + '-' + self.name)
            trs_out.append(tr_out)

        return amp_max, (trs_out + trs_processed, markers)

    return amp_max, None
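# Illustrative sketch (an assumption, not the project's API): how the ndiff
# bookkeeping in evaluate() maps a quantity mismatch onto differentiation or
# integration responses from pyrocko.trace. The ordering ('displacement',
# 'velocity', 'acceleration') and the helper name are illustrative only.
from pyrocko import trace


def quantity_conversion_responses(
        have, want,
        order=('displacement', 'velocity', 'acceleration')):
    """Responses converting a trace recorded as `have` into `want`:
    one differentiation per step up the list, one integration per step
    down, nothing if the quantities already match."""
    ndiff = order.index(want) - order.index(have)
    if ndiff > 0:
        return [trace.DifferentiationResponse(ndiff)]
    if ndiff < 0:
        return [trace.IntegrationResponse(-ndiff)]
    return []


# e.g. trace.MultiplyResponse(quantity_conversion_responses(
#     'velocity', 'displacement')) wraps a single integration.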
def prep_orient(datapath, st, loc, catalog, dir_ro, v_rayleigh, bp,
                dt_start, dt_stop, ccmin=0.80,
                plot_heatmap=False, plot_distr=False, debug=False):
    """
    Perform orientation analysis using Rayleigh waves, main function.

    Time window: 20 s before the 4.0 km/s arrival and 600 s afterwards
    (Stachnik et al., 2012).
    - compute the radial component for trial correction angles covering
      the full circle (-180 to 180 deg)
    - for each angle, cross-correlate hilbert(R) with the Z component
    - call plotting functions and/or write results to file

    :param datapath: path to rrd data
    :param st: current station (pyrocko station object)
    :param catalog: list of pyrocko events used for analysis
    :param dir_ro: output directory
    :param plot_heatmap: bool, optional
    :param plot_distr: bool, optional
    """
    logs = logging.getLogger('prep_orient')
    st_data_pile = pile.make_pile(
        datapath, regex='%s_%s_' % (st.network, st.station),
        show_progress=False)
    n_ev = len(catalog)

    if st_data_pile.tmin is not None and st_data_pile.tmax is not None:

        # calculate distance between all events and the current station,
        # then convert to expected Rayleigh-wave arrival times
        r_arr_by_ev = num.empty(n_ev)
        ev_lats = num.asarray([ev.lat for ev in catalog])
        ev_lons = num.asarray([ev.lon for ev in catalog])
        dists = distance_accurate50m_numpy(a_lats=ev_lats, a_lons=ev_lons,
                                           b_lats=st.lat, b_lons=st.lon,
                                           implementation='c')
        r_arr_by_ev = (dists / 1000.) / v_rayleigh
        cc_i_ev_vs_rota = num.empty((n_ev, 360))
        rot_angles = range(-180, 180, 1)
        for i_ev, ev in enumerate(catalog):
            arrT = ev.time + r_arr_by_ev[i_ev]

            start_twd1 = ev.time
            end_twd1 = arrT + 1800

            trZ = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'Z')
            trR = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'R')
            trT = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'T')

            start_twd2 = ev.time + r_arr_by_ev[i_ev] - dt_start
            end_twd2 = arrT + dt_stop

            if len(trZ) == 1 and len(trR) == 1 and len(trT) == 1:
                trZ = trZ[0]
                trR = trR[0]
                trT = trT[0]
                # debugging - window selection:
                if debug is True:
                    trace.snuffle([trZ, trR, trT], markers=[
                        pm.Marker(nslc_ids=[trZ.nslc_id, trR.nslc_id,
                                            trT.nslc_id],
                                  tmin=start_twd2, tmax=end_twd2),
                        pm.Marker(nslc_ids=[trZ.nslc_id, trR.nslc_id,
                                            trT.nslc_id],
                                  tmin=arrT, tmax=arrT + 3)])
            else:
                cc_i_ev_vs_rota[i_ev, :] = num.nan
                continue

            try:
                trZ.bandpass(bp[0], bp[1], bp[2])
                trZ.chop(tmin=start_twd2, tmax=end_twd2)
            except trace.NoData:
                logs.warning('no data %s %s %s' % (trZ, trR, trT))
                continue

            for i_r, r in enumerate(rot_angles):
                print('rotation angle [deg]: %5d' % r, end='\r')
                rot_2, rot_3 = trace.rotate(traces=[trR, trT], azimuth=r,
                                            in_channels=['R', 'T'],
                                            out_channels=['2', '3'])

                rot_2_y = rot_2.ydata
                rot_2_hilb = num.imag(trace.hilbert(rot_2_y, len(rot_2_y)))
                rot_2_hilb_tr = trace.Trace(deltat=rot_2.deltat,
                                            ydata=rot_2_hilb,
                                            tmin=rot_2.tmin)
                # known issue: rot_2 and rot_2_hilb_tr look identical, i.e.
                # no visible phase shift, although num.imag of the analytic
                # signal should give a 90 deg shift.
                # trace.snuffle([rot_2, rot_2_hilb_tr])
                rot_2_hilb_tr.bandpass(bp[0], bp[1], bp[2])
                rot_2_hilb_tr.chop(tmin=start_twd2, tmax=end_twd2)

                # normalize traces
                trZ.ydata /= abs(max(trZ.ydata))
                rot_2_hilb_tr.ydata /= abs(max(rot_2_hilb_tr.ydata))

                c = trace.correlate(trZ, rot_2_hilb_tr,
                                    mode='valid',
                                    normalization='normal')

                t, coef = c.max()
                # coefficient at the absolute extreme, kept for inspection
                t2, coef2 = max_or_min(c)

                cc_i_ev_vs_rota[i_ev, i_r] = coef

        if plot_heatmap is True:
            fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 2))

            cax = ax.imshow(cc_i_ev_vs_rota, interpolation='nearest',
                            vmin=-1.0, vmax=1.0,
                            aspect='auto', extent=[-180, 180, n_ev, 0],
                            cmap='binary')
            ax.set_ylabel('i_ev')
            ax.set_xlabel('Correction angle (deg)')
            ax.set_title('%s %s' % (st.network, st.station))
            cbar = fig.colorbar(cax, ticks=[0, 0.5, 1.0],
                                orientation='horizontal',
                                fraction=0.05, pad=0.5)
            cbar.ax.set_xticklabels(['0', '0.5', '1.0'])
            plt.tight_layout()
            fig.savefig(os.path.join(
                dir_ro, '%s_%s_%s_rot_cc_heatmap.png'
                % (st.network, st.station, loc)))
            plt.close()

        if plot_distr is True:
            plot_ccdistr_each_event(cc_i_ev_vs_rota, catalog,
                                    rot_angles, st, loc, dir_ro)

        median_a, mean_a, std_a, switched, n_ev =\
            get_m_angle_switched(cc_i_ev_vs_rota, catalog, st, ccmin)

        dict_ev_angle = get_m_angle_all(cc_i_ev_vs_rota, catalog, st, ccmin)

        return median_a, mean_a, std_a, switched, dict_ev_angle, n_ev
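# Minimal sketch (simplified from prep_orient above, not a drop-in
# replacement): the per-event grid search over correction angles, given
# already prepared Z, R, T traces. Band-passing, chopping and normalization
# are omitted; the function name and signature are assumptions.
import numpy as num
from pyrocko import trace


def best_correction_angle(trZ, trR, trT, angles=range(-180, 180)):
    """Return (angle, coefficient) maximizing the correlation of Z with the
    Hilbert transform of the rotated radial component."""
    coefs = []
    for r in angles:
        rot_2, rot_3 = trace.rotate(
            traces=[trR, trT], azimuth=r,
            in_channels=['R', 'T'], out_channels=['2', '3'])
        hilb = num.imag(trace.hilbert(rot_2.ydata, len(rot_2.ydata)))
        tr_hilb = trace.Trace(
            deltat=rot_2.deltat, ydata=hilb, tmin=rot_2.tmin)
        c = trace.correlate(trZ, tr_hilb, mode='valid',
                            normalization='normal')
        t, coef = c.max()
        coefs.append(coef)
    i_best = int(num.argmax(coefs))
    return list(angles)[i_best], coefs[i_best]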