def _run_single_station(db_evid, angles, config_filtering, config_processing):
    """
    Internal processing function for running sequence of candidate angles
    over a single station.

    :param db_evid: Dictionary of event streams (3-channel ZNE) keyed by event ID. \
        Best obtained using class NetworkEventDataset
    :type db_evid: sortedcontainers.SortedDict or similar dict-like
    :param angles: Sequence of candidate correction angles to try (degrees)
    :type angles: Iterable(float)
    :param config_filtering: Waveform filtering options for RF processing
    :type config_filtering: dict
    :param config_processing: RF processing options
    :type config_processing: dict
    :return: Amplitude metric as a function of angle. Same length as angles array.
    :rtype: list(float)
    """
    def _wrap_azimuth(azimuth):
        # Fold an azimuth into the canonical [0, 360) range, one full turn
        # at a time (same incremental scheme used throughout this module).
        while azimuth < 0:
            azimuth += 360
        while azimuth >= 360:
            azimuth -= 360
        return azimuth

    metrics = []
    for angle in angles:
        collected = RFStream()
        for evid, stream in db_evid.items():
            # Work on a copy so the source dataset's back-azimuths are untouched.
            rotated = copy.deepcopy(stream)
            for trace in rotated:
                trace.stats.back_azimuth = _wrap_azimuth(trace.stats.back_azimuth + angle)
            rf_3ch = transform_stream_to_rf(evid, RFStream(rotated),
                                            config_filtering, config_processing)
            if rf_3ch is not None:
                collected += rf_3ch
        # end for
        if len(collected) == 0:
            # No usable RFs for this candidate angle.
            metrics.append(np.nan)
            continue
        # endif
        # Stack the radial component around onset and take the mean amplitude
        # in a narrow window as the metric for this angle.
        radial = collected.select(component='R')
        radial.trim2(-5, 5, reftime='onset')
        radial.detrend('linear')
        radial.taper(0.1)
        stacked = radial.stack().trim2(-1, 1, reftime='onset')[0].data
        metrics.append(np.mean(stacked))
    # end for
    return metrics
def __iter__(self):
    """
    Iterate over event waveforms stored in the source HDF5 file.

    Scans every station group under ``f['waveforms']`` and, for each event,
    assembles the traces into a 3-channel RFStream in strict ZNE order.

    :yield: Tuples of ``(station_id, event_id, event_time, stream)``. When the
        source datasets carry no ``event_id`` attribute, sequential integer
        IDs are synthesized instead.
    """
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    logger.info("Scanning jobs metadata from file {}".format(self.h5_filename))
    with self._open_source_file() as f:
        wf_data = f['waveforms']
        num_stations = len(wf_data)
        count = 0
        event_count = 0
        create_event_id = False
        first_loop = True
        for station_id in wf_data:
            count += 1
            logger.info("Station {} {}/{}".format(station_id, count, num_stations))
            station_data = wf_data[station_id]
            for event_time in station_data:
                event_traces = station_data[event_time]
                if not event_traces:
                    continue
                if first_loop:
                    first_loop = False
                    # Probe the first dataset once to decide whether event IDs
                    # must be synthesized for the whole file.
                    tmp = list(event_traces.keys())[0]
                    create_event_id = ('event_id' not in event_traces[tmp].attrs)
                traces = []
                for trace_id in event_traces:
                    trace = dataset2trace(event_traces[trace_id])
                    traces.append(trace)
                stream = RFStream(traces=traces)
                if len(stream) != self.num_components and self.channel_pattern is not None:
                    # Try each channel mask in preference order until one yields
                    # the expected number of components.
                    for ch_mask in self.channel_pattern.split(','):
                        _stream = stream.select(channel=ch_mask)
                        # Fixed: use the module logger (not the root logger via
                        # logging.info) so messages respect this module's config.
                        logger.info("Tried channel mask {}, got {} channels".format(ch_mask, len(_stream)))
                        if len(_stream) == self.num_components:
                            stream = _stream
                            break
                    # end for
                # end if
                if len(stream) != self.num_components:
                    # Fixed: module logger instead of root logger (logging.warning).
                    logger.warning("Incorrect number of traces ({}) for stn {} event {}, skipping"
                                   .format(len(stream), station_id, event_time))
                    continue
                # end if
                # Force order of traces to ZNE ordering.
                stream.traces = sorted(stream.traces, key=zne_order)
                # Strongly assert expected ordering of traces. This must be respected so that
                # RF normalization works properly.
                assert stream.traces[0].stats.channel[-1] == 'Z'
                assert stream.traces[1].stats.channel[-1] == 'N'
                assert stream.traces[2].stats.channel[-1] == 'E'
                event_count += 1
                if create_event_id:
                    event_id = event_count
                else:
                    event_id = traces[0].stats.event_id
                    assert np.all([(tr.stats.event_id == event_id) for tr in traces])
                # end if
                yield station_id, event_id, event_time, stream
            # end for
        # end for
    # end with
    logger.info("Yielded {} event traces to process".format(event_count))
# Compute receiver functions for the 3-channel stream, apply moveout
# correction, then accumulate into the output stream and persist to HDF5.
stream3c.rf()
stream3c.moveout()
stream.extend(stream3c)
stream.write(rffile, 'H5')
print(stream)

## Plot receiver function
# Toggle for the plotting section below (0 = skip plotting).
plot_rf = 0
if plot_rf:
    stream = read_rf(rffile, 'H5')
    # Shared plot options: time window around onset, wiggle fill colours,
    # and per-trace plot height.
    kw = {
        'trim': (-5, 20),
        'fillcolors': ('black', 'gray'),
        'trace_height': 0.1
    }
    # L-component RF for station PB01, sorted by back-azimuth.
    stream.select(component='L', station='PB01').sort(['back_azimuth']).plot_rf(**kw)
    plt.savefig('PB01' + '_L_RF.png')
    # Q-component RFs for each station of interest.
    for sta in ('PB01', 'PB04'):
        stream.select(component='Q', station=sta).sort(['back_azimuth']).plot_rf(**kw)
        plt.savefig(sta + '_Q_RF.png')

# Build the profile map only if the profile file has not been produced yet.
if not os.path.exists(profilefile):
    stream = read_rf(rffile, 'H5')
    # Piercing points at depth 70 — presumably km; TODO confirm against
    # the rf library's ppoints convention.
    ppoints = stream.ppoints(70)
    # Profile boxes along an azimuth of 90 deg from the given (lat, lon)
    # reference point, 73 bins over 0-180 km, 530 km wide.
    # NOTE(review): bin/width units assumed km — verify with get_profile_boxes.
    boxes = get_profile_boxes((-21.3, -70.7), 90, np.linspace(0, 180, 73), width=530)
    plt.figure(figsize=(10, 10))
    plot_profile_map(boxes, inventory=inventory, ppoints=ppoints)