def test_obspy_snuffle(self):
    fn = common.test_data_file('test1.mseed')
    stream = obspy.read(fn)
    stream.snuffle(launch_hook=close_win)

    trace = stream[0]
    trace.snuffle(launch_hook=close_win)
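The `close_win` helper is not defined in the snippet above. A minimal sketch of such a test helper, assuming the `launch_hook` callback is called with the Snuffler window object:

def close_win(win):
    # hypothetical test helper: close the viewer window right after
    # launch so the automated test does not block on the GUI
    win.close()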
def forward(rundir_or_config_path, event_names):
    if not event_names:
        return

    if op.isdir(rundir_or_config_path):
        rundir = rundir_or_config_path
        config = read_config(op.join(rundir, 'config.yaml'))

        problem, xs, misfits, _ = load_problem_info_and_data(
            rundir, subset='harvest')

        gms = problem.combine_misfits(misfits)
        ibest = num.argmin(gms)
        xbest = xs[ibest, :]

        ds = config.get_dataset(problem.base_source.name)
        problem.set_engine(config.engine_config.get_engine())

        for target in problem.targets:
            target.set_dataset(ds)

        payload = [(problem, xbest)]

    else:
        config = read_config(rundir_or_config_path)

        payload = []
        for event_name in event_names:
            ds = config.get_dataset(event_name)
            event = ds.get_event()
            problem = config.get_problem(event)
            xref = problem.preconstrain(
                problem.pack(problem.base_source))
            payload.append((problem, xref))

    all_trs = []
    events = []
    for (problem, x) in payload:
        ds.empty_cache()
        results = problem.evaluate(x)

        event = problem.get_source(x).pyrocko_event()
        events.append(event)

        for result in results:
            if isinstance(result, WaveformMisfitResult):
                result.filtered_obs.set_codes(location='ob')
                result.filtered_syn.set_codes(location='sy')
                all_trs.append(result.filtered_obs)
                all_trs.append(result.filtered_syn)

    markers = []
    for ev in events:
        markers.append(pmarker.EventMarker(ev))

    trace.snuffle(all_trs, markers=markers, stations=ds.get_stations())
def forward(env, show='filtered'):
    payload = []
    if env.have_rundir():
        env.setup_modelling()
        history = env.get_history(subset='harvest')
        xbest = history.get_best_model()
        problem = env.get_problem()
        ds = env.get_dataset()
        payload.append((ds, problem, xbest))

    else:
        for event_name in env.get_selected_event_names():
            env.set_current_event_name(event_name)
            env.setup_modelling()
            problem = env.get_problem()
            ds = env.get_dataset()
            xref = problem.preconstrain(problem.get_reference_model())
            payload.append((ds, problem, xref))

    all_trs = []
    events = []
    stations = {}
    for (ds, problem, x) in payload:
        results = problem.evaluate(x)

        event = problem.get_source(x).pyrocko_event()
        events.append(event)

        for result in results:
            if isinstance(result, WaveformMisfitResult):
                if show == 'filtered':
                    result.filtered_obs.set_codes(location='ob')
                    result.filtered_syn.set_codes(location='sy')
                    all_trs.append(result.filtered_obs)
                    all_trs.append(result.filtered_syn)
                elif show == 'processed':
                    result.processed_obs.set_codes(location='ob')
                    result.processed_syn.set_codes(location='sy')
                    all_trs.append(result.processed_obs)
                    all_trs.append(result.processed_syn)
                else:
                    raise ValueError('Invalid argument for show: %s' % show)

        for station in ds.get_stations():
            stations[station.nsl()] = station

    markers = []
    for ev in events:
        markers.append(pmarker.EventMarker(ev))

    trace.snuffle(
        all_trs, markers=markers, stations=list(stations.values()))
def manual_qc(options, conf, event_names):
    conf = conf['iris_pull_config']

    if not event_names:
        sys.exit('need event name')

    for event_name in event_names:
        conf.event_name = event_name
        event = _get_event_infos(conf)
        stations = _get_stations(conf)
        traces = _get_prepared_traces(conf, stations)

        fn = conf.path('manual_blacklist_path')
        nsl_blacklist = read_manual_blacklist(fn)

        retval, markers = trace.snuffle(
            traces, events=[event], stations=stations, want_markers=True)

        for m in markers:
            if type(m) is gui_util.Marker:
                try:
                    nslc = m.one_nslc()
                    nsl_blacklist.add(nslc[:3])
                except gui_util.MarkerOneNSLCRequired:
                    pass

        update_manual_blacklist(nsl_blacklist, fn)
def snuffle(stream_or_trace, inventory=None, catalog=None, **kwargs):
    '''
    Explore ObsPy data with Snuffler.

    :param stream_or_trace: :py:class:`obspy.Stream
        <obspy.core.stream.Stream>` or :py:class:`obspy.Trace
        <obspy.core.trace.Trace>` object
    :param inventory: :py:class:`obspy.Inventory
        <obspy.core.inventory.inventory.Inventory>` object
    :param catalog: :py:class:`obspy.Catalog
        <obspy.core.event.Catalog>` object
    :param kwargs: extra arguments passed to
        :meth:`pyrocko.trace.Trace.snuffle`.

    :returns: ``(return_tag, markers)``, where ``return_tag`` is a string
        flagging how the Snuffler window has been closed and ``markers`` is
        a list of :py:class:`pyrocko.gui.marker.Marker` objects.

    This function displays an ObsPy stream object in Snuffler. It returns to
    the caller once the window has been closed. The ``return_tag`` returned
    by the function can be used as a primitive way to communicate a user
    decision to the calling script. By default it returns the key pressed to
    close the window (if any), either ``'q'`` or ``'x'``, but the value can
    be customized when the exit is triggered from within a Snuffling.

    See also :py:func:`fiddle` for a variant of this function returning an
    interactively modified ObsPy stream object.
    '''

    from pyrocko import trace
    import obspy

    obspy_inventory = inventory
    obspy_catalog = catalog

    if isinstance(stream_or_trace, obspy.Trace):
        obspy_stream = obspy.core.stream.Stream(traces=[stream_or_trace])
    else:
        obspy_stream = stream_or_trace

    events = to_pyrocko_events(obspy_catalog)
    stations = to_pyrocko_stations(obspy_inventory)

    return trace.snuffle(
        to_pyrocko_traces(obspy_stream),
        events=events,
        stations=stations,
        want_markers=True,
        **kwargs)
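A minimal usage sketch for the function above. It assumes only what the docstrings in this collection already state: `obspy_compat.plant()` attaches the method to ObsPy classes (see the `fiddle` docstring below), `obspy.read()` loads ObsPy's bundled example data, and the call returns ``(return_tag, markers)``:

import obspy
from pyrocko import obspy_compat

obspy_compat.plant()  # attach snuffle()/fiddle() to ObsPy classes

stream = obspy.read()  # ObsPy's bundled example seismogram
return_tag, markers = stream.snuffle()

if return_tag == 'q':
    # user closed the window with 'q'; act on the picked markers
    print('%i markers picked' % len(markers))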
def iload(filename, load_data=True):
    for gse in readgse(filename, load_data=load_data):
        for wv in gse.waveforms:
            yield wv.trace()


def detect(first512):
    lines = first512.lstrip().splitlines()
    if len(lines) >= 2:
        if lines[0].startswith('WID2 '):
            return True

        if lines[0].startswith('BEGIN GSE2'):
            return True

    return False


if __name__ == '__main__':
    all_traces = []
    for fn in sys.argv[1:]:
        if detect(open(fn).read(512)):
            all_traces.extend(iload(fn))

    trace.snuffle(all_traces)
def plot_snuffler(self):
    for i in xrange(self.Nwav):
        manifold = self.seismo[i]
        # trace.snuffle(manifold.traces)
        trace.snuffle(manifold.syn + manifold.traces,
                      events=manifold.events)
def model(
        engine, store_id,
        magnitude_min, magnitude_max,
        moment_tensor,
        stress_drop_min, stress_drop_max,
        rupture_velocity_min, rupture_velocity_max,
        depth_min, depth_max,
        distance_min, distance_max,
        measures,
        nsources=400,
        nreceivers=1,
        apply_source_response_via_spectra=True,
        debug=True):

    d2r = math.pi / 180.

    components = set()
    for measure in measures:
        if not measure.components:
            raise Exception('no components given in measurement rule')

        for component in measure.components:
            components.add(component)

    components = list(components)

    data = []
    nerrors = 0
    traces_debug = []
    markers_debug = []
    for isource in xrange(nsources):
        magnitude = num.random.uniform(
            magnitude_min, magnitude_max)

        stress_drop = num.random.uniform(
            stress_drop_min, stress_drop_max)

        rupture_velocity = num.random.uniform(
            rupture_velocity_min, rupture_velocity_max)

        radius = (pmt.magnitude_to_moment(magnitude) * 7./16. /
                  stress_drop)**(1./3.)

        duration = 1.5 * radius / rupture_velocity

        if moment_tensor is None:
            mt = pmt.MomentTensor.random_dc(magnitude=magnitude)
        else:
            mt = copy.deepcopy(moment_tensor)
            mt.magnitude = magnitude

        depth = num.random.uniform(depth_min, depth_max)
        if apply_source_response_via_spectra:
            source = gf.MTSource(
                m6=mt.m6(),
                depth=depth)

            extra_responses = [
                wmeasure.BruneResponse(duration=duration)]
        else:
            source = gf.MTSource(
                m6=mt.m6(),
                depth=depth,
                stf=gf.HalfSinusoidSTF(effective_duration=duration))

            extra_responses = []

        for ireceiver in xrange(nreceivers):
            angle = num.random.uniform(0., 360.)
            distance = num.exp(num.random.uniform(
                math.log(distance_min), math.log(distance_max)))

            targets = []
            for comp in components:
                targets.append(gf.Target(
                    quantity='displacement',
                    codes=('', '%i_%i' % (isource, ireceiver), '', comp),
                    north_shift=distance*math.cos(d2r*angle),
                    east_shift=distance*math.sin(d2r*angle),
                    depth=0.,
                    store_id=store_id))

            resp = engine.process(source, targets)
            amps = []
            for measure in measures:
                comp_to_tt = {}
                for (source, target, tr) in resp.iter_results():
                    comp_to_tt[target.codes[-1]] = (target, tr)

                targets, trs = zip(*(
                    comp_to_tt[c] for c in measure.components))

                try:
                    result = wmeasure.evaluate(
                        engine, source, targets, trs,
                        extra_responses,
                        debug=debug)

                    if not debug:
                        amps.append(result)
                    else:
                        amp, trs, marker = result
                        amps.append(amp)
                        traces_debug.extend(trs)
                        markers_debug.append(marker)

                except wmeasure.AmplitudeMeasurementFailed:
                    nerrors += 1
                    amps.append(None)

            data.append([magnitude, duration, depth, distance] + amps)

    if debug:
        trace.snuffle(traces_debug, markers=markers_debug)

    return num.array(data, dtype=num.float)
request_response = fdsn.station(
    site='geofon', selection=selection, level='response')

# save the response in YAML and StationXML format
request_response.dump(filename='responses_geofon.yaml')
request_response.dump_xml(filename='responses_geofon.xml')

# Loop through retrieved waveforms and request meta information
# for each trace
traces = io.load('traces.mseed')

displacement = []
for tr in traces:
    polezero_response = request_response.get_pyrocko_response(
        nslc=tr.nslc_id,
        timespan=(tr.tmin, tr.tmax),
        fake_input_units='M')
    # *fake_input_units*: required for consistent responses throughout the
    # entire data set

    # deconvolve transfer function
    restituted = tr.transfer(
        tfade=2.,
        freqlimits=(0.01, 0.1, 1., 2.),
        transfer_function=polezero_response,
        invert=True)

    displacement.append(restituted)

# Inspect waveforms using Snuffler
trace.snuffle(displacement)
def command_view(args):
    def setup(parser):
        parser.add_option(
            '--extract',
            dest='extract',
            metavar='start:stop[:step|@num],...',
            help='specify which traces to show')

        parser.add_option(
            '--show-phases',
            dest='showphases',
            default=None,
            metavar='[phase_id1,phase_id2,...|all]',
            help='add phase markers from ttt')

        parser.add_option(
            '--qt5',
            dest='gui_toolkit_qt5',
            action='store_true',
            default=False,
            help='use Qt5 for the GUI')

        parser.add_option(
            '--qt4',
            dest='gui_toolkit_qt4',
            action='store_true',
            default=False,
            help='use Qt4 for the GUI')

        parser.add_option(
            '--opengl',
            dest='opengl',
            action='store_true',
            default=False,
            help='use OpenGL for drawing')

    parser, options, args = cl_parse('view', args, setup=setup)

    if options.gui_toolkit_qt4:
        config.override_gui_toolkit = 'qt4'

    if options.gui_toolkit_qt5:
        config.override_gui_toolkit = 'qt5'

    gdef = None
    if options.extract:
        try:
            gdef = gf.meta.parse_grid_spec(options.extract)
        except gf.meta.GridSpecError as e:
            die(e)

    store_dirs = get_store_dirs(args)
    alpha = 'abcdefghijklmnopqrstxyz'.upper()

    markers = []
    traces = []
    try:
        for istore, store_dir in enumerate(store_dirs):
            store = gf.Store(store_dir)

            if options.showphases == 'all':
                phasenames = [pn.id for pn in store.config.tabulated_phases]
            elif options.showphases is not None:
                phasenames = options.showphases.split(',')

            ii = 0
            for args in store.config.iter_extraction(gdef):
                gtr = store.get(args)

                loc_code = ''
                if len(store_dirs) > 1:
                    loc_code = alpha[istore % len(alpha)]

                if gtr:
                    sta_code = '%04i (%s)' % (
                        ii, ','.join('%gk' % (x/1000.) for x in args[:-1]))
                    tmin = gtr.deltat * gtr.itmin

                    tr = trace.Trace(
                        '',
                        sta_code,
                        loc_code,
                        '%02i' % args[-1],
                        ydata=gtr.data,
                        deltat=gtr.deltat,
                        tmin=tmin)

                    if options.showphases:
                        for phasename in phasenames:
                            phase_tmin = store.t(phasename, args[:-1])
                            if phase_tmin:
                                m = marker.PhaseMarker(
                                    [('', sta_code, loc_code,
                                      '%02i' % args[-1])],
                                    phase_tmin,
                                    phase_tmin,
                                    0,
                                    phasename=phasename)
                                markers.append(m)

                    traces.append(tr)

                ii += 1

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)

    trace.snuffle(traces, markers=markers, opengl=options.opengl)
def polarization(
        ds, store, timing, fmin, fmax, ffactor,
        time_factor_pre=2.,
        time_factor_post=2.,
        distance_min=None,
        distance_max=None,
        depth_min=None,
        depth_max=None,
        size_factor=0.05,
        nsl_to_time=None,
        output_filename=None,
        output_format=None,
        output_dpi=None):

    event = ds.get_event()
    stations = ds.get_stations()
    source = gf.Source.from_pyrocko_event(event)

    trs = []
    for station in stations:
        nsl = station.nsl()

        dist = source.distance_to(station)

        if distance_min is not None and dist < distance_min:
            continue

        if distance_max is not None and distance_max < dist:
            continue

        if depth_min is not None and station.depth < depth_min:
            continue

        if depth_max is not None and depth_max < station.depth:
            continue

        if nsl_to_time is None:
            tp = event.time + store.t(timing, source, station)
        else:
            if nsl not in nsl_to_time:
                continue

            tp = nsl_to_time[nsl]

        for component in 'ZNE':
            tmin = tp - time_factor_pre / fmin
            tmax = tp + time_factor_post / fmin

            nslc = nsl + (component, )

            freqlimits = [fmin / ffactor, fmin, fmax, fmax * ffactor]
            tfade = 1.0 / (fmin / ffactor)

            try:
                trs_projected, trs_restituted, trs_raw, _ = \
                    ds.get_waveform(
                        nslc,
                        tmin=tmin,
                        tmax=tmax,
                        tfade=tfade,
                        freqlimits=freqlimits,
                        debug=True)

                for tr in trs_projected:
                    tr.shift(-tp)

                trs.extend(trs_projected)

            except dataset.NotFound as e:
                logger.warn(str(e))
                continue

    trace.snuffle(trs, stations=stations)

    plot_polarizations(
        stations, trs,
        event=event,
        size_factor=size_factor,
        output_filename=output_filename,
        output_format=output_format,
        output_dpi=output_dpi)
import time

import numpy as num

from pyrocko import trace
from pyrocko.io import save

now = time.time()
ydata = num.random.random(1000) - 0.5

tr = [trace.Trace('', 'sta1', '', 'N', deltat=0.1, tmin=now, ydata=ydata),
      trace.Trace('', 'sta2', '', 'E', deltat=0.1, tmin=now, ydata=ydata)]

trace.snuffle(tr)

c = 0
for t in tr:
    c = c + 1
    # save each trace to its own text file (the original passed the whole
    # list `tr` here, writing both traces into every file)
    save([t], '/home/djamil/Workspace/Python/Course-Boot-Camp/git/'
         'tipb-exercise/tmp_file_' + str(c) + '.txt', 'text')
if True:
    traces, tmins = heart.seis_synthetics(
        engine, patches, targets,
        arrival_times=arrival_times,
        wavename='any_P',
        arrival_taper=arrival_taper,
        filterer=filterer,
        outmode='stacked_traces')

    all_traces = traces + synth_traces_nn_t + synth_traces_ml_t + \
        synth_traces_nn + synth_traces_ml

    # display to check
    trace.snuffle(
        all_traces,
        stations=sc.wavemaps[0].stations,
        events=[event])

if False:
    from pyrocko.io import save
    save(all_traces, 'traces_%s.yaff' % tshift_str, format='yaff')

if False:
    traces1, tmins = heart.seis_synthetics(
        engine, [patches[0]], targets,
        arrival_times=ats,
        wavename='any_P',
        arrival_taper=arrival_taper,
        filterer=filterer,
        outmode='stacked_traces')

gfs.set_stack_mode('numpy')
def _test_homogeneous_scenario(
        self,
        config_type_class,
        component_scheme,
        discretized_source_class):

    if config_type_class.short_type == 'C' \
            or component_scheme.startswith('poro'):
        assert False

    store_id = 'homogeneous_%s_%s' % (
        config_type_class.short_type, component_scheme)

    vp = 5.8 * km
    vs = 3.46 * km

    mod = cake.LayeredModel.from_scanlines(cake.read_nd_model_str('''
 0. %(vp)g %(vs)g 2.6 1264. 600.
20. %(vp)g %(vs)g 2.6 1264. 600.'''.lstrip() % dict(vp=vp/km, vs=vs/km)))

    store_type = config_type_class.short_type
    params = dict(
        id=store_id,
        sample_rate=1000.,
        modelling_code_id='ahfullgreen',
        component_scheme=component_scheme,
        earthmodel_1d=mod)

    if store_type in ('A', 'B'):
        params.update(
            source_depth_min=1.*km,
            source_depth_max=2.*km,
            source_depth_delta=0.5*km,
            distance_min=4.*km,
            distance_max=6.*km,
            distance_delta=0.5*km)

    if store_type == 'A':
        params.update(
            receiver_depth=3.*km)

    if store_type == 'B':
        params.update(
            receiver_depth_min=2.*km,
            receiver_depth_max=3.*km,
            receiver_depth_delta=0.5*km)

    if store_type == 'C':
        params.update(
            source_depth_min=1.*km,
            source_depth_max=2.*km,
            source_depth_delta=0.5*km,
            source_east_shift_min=1.*km,
            source_east_shift_max=2.*km,
            source_east_shift_delta=0.5*km,
            source_north_shift_min=2.*km,
            source_north_shift_max=3.*km,
            source_north_shift_delta=0.5*km)

    config = config_type_class(**params)

    config.validate()
    store_dir = mkdtemp(prefix=store_id)
    self.tempdirs.append(store_dir)

    gf.store.Store.create_editables(store_dir, config=config)

    store = gf.store.Store(store_dir, 'r')
    store.make_ttt()
    store.close()

    fomosto_ahfullgreen.build(store_dir, nworkers=1)

    store = gf.store.Store(store_dir, 'r')

    dsource_type = discretized_source_class.__name__

    params = {}
    if dsource_type == 'DiscretizedMTSource':
        params.update(
            m6s=num.array([
                [1., 2., 3., 4., 5., 6.],
                [1., 2., 3., 4., 5., 6.]]))

    elif dsource_type == 'DiscretizedExplosionSource':
        params.update(
            m0s=num.array([2., 2.]))

    elif dsource_type == 'DiscretizedSFSource':
        params.update(
            forces=num.array([[1., 2., 3.], [1., 2., 3.]]))

    elif dsource_type == 'DiscretizedPorePressureSource':
        params.update(
            pp=num.array([3., 3.]))

    snorth = 2.0*km
    seast = 2.0*km
    sdepth = 1.0*km

    rnorth = snorth + 3.*km
    reast = seast + 4.*km
    rdepth = 3.0*km

    t0 = 10.0 * store.config.deltat

    dsource = discretized_source_class(
        times=num.array([t0, t0+5.*store.config.deltat]),
        north_shifts=num.array([snorth, snorth]),
        east_shifts=num.array([seast, seast]),
        depths=num.array([sdepth, sdepth]),
        **params)

    receiver = gf.Receiver(
        north_shift=rnorth,
        east_shift=reast,
        depth=rdepth)

    components = gf.component_scheme_to_description[
        component_scheme].provided_components

    for seismogram in (store.seismogram, store.seismogram_old):
        for interpolation in ('nearest_neighbor', 'multilinear'):
            trs1 = []
            for component, gtr in seismogram(
                    dsource, receiver, components,
                    interpolation=interpolation).items():

                tr = gtr.to_trace('', 'STA', '', component)
                trs1.append(tr)

            trs2 = _make_traces_homogeneous(
                dsource, receiver,
                store.config.earthmodel_1d.require_homogeneous(),
                store.config.deltat, '', 'STA', 'a')

            tmin = max(tr.tmin for tr in trs1+trs2)
            tmax = min(tr.tmax for tr in trs1+trs2)
            for tr in trs1+trs2:
                tr.chop(tmin, tmax)
                assert tr.data_len() > 2

            trs1.sort(key=lambda tr: tr.channel)
            trs2.sort(key=lambda tr: tr.channel)

            denom = 0.0
            for t1, t2 in zip(trs1, trs2):
                assert t1.channel == t2.channel
                denom += num.sum(t1.ydata**2) + num.sum(t2.ydata**2)

            ds = []
            for t1, t2 in zip(trs1, trs2):
                ds.append(2.0 * num.sum((t1.ydata - t2.ydata)**2) / denom)

            ds = num.array(ds)

            if component_scheme == 'elastic8':
                limit = 1e-2
            else:
                limit = 1e-6

            if not num.all(ds < limit):
                print(ds)
                trace.snuffle(trs1+trs2)

            assert num.all(ds < limit)
def fiddle(stream_or_trace, inventory=None, catalog=None, **kwargs):
    '''
    Manipulate ObsPy stream object interactively.

    :param stream_or_trace: :py:class:`obspy.Stream
        <obspy.core.stream.Stream>` or :py:class:`obspy.Trace
        <obspy.core.trace.Trace>` object
    :param inventory: :py:class:`obspy.Inventory
        <obspy.core.inventory.inventory.Inventory>` object
    :param catalog: :py:class:`obspy.Catalog
        <obspy.core.event.Catalog>` object
    :param kwargs: extra arguments passed to
        :meth:`pyrocko.trace.Trace.snuffle`.

    :returns: :py:class:`obspy.Stream <obspy.core.stream.Stream>` object
        with changes applied interactively (or :py:class:`obspy.Trace
        <obspy.core.trace.Trace>` if called with a trace as first argument).

    This function displays an ObsPy stream object in Snuffler like
    :py:func:`snuffle`, but additionally adds a Snuffling panel to apply
    some basic ObsPy signal processing to the contained traces. The applied
    changes are handed back to the caller as a modified copy of the stream
    object.

    .. code::

        import obspy
        from pyrocko import obspy_compat

        obspy_compat.plant()

        stream = obspy.read()
        stream_filtered = stream.fiddle()  # returns once window has been
                                           # closed
    '''
    from pyrocko import trace
    import obspy

    obspy_inventory = inventory
    obspy_catalog = catalog

    if isinstance(stream_or_trace, obspy.Trace):
        obspy_stream = obspy.core.stream.Stream(traces=[stream_or_trace])
    else:
        obspy_stream = stream_or_trace

    events = to_pyrocko_events(obspy_catalog)
    stations = to_pyrocko_stations(obspy_inventory)

    snuffling_loader = ObsPyStreamSnufflingLoader(obspy_stream)
    launch_hook = kwargs.pop('launch_hook', [])
    if not isinstance(launch_hook, list):
        launch_hook = [launch_hook]

    launch_hook.append(snuffling_loader)

    trace.snuffle(
        [],
        events=events,
        stations=stations,
        controls=False,
        launch_hook=launch_hook,
        **kwargs)

    new_obspy_stream = snuffling_loader.get_snuffling().get_obspy_stream()

    # check the original argument here: obspy_stream is always a Stream at
    # this point, so testing it would never return a bare Trace as the
    # docstring promises
    if isinstance(stream_or_trace, obspy.Trace):
        return new_obspy_stream[0]

    return new_obspy_stream
def snuffle(self):
    trace.snuffle(self.traces, events=[self.event], stations=self.stations)
    lon=11.,
    depth=10000.,
    strike=20.,
    dip=40.,
    rake=60.,
    magnitude=4.)

# Processing that data will return a pyrocko.gf.Response object.
response = engine.process(source_dc, targets)

# This will return a list of the requested traces:
synthetic_traces = response.pyrocko_traces()

# In addition to that it is also possible to extract interpolated travel
# times of phases which have been defined in the store's config file.
store = engine.get_store(store_id)

markers = []
for t in targets:
    dist = t.distance_to(source_dc)
    depth = source_dc.depth
    arrival_time = store.t('begin', (depth, dist))
    m = PhaseMarker(tmin=arrival_time,
                    tmax=arrival_time,
                    phasename='P',
                    nslc_ids=(t.codes,))
    markers.append(m)

# Finally, let's scrutinize these traces.
trace.snuffle(synthetic_traces, markers=markers)
def process(self, fband, taper, twd, debug):
    no_events = len(self.events)
    for i_ev, event in enumerate(self.events):
        tr_nslc_ids = []
        self.logs.info('Processing event %s of %s' % (i_ev, no_events))
        section = Section(event, self.stations)
        skipped = 0
        unskipped = 0
        for i_s, s in enumerate(self.stations):
            dist = distance_accurate50m(event, s)
            arrival = self.arrT[i_ev, i_s]
            if num.isnan(arrival):
                skipped += 1
                self.logs.warning('skipped %s.%s %s'
                                  % (s.network, s.station, event.time))
                continue
            else:
                unskipped += 1

            selector = lambda tr: (s.network, s.station, self.component)\
                == (tr.network, tr.station, tr.channel)

            tr_generator = self.data_pile.chopper(
                tmin=arrival - twd[0], tmax=arrival + twd[1],
                trace_selector=selector, load_data=True)

            if self.method == 'syn_comp':
                tr_syn_generator = self.syn_data_pile.chopper(
                    tmin=arrival - twd[0], tmax=arrival + twd[1],
                    trace_selector=selector, load_data=True)

            for tr in tr_generator:
                if not len(tr) > 1 and tr:
                    tr = tr[0]
                    if len(tr.ydata) > 0 and num.max(
                            num.abs(tr.get_ydata())) != 0:
                        dtype = type(tr.ydata[0])
                        tr.ydata -= dtype(tr.get_ydata().mean())

                        # make SNR threshold here!
                        st_s = num.argmax(num.abs(tr.ydata)) - 10
                        snr = num.mean(
                            [y*y for y in tr.ydata[st_s:st_s+60]])/\
                            num.mean([y*y for y in tr.ydata[0:60]])
                        if snr < self.snr_thresh:
                            continue
                        # mean(A*A_signal)/mean(A*A_noise)

                        tr.highpass(fband['order'], fband['corner_hp'])
                        tr.taper(taper, chop=False)
                        tr.lowpass(fband['order'], fband['corner_lp'])

                        if debug is True:
                            self.logs.debug('SNR %s' % snr)
                            self.logs.debug('arrival time %s'
                                            % util.time_to_str(arrival))
                            trace.snuffle(tr, markers=[
                                pm.Marker(
                                    nslc_ids=[tr.nslc_id],
                                    tmin=arrival,
                                    tmax=arrival + 3)])

                        if num.max(num.abs(tr.get_ydata())) != 0:
                            section.max_tr[tr.nslc_id] = num.max(
                                num.abs(tr.get_ydata()))
                            tr_nslc_ids.append(tr.nslc_id)
                else:
                    for t in tr:
                        tt = t  # [0]
                        if len(tt.ydata) > 0 and num.max(
                                num.abs(tt.get_ydata())) != 0:
                            dtype = type(tt.ydata[0])
                            # print(tr.ydata, type(tr.ydata))
                            tt.ydata -= dtype(tt.get_ydata().mean())
                            st_s = num.argmax(num.abs(tt.ydata)) - 10
                            snr = num.mean(
                                [y*y for y in tt.ydata[st_s:st_s+60]])/\
                                num.mean([y*y for y in tt.ydata[0:60]])
                            # print('SNR', snr)
                            if snr < self.snr_thresh:
                                continue
                            tt.highpass(fband['order'], fband['corner_hp'])
                            tt.taper(taper, chop=False)
                            tt.lowpass(fband['order'], fband['corner_lp'])

                            if debug is True:
                                self.logs.debug('SNR %s' % snr)
                                self.logs.debug('arrival time %s'
                                                % util.time_to_str(arrival))
                                trace.snuffle(tt, markers=[
                                    pm.Marker(
                                        nslc_ids=[tt.nslc_id],
                                        tmin=arrival,
                                        tmax=arrival + 3)])

                            if num.max(num.abs(tt.get_ydata())) != 0:
                                section.max_tr[tt.nslc_id] = num.max(
                                    num.abs(tt.get_ydata()))
                                tr_nslc_ids.append(tt.nslc_id)

            if self.method == 'syn_comp':
                for tr in tr_syn_generator:
                    if not len(tr) > 1 and tr:
                        tr = tr[0]
                        if len(tr.ydata) > 0 and num.max(
                                num.abs(tr.get_ydata())) != 0:
                            dtype = type(tr.ydata[0])
                            tr.ydata -= dtype(tr.get_ydata().mean())
                            st_s = num.argmax(num.abs(tr.ydata)) - 10
                            snr = num.mean(
                                [y*y for y in tr.ydata[st_s:st_s+60]])/\
                                num.mean([y*y for y in tr.ydata[0:60]])
                            tr.highpass(fband['order'], fband['corner_hp'])
                            tr.taper(taper, chop=False)
                            tr.lowpass(fband['order'], fband['corner_lp'])

                            if debug is True:
                                self.logs.debug('SNR %s' % snr)
                                self.logs.debug('arrival time %s'
                                                % util.time_to_str(arrival))
                                trace.snuffle(tr, markers=[
                                    pm.Marker(
                                        nslc_ids=[tr.nslc_id],
                                        tmin=arrival,
                                        tmax=arrival + 3)])

                            if num.max(num.abs(tr.get_ydata())) != 0:
                                section.max_tr_syn[
                                    tr.nslc_id[0:2]] = num.max(
                                        num.abs(tr.get_ydata()))
                                # tr_nslc_ids_syn.append(tr.nslc_id)
                    else:
                        for t in tr:
                            tt = t  # [0]
                            if len(tt.ydata) > 0 and num.max(
                                    num.abs(tt.get_ydata())) != 0:
                                dtype = type(tt.ydata[0])
                                # print(tr.ydata, type(tr.ydata))
                                tt.ydata -= dtype(tt.get_ydata().mean())
                                st_s = num.argmax(num.abs(tt.ydata)) - 10
                                snr = num.mean(
                                    [y*y for y in tt.ydata[st_s:st_s+60]])/\
                                    num.mean([y*y for y in tt.ydata[0:60]])
                                # print('SNR', snr)
                                if snr < self.snr_thresh:
                                    continue
                                tt.highpass(fband['order'],
                                            fband['corner_hp'])
                                tt.taper(taper, chop=False)
                                tt.lowpass(fband['order'],
                                           fband['corner_lp'])

                                if debug is True:
                                    self.logs.debug('SNR %s' % snr)
                                    self.logs.debug(
                                        'arrival time %s'
                                        % util.time_to_str(arrival))
                                    trace.snuffle(tt, markers=[
                                        pm.Marker(
                                            nslc_ids=[tt.nslc_id],
                                            tmin=arrival,
                                            tmax=arrival + 3)])

                                if num.max(num.abs(tt.get_ydata())) != 0:
                                    section.max_tr_syn[
                                        tt.nslc_id[0:2]] = num.max(
                                            num.abs(tt.get_ydata()))
                                    # tr_nslc_ids_syn.append(tt.nslc_id)
                            # else:
                            #     print('no trace', s.network, s.station,
                            #           tr, util.time_to_str(event.time))
                        # break
            # print(i_s)

        self.logs.debug('skipped %s/%s' % (skipped, unskipped))
        section.finish(self.method, fband, taper, i_ev)
        self.all_nslc_ids.update(tr_nslc_ids)
        gc.collect()
        self.sections.append(section)

        if self.method == 'median_all_avail' and i_ev == no_events - 1:
            self.handle_median_stats_option()
taper = trace.CosFader(xfrac=0.25)

# event_selector = EventSelector(distmin=1000*km,
#                                distmax=20000*km,
#                                depthmin=2*km,
#                                depthmax=600*km,
#                                magmin=4.9)

candidate_fn = '../candidates2013.pf'
candidates = [m.get_event() for m in
              gui_util.Marker.load_markers(candidate_fn)]

event_selector = EventCollection(events=candidates)

ag = AutoGain(data_pile, stations=stations,
              reference_nsl=reference_id,
              event_selector=event_selector,
              component='Z')

ag.set_phaser(phases)
ag.set_window(window)
ag.process(fband, taper)
ag.save_mean(candidate_fn.replace('candidates', 'gains'))

optics = Optics(ag)
optics.plot()
plt.show()

for s in ag.get_sections():
    scaled_traces = s.get_gained_traces()
    unscaled_traces = s.get_ungained_traces()
    scaled_traces.extend(unscaled_traces)
    trace.snuffle(scaled_traces, events=candidates)
from pyrocko import io, trace, pile
from pyrocko.example import get_example_data

get_example_data('test.mseed')

traces = io.load('test.mseed')
traces[0].snuffle()  # look at a single trace
trace.snuffle(traces)  # look at a bunch of traces

# do something with the traces:
new_traces = []
for tr in traces:
    new = tr.copy()
    new.whiten()
    # to allow the viewer to distinguish the traces
    new.set_location('whitened')
    new_traces.append(new)

trace.snuffle(traces + new_traces)

# it is also possible to 'snuffle' a pile:
p = pile.make_pile(['test.mseed'])
p.snuffle()
deltat = 0.001
n = int(num.round(tlen / deltat))
out_x = num.zeros(n)
out_y = num.zeros(n)
out_z = num.zeros(n)

import pylab as lab
tau = 0.01
t = num.arange(1000) * deltat
lab.plot(t, num.exp(-t**2 / tau**2))
# lab.show()

add_seismogram(
    vp, vs, 1.0, 1.0, 1.0, x, f, m6, 'displacement',
    deltat, 0.0, out_x, out_y, out_z, Gauss(tau))

trs = []
for out, comp in zip([out_x, out_y, out_z], 'NED'):
    tr = trace.Trace(
        '', 'Naja!', '', comp, deltat=deltat, tmin=0.0, ydata=out)

    trs.append(tr)

trace.snuffle(trs)
def off_test_synthetic(self):

    from pyrocko import gf

    km = 1000.
    nstations = 10
    edepth = 5*km
    store_id = 'crust2_d0'

    swin = 2.
    lwin = 9.*swin
    ks = 1.0
    kl = 1.0
    kd = 3.0

    engine = gf.get_engine()
    snorths = (num.random.random(nstations)-1.0) * 50*km
    seasts = (num.random.random(nstations)-1.0) * 50*km
    targets = []
    for istation, (snorths, seasts) in enumerate(zip(snorths, seasts)):
        targets.append(
            gf.Target(
                quantity='displacement',
                codes=('', 's%03i' % istation, '', 'Z'),
                north_shift=float(snorths),
                east_shift=float(seasts),
                store_id=store_id,
                interpolation='multilinear'))

    source = gf.DCSource(
        north_shift=50*km,
        east_shift=50*km,
        depth=edepth)

    store = engine.get_store(store_id)

    response = engine.process(source, targets)

    trs = []

    station_traces = defaultdict(list)
    station_targets = defaultdict(list)
    for source, target, tr in response.iter_results():
        tp = store.t('any_P', source, target)
        t = tp - 5 * tr.deltat + num.arange(11) * tr.deltat
        if False:
            gauss = trace.Trace(
                tmin=t[0],
                deltat=tr.deltat,
                ydata=num.exp(-((t-tp)**2)/((2*tr.deltat)**2)))

            tr.ydata[:] = 0.0
            tr.add(gauss)

        trs.append(tr)
        station_traces[target.codes[:3]].append(tr)
        station_targets[target.codes[:3]].append(target)

    station_stalta_traces = {}
    # note: Python 2 idioms of the original (iteritems, xrange, print
    # statements) converted to Python 3 equivalents
    for nsl, traces in station_traces.items():
        etr = None
        for tr in traces:
            sqr_tr = tr.copy(data=False)
            sqr_tr.ydata = tr.ydata**2
            if etr is None:
                etr = sqr_tr
            else:
                etr += sqr_tr

        autopick.recursive_stalta(swin, lwin, ks, kl, kd, etr)
        etr.set_codes(channel='C')
        station_stalta_traces[nsl] = etr

    trace.snuffle(trs + list(station_stalta_traces.values()))

    deltat = trs[0].deltat

    nnorth = 50
    neast = 50

    size = 400*km

    north = num.linspace(-size/2., size/2., nnorth)
    north2 = num.repeat(north, neast)
    east = num.linspace(-size/2., size/2., neast)
    east2 = num.tile(east, nnorth)
    depth = 5*km

    def tcal(target, i):
        try:
            return store.t(
                'any_P',
                gf.Location(
                    north_shift=north2[i],
                    east_shift=east2[i],
                    depth=depth),
                target)

        except gf.OutOfBounds:
            return 0.0

    nsls = sorted(station_stalta_traces.keys())

    tts = num.fromiter((tcal(station_targets[nsl][0], i)
                        for i in range(nnorth*neast)
                        for nsl in nsls), dtype=num.float)

    arrays = [
        station_stalta_traces[nsl].ydata.astype(num.float) for nsl in nsls]
    offsets = num.array(
        [int(round(station_stalta_traces[nsl].tmin / deltat))
         for nsl in nsls], dtype=num.int32)
    shifts = -num.array(
        [int(round(tt / deltat))
         for tt in tts], dtype=num.int32).reshape(nnorth*neast, nstations)
    weights = num.ones((nnorth*neast, nstations))

    print(shifts[25*neast + 25] * deltat)
    print(offsets.dtype, shifts.dtype, weights.dtype)

    print('stack start')
    mat, ioff = parstack(arrays, offsets, shifts, weights, 1)
    print('stack stop')

    mat = num.reshape(mat, (nnorth, neast))

    from matplotlib import pyplot as plt

    fig = plt.figure()
    axes = fig.add_subplot(1, 1, 1, aspect=1.0)
    axes.contourf(east/km, north/km, mat)

    axes.plot(
        g(targets, 'east_shift')/km,
        g(targets, 'north_shift')/km, '^')
    axes.plot(source.east_shift/km, source.north_shift/km, 'o')
    plt.show()
            yield trace.Trace(
                '', sta, '', cha,
                tmin=tmin, tmax=tmax, deltat=deltat, ydata=ydata)

    except EOF:
        pass


def detect(first512):
    lines = first512.splitlines()
    if len(lines) >= 5 and \
            lines[0].startswith(b'XW01') and \
            lines[2].startswith(b'WID1') and \
            lines[4].startswith(b'DAT1'):
        return True

    return False


if __name__ == '__main__':
    all_traces = []
    for fn in sys.argv[1:]:
        all_traces.extend(iload(fn))

    trace.snuffle(all_traces)
def run_extract(config, debug=False):
    engine = config.get_engine()

    output_path = config.expand_path(config.output_path)
    util.ensuredir(output_path)

    output_measures_path = op.join(output_path, 'measures.txt')
    output_config_path = op.join(output_path, 'config.yaml')

    wconfig.write_config(config, output_config_path)

    with open(output_measures_path, 'w') as out:
        out.write('# event station %s\n' % ' '.join(
            measure.name for measure in config.measures))

        event_names = config.get_event_names()

        for event_name in event_names:
            logger.info('processing event %s' % event_name)
            try:
                ds = config.get_dataset(event_name)
            except OSError as e:
                logger.warn('could not get dataset for event %s'
                            % event_name)
                continue

            event = ds.get_event()
            source = gf.DCSource.from_pyrocko_event(event)

            stations = ds.get_stations()

            debug_infos = []
            for station in stations:
                values = []
                try:
                    for measure in config.measures:
                        targets = [
                            gf.Target(
                                quantity='velocity',
                                codes=station.nsl() + (component,),
                                store_id=config.store_id,
                                lat=station.lat,
                                lon=station.lon,
                                depth=station.depth,
                                elevation=station.elevation)
                            for component in measure.components]

                        value, debug_info = measure.evaluate(
                            engine, source, targets, ds, debug=debug)

                        values.append(value)
                        debug_infos.append(debug_info)

                    out.write('%s %s %s\n' % (
                        event.name,
                        '.'.join(x for x in station.nsl()),
                        ' '.join('%g' % value for value in values)))

                except (wmeasure.FeatureMeasurementFailed,
                        dataset.NotFound,
                        gf.OutOfBounds) as e:

                    logger.warn(
                        'feature extraction failed for %s, %s:\n  %s' % (
                            event.name,
                            '.'.join(x for x in station.nsl()),
                            e))

            if debug:
                traces = []
                markers = []
                for traces_this, markers_this in debug_infos:
                    traces.extend(traces_this)
                    markers.extend(markers_this)

                trace.snuffle(
                    traces, markers=markers, events=[event],
                    stations=stations)
def snuffle(self):
    trace.snuffle(self.pyrocko_traces())
def ccs_allstats_one_event(i_ev, ev, stat_list, all_stations,
                           p_obs, p_syn, out_dir, bp, arrT_array,
                           cc_thresh, debug_mode=False):
    """
    For one event: call cc_single_stat_single_event for each station and
    collect the optimal time shifts.

    Returns a list of time shifts in the fixed order of the stations.
    """
    ev_t_str = util.time_to_str(ev.time).replace(' ', '_')
    # p_obs = pile.make_pile(datapath+ev_t_str, show_progress=False)
    # p_syn = pile.make_pile(syndatapath+ev_t_str, show_progress=False)
    tshift_list = []

    if p_obs and p_syn:
        for i_st, st in enumerate(stat_list):
            try:
                s = st.station
                n = st.network
                l = 'not_set'
            except AttributeError:
                # st is a plain (net, sta, loc, cha) tuple
                n, s, l, c = st

            i_ast = [i_ast for i_ast, ast in enumerate(all_stations)
                     if ast.network == n and ast.station == s]

            if len(i_ast) >= 1:
                ii_ast = i_ast[0]
                tmin = arrT_array[i_ev, ii_ast] - 30
            elif len(i_ast) == 0:
                logging.warning('station %s.%s not in all station list'
                                % (n, s))
                continue

            if l != 'not_set':
                tr_obs = p_obs.all(
                    trace_selector=lambda tr:
                        tr.network == n and tr.station == s and
                        tr.location == l and tr.channel == 'Z',
                    tmin=tmin, tmax=tmin + 300,
                    want_incomplete=True)
            else:
                tr_obs = p_obs.all(
                    trace_selector=lambda tr:
                        tr.network == n and tr.station == s and
                        tr.channel == 'Z',
                    tmin=tmin, tmax=tmin + 300,
                    want_incomplete=True)

            tr_syn = p_syn.all(
                trace_selector=lambda tr:
                    tr.network == n and tr.station == s and
                    tr.channel == 'Z',
                tmin=tmin, tmax=tmin + 300,
                want_incomplete=True)

            if len(tr_obs) != 0 and len(tr_syn) != 0:
                tr_syn = tr_syn[0]
                tr_obs = tr_obs[0]

                tr_obs.bandpass(bp[0], bp[1], bp[2])
                tr_syn.bandpass(bp[0], bp[1], bp[2])

                c = trace.correlate(tr_syn, tr_obs,
                                    mode='same',
                                    normalization='normal')

                t, coef = c.max()

                if debug_mode is True:
                    logging.debug('%s %s' % (t, coef))
                    trace.snuffle([tr_syn, tr_obs])
                    trace.snuffle([c])

                if coef > cc_thresh:
                    tshift_list.append(t)
                else:
                    tshift_list.append(num.nan)
            else:
                tshift_list.append(num.nan)

    return tshift_list
def check(
        config,
        event_names=None,
        target_string_ids=None,
        show_waveforms=False,
        n_random_synthetics=10,
        stations_used_path=None):

    markers = []
    stations_used = {}
    erroneous = []
    for ievent, event_name in enumerate(event_names):
        ds = config.get_dataset(event_name)
        event = ds.get_event()
        trs_all = []
        try:
            problem = config.get_problem(event)

            _, nfamilies = problem.get_family_mask()
            logger.info('Problem: %s' % problem.name)
            logger.info('Number of target families: %i' % nfamilies)
            logger.info('Number of targets (total): %i'
                        % len(problem.targets))

            if target_string_ids:
                problem.targets = [
                    target for target in problem.targets
                    if util.match_nslc(
                        target_string_ids, target.string_id())]

            logger.info('Number of targets (selected): %i'
                        % len(problem.targets))

            check_problem(problem)

            results_list = []
            sources = []
            if n_random_synthetics == 0:
                x = problem.get_reference_model()
                sources.append(problem.base_source)
                results = problem.evaluate(x)
                results_list.append(results)

            else:
                for i in range(n_random_synthetics):
                    x = problem.get_random_model()
                    sources.append(problem.get_source(x))
                    results = problem.evaluate(x)
                    results_list.append(results)

            if show_waveforms:
                engine = config.engine_config.get_engine()
                times = []
                tdata = []
                for target in problem.targets:
                    tobs_shift_group = []
                    tcuts = []
                    for source in sources:
                        tmin_fit, tmax_fit, tfade, tfade_taper = \
                            target.get_taper_params(engine, source)

                        times.extend(
                            (tmin_fit-tfade*2., tmax_fit+tfade*2.))

                        tobs, tsyn = target.get_pick_shift(engine, source)
                        if None not in (tobs, tsyn):
                            tobs_shift = tobs - tsyn
                        else:
                            tobs_shift = 0.0

                        tcuts.append(target.get_cutout_timespan(
                            tmin_fit+tobs_shift, tmax_fit+tobs_shift,
                            tfade))

                        tobs_shift_group.append(tobs_shift)

                    tcuts = num.array(tcuts, dtype=num.float)

                    tdata.append((
                        tfade,
                        num.mean(tobs_shift_group),
                        (num.min(tcuts[:, 0]), num.max(tcuts[:, 1]))))

                tmin = min(times)
                tmax = max(times)

                tmax += (tmax-tmin)*2

                for (tfade, tobs_shift, tcut), target in zip(
                        tdata, problem.targets):

                    store = engine.get_store(target.store_id)

                    deltat = store.config.deltat

                    freqlimits = list(target.get_freqlimits())
                    freqlimits[2] = 0.45/deltat
                    freqlimits[3] = 0.5/deltat
                    freqlimits = tuple(freqlimits)

                    try:
                        trs_projected, trs_restituted, trs_raw, _ = \
                            ds.get_waveform(
                                target.codes,
                                tmin=tmin+tobs_shift,
                                tmax=tmax+tobs_shift,
                                tfade=tfade,
                                freqlimits=freqlimits,
                                deltat=deltat,
                                backazimuth=target.
                                get_backazimuth_for_waveform(),
                                debug=True)

                    except NotFound as e:
                        logger.warn(str(e))
                        continue

                    trs_projected = copy.deepcopy(trs_projected)
                    trs_restituted = copy.deepcopy(trs_restituted)
                    trs_raw = copy.deepcopy(trs_raw)

                    for trx in trs_projected + trs_restituted + trs_raw:
                        trx.shift(-tobs_shift)
                        trx.set_codes(
                            network='',
                            station=target.string_id(),
                            location='')

                    for trx in trs_projected:
                        trx.set_codes(location=trx.location + '2_proj')

                    for trx in trs_restituted:
                        trx.set_codes(location=trx.location + '1_rest')

                    for trx in trs_raw:
                        trx.set_codes(location=trx.location + '0_raw')

                    trs_all.extend(trs_projected)
                    trs_all.extend(trs_restituted)
                    trs_all.extend(trs_raw)

                    for source in sources:
                        tmin_fit, tmax_fit, tfade, tfade_taper = \
                            target.get_taper_params(engine, source)

                        markers.append(pmarker.Marker(
                            nslc_ids=[
                                ('', target.string_id(), '*_proj', '*')],
                            tmin=tmin_fit, tmax=tmax_fit))

                        markers.append(pmarker.Marker(
                            nslc_ids=[
                                ('', target.string_id(), '*_raw', '*')],
                            tmin=tcut[0]-tobs_shift,
                            tmax=tcut[1]-tobs_shift,
                            kind=1))

            else:
                for itarget, target in enumerate(problem.targets):

                    nok = 0
                    for results in results_list:
                        result = results[itarget]
                        if not isinstance(result, gf.SeismosizerError):
                            nok += 1

                    if nok == 0:
                        sok = 'not used'
                    elif nok == len(results_list):
                        sok = 'ok'
                        try:
                            s = ds.get_station(target)
                            stations_used[s.nsl()] = s
                        except (NotFound, InvalidObject):
                            pass
                    else:
                        sok = 'not used (%i/%i ok)' % (
                            nok, len(results_list))

                    logger.info('%-40s %s' % (
                        (target.string_id() + ':', sok)))

        except GrondError as e:
            logger.error('Event %i, "%s": %s' % (
                ievent,
                event.name or util.time_to_str(event.time),
                str(e)))

            erroneous.append(event)

        if show_waveforms:
            trace.snuffle(trs_all, stations=ds.get_stations(),
                          markers=markers)

    if stations_used_path:
        stations = list(stations_used.values())
        stations.sort(key=lambda s: s.nsl())

        model.dump_stations(stations, stations_used_path)

    if erroneous:
        raise GrondError(
            'Check failed for events: %s' % ', '.join(
                ev.name for ev in erroneous))
    lon=11.,
    depth=10000.,
    strike=20.,
    dip=40.,
    rake=60.,
    magnitude=4.)

# Processing that data will return a pyrocko.gf.Response object.
response = engine.process(source_dc, targets)

# This will return a list of the requested traces:
synthetic_traces = response.pyrocko_traces()

# In addition to that it is also possible to extract interpolated travel
# times of phases which have been defined in the store's config file.
store = engine.get_store(store_id)

markers = []
for t in targets:
    dist = t.distance_to(source_dc)
    depth = source_dc.depth
    arrival_time = store.t('any_P', (depth, dist))
    m = PhaseMarker(tmin=arrival_time,
                    tmax=arrival_time,
                    phasename='P',
                    nslc_ids=(t.codes, ))
    markers.append(m)

# Finally, let's scrutinize these traces.
trace.snuffle(synthetic_traces, markers=markers)
def search(config, override_tmin=None, override_tmax=None, show_detections=False, show_movie=False, show_window_traces=False, force=False, stop_after_first=False, nparallel=6, save_imax=False, bark=False): fp = config.expand_path run_path = fp(config.run_path) # if op.exists(run_path): # if force: # shutil.rmtree(run_path) # else: # raise common.LassieError( # 'run directory already exists: %s' % # run_path) util.ensuredir(run_path) write_config(config, op.join(run_path, 'config.yaml')) ifm_path_template = config.get_ifm_path_template() detections_path = config.get_detections_path() events_path = config.get_events_path() figures_path_template = config.get_figures_path_template() config.setup_image_function_contributions() ifcs = config.image_function_contributions grid = config.get_grid() receivers = config.get_receivers() norm_map = gridmod.geometrical_normalization(grid, receivers) data_paths = fp(config.data_paths) for data_path in fp(data_paths): if not op.exists(data_path): pass p = pile.make_pile(data_paths, fileformat='detect') if p.is_empty(): raise common.LassieError('no usable waveforms found') for ifc in ifcs: ifc.prescan(p) shift_tables = [] tshift_minmaxs = [] for ifc in ifcs: shift_tables.append(ifc.get_table(grid, receivers)) tshift_minmaxs.append(num.nanmin(shift_tables[-1])) tshift_minmaxs.append(num.nanmax(shift_tables[-1])) fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs) tshift_min = min(tshift_minmaxs) tshift_max = max(tshift_minmaxs) if config.detector_tpeaksearch is not None: tpeaksearch = config.detector_tpeaksearch else: tpeaksearch = (tshift_max - tshift_min) + 1.0 / fsmooth_min tpad = max(ifc.get_tpad() for ifc in ifcs) + \ (tshift_max - tshift_min) + tpeaksearch tinc = (tshift_max - tshift_min) * 10. + 3.0 * tpad tavail = p.tmax - p.tmin tinc = min(tinc, tavail - 2.0 * tpad) if tinc <= 0: raise common.LassieError('available waveforms too short \n' 'required: %g s\n' 'available: %g s\n' % (2. 
        * tpad, tavail))

    blacklist = set(tuple(s.split('.')) for s in config.blacklist)
    whitelist = set(tuple(s.split('.')) for s in config.whitelist)

    distances = grid.distances(receivers)
    distances_to_grid = num.min(distances, axis=0)

    distance_min = num.min(distances)
    distance_max = num.max(distances)

    station_index = dict(
        (rec.codes, i) for (i, rec) in enumerate(receivers)
        if rec.codes not in blacklist and (
            not whitelist or rec.codes in whitelist) and (
            config.distance_max is None
            or distances_to_grid[i] <= config.distance_max))

    check_data_consistency(p, config)

    deltat_cf = max(p.deltats.keys())
    assert deltat_cf > 0.0

    while True:
        if not all(ifc.deltat_cf_is_available(deltat_cf * 2) for ifc in ifcs):
            break

        deltat_cf *= 2

    logger.info('CF lassie sampling interval (rate): %g s (%g Hz)' % (
        deltat_cf, 1.0 / deltat_cf))

    ngridpoints = grid.size()

    logger.info('number of grid points: %i' % ngridpoints)
    logger.info('minimum source-receiver distance: %g m' % distance_min)
    logger.info('maximum source-receiver distance: %g m' % distance_max)
    logger.info('minimum travel-time: %g s' % tshift_min)
    logger.info('maximum travel-time: %g s' % tshift_max)

    idetection = 0

    tmin = override_tmin or config.tmin or p.tmin + tpad
    tmax = override_tmax or config.tmax or p.tmax - tpad

    events = config.get_events()
    twindows = []
    if events is not None:
        for ev in events:
            if tmin <= ev.time <= tmax:
                twindows.append((
                    ev.time + tshift_min - (tshift_max - tshift_min)
                    * config.event_time_window_factor,
                    ev.time + tshift_min + (tshift_max - tshift_min)
                    * config.event_time_window_factor))
    else:
        twindows.append((tmin, tmax))

    for iwindow_group, (tmin_win, tmax_win) in enumerate(twindows):
        nwin = int(math.ceil((tmax_win - tmin_win) / tinc))

        logger.info('start processing time window group %i/%i: %s - %s' % (
            iwindow_group + 1, len(twindows),
            util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

        logger.info('number of time windows: %i' % nwin)
        logger.info('time window length: %g s' % (tinc + 2.0 * tpad))
        logger.info('time window payload: %g s' % tinc)
        logger.info('time window padding: 2 x %g s' % tpad)
        logger.info('time window overlap: %g%%' % (
            100.0 * 2.0 * tpad / (tinc + 2.0 * tpad)))

        iwin = -1
        for trs in p.chopper(
                tmin=tmin_win, tmax=tmax_win, tinc=tinc, tpad=tpad,
                want_incomplete=config.fill_incomplete_with_zeros,
                trace_selector=lambda tr: tr.nslc_id[:3] in station_index):

            iwin += 1
            trs_ok = []
            for tr in trs:
                if tr.ydata.size == 0:
                    logger.warning(
                        'skipping empty trace: %s.%s.%s.%s' % tr.nslc_id)
                    continue

                if not num.all(num.isfinite(tr.ydata)):
                    logger.warning(
                        'skipping trace because of invalid values: '
                        '%s.%s.%s.%s' % tr.nslc_id)
                    continue

                trs_ok.append(tr)

            trs = trs_ok

            if not trs:
                continue

            logger.info('processing time window %i/%i: %s - %s' % (
                iwin + 1, nwin,
                util.time_to_str(trs[0].wmin),
                util.time_to_str(trs[0].wmax)))

            wmin = trs[0].wmin
            wmax = trs[0].wmax

            if config.fill_incomplete_with_zeros:
                trs = zero_fill(trs, wmin - tpad, wmax + tpad)

            t0 = math.floor(wmin / deltat_cf) * deltat_cf
            iwmin = int(round((wmin - tpeaksearch - t0) / deltat_cf))
            iwmax = int(round((wmax + tpeaksearch - t0) / deltat_cf))
            lengthout = iwmax - iwmin + 1

            pdata = []
            trs_debug = []
            parstack_params = []
            for iifc, ifc in enumerate(ifcs):
                dataset = ifc.preprocess(
                    trs, wmin - tpeaksearch, wmax + tpeaksearch,
                    tshift_max - tshift_min, deltat_cf)

                if not dataset:
                    continue

                nstations_selected = len(dataset)

                nsls_selected, trs_selected = zip(*dataset)

                for tr in trs_selected:
                    tr.meta = {'tabu': True}

                trs_debug.extend(trs + list(trs_selected))

                istations_selected = num.array(
                    [station_index[nsl] for nsl in nsls_selected],
                    dtype=int)

                arrays = [tr.ydata.astype(float) for tr in trs_selected]

                offsets = num.array(
                    [int(round((tr.tmin - t0) / deltat_cf))
                     for tr in trs_selected], dtype=num.int32)

                w = ifc.get_weights(nsls_selected)

                weights = num.ones((ngridpoints, nstations_selected))
                weights *= w[num.newaxis, :]
                weights *= ifc.weight

                shift_table = shift_tables[iifc]

                ok = num.isfinite(shift_table[:, istations_selected])
                bad = num.logical_not(ok)

                shifts = -num.round(
                    shift_table[:, istations_selected]
                    / deltat_cf).astype(num.int32)

                weights[bad] = 0.0
                shifts[bad] = num.max(shifts[ok])

                pdata.append((list(trs_selected), shift_table, ifc))
                parstack_params.append((arrays, offsets, shifts, weights))

            if config.stacking_blocksize is not None:
                ipstep = config.stacking_blocksize
                frames = None
            else:
                ipstep = lengthout
                frames = num.zeros((ngridpoints, lengthout))

            twall_start = time.time()
            frame_maxs = num.zeros(lengthout)
            frame_argmaxs = num.zeros(lengthout, dtype=int)
            ipmin = iwmin
            while ipmin < iwmin + lengthout:
                ipsize = min(ipstep, iwmin + lengthout - ipmin)
                if ipstep == lengthout:
                    frames_p = frames
                else:
                    frames_p = num.zeros((ngridpoints, ipsize))

                for (arrays, offsets, shifts, weights) in parstack_params:
                    frames_p, _ = parstack(
                        arrays, offsets, shifts, weights, 0,
                        offsetout=ipmin,
                        lengthout=ipsize,
                        result=frames_p,
                        nparallel=nparallel,
                        impl='openmp')

                if config.sharpness_normalization:
                    frame_p_maxs = frames_p.max(axis=0)
                    frame_p_means = num.abs(frames_p).mean(axis=0)
                    frames_p *= (frame_p_maxs / frame_p_means)[num.newaxis, :]
                    frames_p *= norm_map[:, num.newaxis]

                if config.ifc_count_normalization:
                    frames_p *= 1.0 / len(ifcs)

                frame_maxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    frames_p.max(axis=0)

                frame_argmaxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    pargmax(frames_p)

                ipmin += ipstep
                del frames_p

            twall_end = time.time()

            logger.info('wallclock time for stacking: %g s' % (
                twall_end - twall_start))

            tmin_frames = t0 + iwmin * deltat_cf

            tr_stackmax = trace.Trace(
                '', 'SMAX', '', '',
                tmin=tmin_frames,
                deltat=deltat_cf,
                ydata=frame_maxs)

            tr_stackmax.meta = {'tabu': True}

            trs_debug.append(tr_stackmax)

            if show_window_traces:
                trace.snuffle(trs_debug)

            ydata_window = tr_stackmax.chop(
                wmin, wmax, inplace=False).get_ydata()

            logger.info('CF stats: min %g, max %g, median %g' % (
                num.min(ydata_window),
                num.max(ydata_window),
                num.median(ydata_window)))

            # 17 is the maximum number of seiger stations; 4 is a mean
            # noise baseline subtracted per missing station.
            detector_threshold_seiger = config.detector_threshold - (
                (17 - nstations_selected) * 4)

            if nstations_selected != 17:
                logger.info(
                    'Warning, station outage detected! Number of operable '
                    'stations: %s, threshold now: %s' % (
                        nstations_selected, detector_threshold_seiger))

            tpeaks, apeaks = list(zip(*[
                (tpeak, apeak)
                for (tpeak, apeak) in zip(*tr_stackmax.peaks(
                    detector_threshold_seiger, tpeaksearch))
                if wmin <= tpeak < wmax])) or ([], [])

            tr_stackmax_indx = tr_stackmax.copy(data=False)
            tr_stackmax_indx.set_ydata(frame_argmaxs.astype(num.int32))
            tr_stackmax_indx.set_location('i')

            for (tpeak, apeak) in zip(tpeaks, apeaks):
                iframe = int(round((tpeak - tmin_frames) / deltat_cf))
                imax = frame_argmaxs[iframe]

                latpeak, lonpeak, xpeak, ypeak, zpeak = \
                    grid.index_to_location(imax)

                idetection += 1

                detection = Detection(
                    id='%06i' % idetection,
                    time=tpeak,
                    location=geo.Point(
                        lat=float(latpeak),
                        lon=float(lonpeak),
                        x=float(xpeak),
                        y=float(ypeak),
                        z=float(zpeak)),
                    ifm=float(apeak))

                if bark:
                    common.bark()

                logger.info('detection found: %s' % str(detection))

                with open(detections_path, 'a') as f:
                    f.write('%06i %s %g %g %g %g %g %g\n' % (
                        idetection,
                        util.time_to_str(
                            tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
                        apeak,
                        latpeak, lonpeak, xpeak, ypeak, zpeak))

                ev = detection.get_event()

                with open(events_path, 'a') as f:
                    model.dump_events([ev], stream=f)

                if show_detections or config.save_figures:
                    fmin = min(ifc.fmin for ifc in ifcs)
                    fmax = min(ifc.fmax for ifc in ifcs)

                    fn = figures_path_template % {
                        'id': util.tts(t0).replace(' ', 'T'),
                        'format': 'png'}

                    util.ensuredirs(fn)

                    if frames is not None:
                        frames_p = frames
                        tmin_frames_p = tmin_frames
                        iframe_p = iframe
                    else:
                        iframe_min = max(
                            0, int(round(iframe - tpeaksearch / deltat_cf)))
                        iframe_max = min(
                            lengthout - 1,
                            int(round(iframe + tpeaksearch / deltat_cf)))

                        ipsize = iframe_max - iframe_min + 1
                        frames_p = num.zeros((ngridpoints, ipsize))
                        tmin_frames_p = tmin_frames + iframe_min * deltat_cf
                        iframe_p = iframe - iframe_min

                        for (arrays, offsets, shifts, weights) \
                                in parstack_params:

                            frames_p, _ = parstack(
                                arrays, offsets, shifts, weights, 0,
                                offsetout=iwmin + iframe_min,
                                lengthout=ipsize,
                                result=frames_p,
                                nparallel=nparallel,
                                impl='openmp')

                        if config.sharpness_normalization:
                            frame_p_maxs = frames_p.max(axis=0)
                            frame_p_means = num.abs(frames_p).mean(axis=0)
                            frames_p *= (
                                frame_p_maxs / frame_p_means)[num.newaxis, :]
                            frames_p *= norm_map[:, num.newaxis]

                        if config.ifc_count_normalization:
                            frames_p *= 1.0 / len(ifcs)

                    try:
                        plot.plot_detection(
                            grid, receivers, frames_p, tmin_frames_p,
                            deltat_cf, imax, iframe_p, xpeak, ypeak, zpeak,
                            tr_stackmax, tpeaks, apeaks,
                            detector_threshold_seiger,
                            wmin, wmax,
                            pdata, trs, fmin, fmax, idetection,
                            tpeaksearch,
                            movie=show_movie,
                            show=show_detections,
                            save_filename=fn,
                            event=ev)
                    except Exception as e:
                        logger.warning('plotting of detection failed: %s' % e)

                    del frames_p

                if stop_after_first:
                    return

            tr_stackmax.chop(wmin, wmax)
            tr_stackmax_indx.chop(wmin, wmax)

            if save_imax:
                io.save([tr_stackmax, tr_stackmax_indx], ifm_path_template)

            del frames

        logger.info('end processing time window group: %s - %s' % (
            util.time_to_str(tmin_win),
            util.time_to_str(tmax_win)))

    cat = Catalog()
    files = glob('%s/../figures/*qml*' % run_path)
    files.sort(key=os.path.getmtime)
    for fname in files:
        cat_read = read_events(fname)
        for event in cat_read:
            cat.append(event)

    cat.write('%s/../all_events_stacking.qml' % run_path, format='QUAKEML')
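# The stacking loop above boils down to repeated calls of pyrocko's
# parstack() routine. The following is a minimal, self-contained sketch
# (not part of the original code; all data and shapes are made up) of
# that delay-and-sum core: one row of shifts and weights per grid point,
# one column per station, method=0 to get the stacked frames back.

import numpy as num
from pyrocko.parstack import parstack

nstations, nsamples, ngridpoints = 4, 1000, 2

# synthetic characteristic functions, one array per station
arrays = [num.random.random(nsamples) for _ in range(nstations)]

# sample offset of each trace relative to a common origin time
offsets = num.zeros(nstations, dtype=num.int32)

# per (grid point, station) shifts in samples; grid point 0 stacks
# unshifted, grid point 1 applies a linear 10-sample moveout
shifts = num.zeros((ngridpoints, nstations), dtype=num.int32)
shifts[1, :] = -10 * num.arange(nstations, dtype=num.int32)

weights = num.ones((ngridpoints, nstations))

frames, ioff = parstack(arrays, offsets, shifts, weights, 0)
print(frames.shape, ioff)  # (ngridpoints, nsamples_out), sample offset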
        synth_traces_nn_t.append(tr)

    synth_traces_ml_t = []
    for i, target in enumerate(targets):
        tr = trace.Trace(
            ydata=synthetics_ml_t[i, :],
            tmin=gfs.reference_times[i],
            deltat=gfs.deltat)

        # print('trace tmin synthst', tr.tmin)
        tr.set_codes(*target.codes)
        tr.set_location('ml_t')
        synth_traces_ml_t.append(tr)

    # display to check
    trace.snuffle(
        traces + synth_traces_nn + synth_traces_ml + synth_traces_nn_t
        + synth_traces_ml_t,
        stations=sc.wavemaps[0].stations, events=[event])

    traces1, tmins = heart.seis_synthetics(
        engine, [patches[0]], targets,
        arrival_times=ats,
        wavename='any_P',
        arrival_taper=arrival_taper,
        filterer=filterer,
        outmode='stacked_traces')

    gfs.set_stack_mode('numpy')

    synth_traces_ml1 = []
    for i in range(1):
        synthetics_ml1 = gfs.stack_all(
            targetidxs=targetidxs,
            patchidxs=[i],
            starttimes=starttimes[0],
def prep_orient(datapath, st, loc, catalog, dir_ro, v_rayleigh,
                bp, dt_start, dt_stop, ccmin=0.80,
                plot_heatmap=False, plot_distr=False, debug=False):
    """
    Perform orientation analysis using Rayleigh waves, main function.

    Time window: 20 s before the 4.0 km/s arrival and 600 s afterwards
    (Stachnik et al. 2012):
    - compute the radial component for correction angles of 0 to 360 deg
    - cross-correlate hilbert(R) with the Z component for each angle
    - call plotting functions and/or write results to file

    :param datapath: path to rrd data
    :param st: current station (pyrocko station object)
    :param catalog: list of pyrocko events used for analysis
    :param dir_ro: output directory
    :param plot_heatmap: bool, optional
    :param plot_distr: bool, optional
    """
    logs = logging.getLogger('prep_orient')
    st_data_pile = pile.make_pile(
        datapath, regex='%s_%s_' % (st.network, st.station),
        show_progress=False)

    n_ev = len(catalog)

    if st_data_pile.tmin is not None and st_data_pile.tmax is not None:

        # calculate distance between all events and current station
        r_arr_by_ev = num.empty(n_ev)
        ev_lats = num.asarray([ev.lat for ev in catalog])
        ev_lons = num.asarray([ev.lon for ev in catalog])
        dists = distance_accurate50m_numpy(
            a_lats=ev_lats, a_lons=ev_lons,
            b_lats=st.lat, b_lons=st.lon,
            implementation='c')
        r_arr_by_ev = (dists / 1000.) / v_rayleigh
        cc_i_ev_vs_rota = num.empty((n_ev, 360))
        rot_angles = range(-180, 180, 1)
        for i_ev, ev in enumerate(catalog):
            arrT = ev.time + r_arr_by_ev[i_ev]

            start_twd1 = ev.time
            end_twd1 = arrT + 1800

            trZ = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'Z')
            trR = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'R')
            trT = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'T')

            start_twd2 = ev.time + r_arr_by_ev[i_ev] - dt_start
            end_twd2 = arrT + dt_stop

            if len(trZ) == 1 and len(trR) == 1 and len(trT) == 1:
                trZ = trZ[0]
                trR = trR[0]
                trT = trT[0]
                # debugging - window selection:
                if debug is True:
                    trace.snuffle(
                        [trZ, trR, trT],
                        markers=[
                            pm.Marker(
                                nslc_ids=[
                                    trZ.nslc_id, trR.nslc_id, trT.nslc_id],
                                tmin=start_twd2, tmax=end_twd2),
                            pm.Marker(
                                nslc_ids=[
                                    trZ.nslc_id, trR.nslc_id, trT.nslc_id],
                                tmin=arrT, tmax=arrT + 3)])

            else:
                cc_i_ev_vs_rota[i_ev, :] = num.nan
                continue

            try:
                trZ.bandpass(bp[0], bp[1], bp[2])
                trZ.chop(tmin=start_twd2, tmax=end_twd2)
            except trace.NoData:
                logs.warning('no data %s %s %s' % (trZ, trR, trT))
                continue

            for i_r, r in enumerate(rot_angles):
                print('rotation angle [deg]: %5d' % r, end='\r')
                rot_2, rot_3 = trace.rotate(
                    traces=[trR, trT], azimuth=r,
                    in_channels=['R', 'T'],
                    out_channels=['2', '3'])

                rot_2_y = rot_2.ydata
                rot_2_hilb = num.imag(
                    trace.hilbert(rot_2_y, len(rot_2_y)))
                rot_2_hilb_tr = trace.Trace(
                    deltat=rot_2.deltat,
                    ydata=rot_2_hilb,
                    tmin=rot_2.tmin)
                # problem: rot_2 and rot_2_hilb look exactly the same!
                # --> no phase shift. why? should be num.imag!!!
                # trace.snuffle([rot_2, rot_2_hilb_tr])
                rot_2_hilb_tr.bandpass(bp[0], bp[1], bp[2])
                rot_2_hilb_tr.chop(tmin=start_twd2, tmax=end_twd2)

                # if st.station == 'RORO' and r == 0:
                #     trace.snuffle([rot_2_hilb_tr, trZ])

                # normalize traces
                trZ.ydata /= abs(max(trZ.ydata))
                rot_2_hilb_tr.ydata /= abs(max(rot_2_hilb_tr.ydata))

                c = trace.correlate(
                    trZ, rot_2_hilb_tr,
                    mode='valid',
                    normalization='normal')

                t, coef = c.max()
                t2, coef2 = max_or_min(c)
                '''
                if st.station == 'MATE' and r == 0:
                    print(i_ev, ev.name, ev.depth)
                    print(r, t, coef, t2, coef2)
                    trace.snuffle([trZ, trR, rot_2_hilb_tr])
                '''
                cc_i_ev_vs_rota[i_ev, i_r] = coef
        '''
        if st.station == 'MATE':
            for i_ev in range(n_ev):
                print(num.argmax(cc_i_ev_vs_rota[i_ev, :]),
                      num.max(cc_i_ev_vs_rota[i_ev, :]))
        '''

        if plot_heatmap is True:
            fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 2))

            cax = ax.imshow(
                cc_i_ev_vs_rota, interpolation='nearest',
                vmin=-1.0, vmax=1.0,
                aspect='auto', extent=[-180, 180, n_ev, 0],
                cmap='binary')
            ax.set_ylabel('i_ev')
            ax.set_xlabel('Correction angle (deg)')
            ax.set_title('%s %s' % (st.network, st.station))
            cbar = fig.colorbar(
                cax, ticks=[0, 0.5, 1.0],
                orientation='horizontal',
                fraction=0.05, pad=0.5)
            cbar.ax.set_xticklabels(['0', '0.5', '1.0'])
            plt.tight_layout()
            # plt.show(fig)
            fig.savefig(os.path.join(
                dir_ro,
                '%s_%s_%s_rot_cc_heatmap.png'
                % (st.network, st.station, loc)))
            plt.close()

        if plot_distr is True:
            plot_ccdistr_each_event(
                cc_i_ev_vs_rota, catalog, rot_angles, st, loc, dir_ro)

        median_a, mean_a, std_a, switched, n_ev = \
            get_m_angle_switched(cc_i_ev_vs_rota, catalog, st, ccmin)

        dict_ev_angle = get_m_angle_all(cc_i_ev_vs_rota, catalog, st, ccmin)

        return median_a, mean_a, std_a, switched, dict_ev_angle, n_ev
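# Minimal synthetic check (not part of the original script) of the phase
# relation the loop above exploits: on a Rayleigh wave, Z is ~90 deg out
# of phase with R, so imag(hilbert(R)) should correlate strongly with Z
# near the correct orientation. Frequency and trace codes are made up.

import numpy as num
from pyrocko import trace

deltat = 0.01
t = num.arange(4000) * deltat
z = trace.Trace('', 'SYN', '', 'Z', deltat=deltat, tmin=0.0,
                ydata=num.sin(2.0 * num.pi * 0.2 * t))
r = trace.Trace('', 'SYN', '', 'R', deltat=deltat, tmin=0.0,
                ydata=num.cos(2.0 * num.pi * 0.2 * t))

# imaginary part of the analytic signal shifts R by 90 deg
r_hilb = trace.Trace('', 'SYN', '', 'RH', deltat=deltat, tmin=0.0,
                     ydata=num.imag(trace.hilbert(r.ydata, len(r.ydata))))

c = trace.correlate(z, r_hilb, mode='valid', normalization='normal')
tshift, coef = c.max()
print(tshift, coef)  # coef close to 1.0 at lag close to 0.0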
    def _off_test_synthetic(self):

        from pyrocko import gf

        km = 1000.
        nstations = 10
        edepth = 5 * km
        store_id = 'crust2_d0'

        swin = 2.
        lwin = 9. * swin
        ks = 1.0
        kl = 1.0
        kd = 3.0

        engine = gf.get_engine()
        snorths = (num.random.random(nstations) - 1.0) * 50 * km
        seasts = (num.random.random(nstations) - 1.0) * 50 * km
        targets = []
        # note: the loop variables must not shadow the snorths/seasts arrays
        for istation, (snorth, seast) in enumerate(zip(snorths, seasts)):
            targets.append(
                gf.Target(
                    quantity='displacement',
                    codes=('', 's%03i' % istation, '', 'Z'),
                    north_shift=float(snorth),
                    east_shift=float(seast),
                    store_id=store_id,
                    interpolation='multilinear'))

        source = gf.DCSource(
            north_shift=50 * km,
            east_shift=50 * km,
            depth=edepth)

        store = engine.get_store(store_id)

        response = engine.process(source, targets)

        trs = []

        station_traces = defaultdict(list)
        station_targets = defaultdict(list)
        for source, target, tr in response.iter_results():
            tp = store.t('any_P', source, target)
            t = tp - 5 * tr.deltat + num.arange(11) * tr.deltat
            if False:
                gauss = trace.Trace(
                    tmin=t[0],
                    deltat=tr.deltat,
                    ydata=num.exp(-((t - tp)**2) / ((2 * tr.deltat)**2)))

                tr.ydata[:] = 0.0
                tr.add(gauss)

            trs.append(tr)
            station_traces[target.codes[:3]].append(tr)
            station_targets[target.codes[:3]].append(target)

        station_stalta_traces = {}
        for nsl, traces in station_traces.items():
            etr = None
            for tr in traces:
                sqr_tr = tr.copy(data=False)
                sqr_tr.ydata = tr.ydata**2
                if etr is None:
                    etr = sqr_tr
                else:
                    etr += sqr_tr

            autopick.recursive_stalta(swin, lwin, ks, kl, kd, etr)
            etr.set_codes(channel='C')
            station_stalta_traces[nsl] = etr

        trace.snuffle(trs + list(station_stalta_traces.values()))

        deltat = trs[0].deltat

        nnorth = 50
        neast = 50

        size = 200 * km

        north = num.linspace(-size, size, nnorth)
        north2 = num.repeat(north, neast)
        east = num.linspace(-size, size, neast)
        east2 = num.tile(east, nnorth)
        depth = 5 * km

        def tcal(target, i):
            try:
                return store.t(
                    'any_P',
                    gf.Location(
                        north_shift=north2[i],
                        east_shift=east2[i],
                        depth=depth),
                    target)

            except gf.OutOfBounds:
                return 0.0

        nsls = sorted(station_stalta_traces.keys())

        tts = num.fromiter(
            (tcal(station_targets[nsl][0], i)
             for i in range(nnorth * neast)
             for nsl in nsls), dtype=float)

        arrays = [
            station_stalta_traces[nsl].ydata.astype(float) for nsl in nsls]

        offsets = num.array(
            [int(round(station_stalta_traces[nsl].tmin / deltat))
             for nsl in nsls], dtype=num.int32)

        shifts = -num.array(
            [int(round(tt / deltat)) for tt in tts],
            dtype=num.int32).reshape(nnorth * neast, nstations)

        weights = num.ones((nnorth * neast, nstations))

        print(shifts[25 * neast + 25] * deltat)
        print(offsets.dtype, shifts.dtype, weights.dtype)

        print('stack start')
        mat, ioff = parstack(arrays, offsets, shifts, weights, 1)
        print('stack stop')

        mat = num.reshape(mat, (nnorth, neast))

        from matplotlib import pyplot as plt

        fig = plt.figure()

        axes = fig.add_subplot(1, 1, 1, aspect=1.0)
        axes.contourf(east / km, north / km, mat)

        axes.plot(
            g(targets, 'east_shift') / km,
            g(targets, 'north_shift') / km, '^')

        axes.plot(source.east_shift / km, source.north_shift / km, 'o')
        plt.show()
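# Synthetic illustration (not from the test above) of the STA/LTA
# characteristic function computed per station there: recursive_stalta
# is assumed, as in the test, to operate in place on the trace's ydata.
# The transient and all parameter values below are made up.

import numpy as num
from pyrocko import trace, autopick

deltat = 0.01
ydata = num.random.normal(0., 1., 10000)
ydata[5000:5200] += 8.0       # synthetic "arrival" on top of noise
tr = trace.Trace('', 'STA1', '', 'Z', deltat=deltat, tmin=0.0, ydata=ydata)

swin, lwin = 2., 18.          # short / long window lengths [s]
ks, kl, kd = 1.0, 1.0, 3.0    # gain factors, as in the test above
autopick.recursive_stalta(swin, lwin, ks, kl, kd, tr)
tr.set_codes(channel='C')     # mark as characteristic function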
    def snuffle(self):
        '''Open *snuffler* with requested traces.'''
        trace.snuffle(self.traces_list())
    def snuffle(self):
        '''Open *snuffler* with the stored seismograms.'''
        trace.snuffle(self.seismograms)
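# Hypothetical usage sketch for thin wrappers like the two snuffle()
# methods above; file names and the marker below are made up, only the
# trace.snuffle() keywords mirror the calls used throughout this
# collection. The marker import path varies between pyrocko versions
# (pyrocko.gui_util in older releases).

from pyrocko import io, model, trace
from pyrocko.gui import marker as pmarker

trs = io.load('data.mseed')                      # hypothetical file
stations = model.load_stations('stations.txt')   # hypothetical file
markers = [pmarker.Marker(
    nslc_ids=[tr.nslc_id for tr in trs],
    tmin=trs[0].tmin, tmax=trs[0].tmax)]

trace.snuffle(trs, stations=stations, markers=markers)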
def detect(first512):
    s = first512[:12]
    if len(s) != 12:
        return False

    tag = SudsStructtag.unpack(s)
    # compare against bytes literals: unpacked fields are bytes in Python 3
    if tag.sync != b'S' \
            or tag.machine != b'6' \
            or tag.struct_type < 0 \
            or tag.struct_type > max_struct_type:

        return False

    return True


if __name__ == '__main__':
    util.setup_logging('pyrocko.suds')

    trs = list(iload(sys.argv[1], 'rb'))

    stations = load_stations(sys.argv[1])
    for station in stations:
        print(station)

    trace.snuffle(trs, stations=stations)
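# Hypothetical usage of detect() above for format sniffing; the file
# name is made up. Only the first 512 bytes of the file are needed.

with open('example.suds', 'rb') as f:
    if detect(f.read(512)):
        print('looks like a PC-SUDS file')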
    def test_against_kiwi(self):
        engine = gf.get_engine()
        store_id = 'chile_70km_crust'
        try:
            store = engine.get_store(store_id)
        except gf.NoSuchStore:
            logger.warning(
                'GF Store %s not available - skipping test' % store_id)
            return

        base_source = gf.RectangularSource(
            depth=15*km,
            strike=0.,
            dip=90.,
            rake=0.,
            magnitude=4.5,
            nucleation_x=-1.,
            length=10*km,
            width=0*km,
            stf=gf.BoxcarSTF(duration=1.0))

        base_event = base_source.pyrocko_event()

        channels = 'NEZ'
        nstations = 10
        stations = []
        targets = []
        for istation in range(nstations):
            dist = rand(40.*km, 900*km)
            azi = rand(-180., 180.)
            north_shift = dist * math.cos(azi*d2r)
            east_shift = dist * math.sin(azi*d2r)
            lat, lon = od.ne_to_latlon(0., 0., north_shift, east_shift)
            sta = 'S%02i' % istation
            station = model.Station(
                '', sta, '',
                lat=lat,
                lon=lon)

            station.set_channels_by_name('N', 'E', 'Z')
            stations.append(station)

            for cha in channels:
                target = gf.Target(
                    codes=station.nsl() + (cha,),
                    lat=lat,
                    lon=lon,
                    quantity='displacement',
                    interpolation='multilinear',
                    optimization='enable',
                    store_id=store_id)

                targets.append(target)

        from tunguska import glue

        nsources = 10

        # nprocs_max = multiprocessing.cpu_count()
        nprocs = 1

        try:
            seis = glue.start_seismosizer(
                gfdb_path=op.join(store.store_dir, 'db'),
                event=base_event,
                stations=stations,
                hosts=['localhost']*nprocs,
                balance_method='123321',
                effective_dt=0.5,
                verbose=False)

            ksource = to_kiwi_source(base_source)

            seis.set_source(ksource)

            recs = seis.get_receivers_snapshot(('syn',), (), 'plain')

            trs = []
            for rec in recs:
                for tr in rec.get_traces():
                    tr.set_codes(channel=transchan[tr.channel])
                    trs.append(tr)

            trs2 = engine.process(base_source, targets).pyrocko_traces()
            trace.snuffle(trs + trs2)

            seis.set_synthetic_reference()

            for sourcetype in ['point', 'rect']:
                sources = []
                for isource in range(nsources):
                    m = pmt.MomentTensor.random_dc()
                    strike, dip, rake = map(
                        float, m.both_strike_dip_rake()[0])

                    if sourcetype == 'point':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            nucleation_x=0.0,
                            nucleation_y=0.0,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0),
                            stf=gf.BoxcarSTF(duration=1.0))

                    elif sourcetype == 'rect':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            length=10*km,
                            width=5*km,
                            nucleation_x=-1.,
                            nucleation_y=0,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0),
                            stf=gf.BoxcarSTF(duration=1.0))
                    else:
                        assert False

                    sources.append(source)

                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    resp = engine.process(sources, targets, nprocs=nprocs)
                    t1 = time.time()
                    if temperature == 'hot':
                        dur_pyrocko = t1 - t0

                    del resp

                ksources = list(map(to_kiwi_source, sources))

                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    seis.make_misfits_for_sources(
                        ksources, show_progress=False)
                    t1 = time.time()
                    if temperature == 'hot':
                        dur_kiwi = t1 - t0

                print('pyrocko %-5s %5.2fs %5.1fx' % (
                    sourcetype, dur_pyrocko, 1.0))
                print('kiwi    %-5s %5.2fs %5.1fx' % (
                    sourcetype, dur_kiwi, dur_pyrocko/dur_kiwi))

        finally:
            seis.close()
            del seis
        vs = 2000.
        tlen = x[0] / vs * 2.

        deltat = 0.001

        n = int(num.round(tlen / deltat))

        out_x = num.zeros(n)
        out_y = num.zeros(n)
        out_z = num.zeros(n)

        import pylab as lab

        tau = 0.01
        t = num.arange(1000) * deltat
        lab.plot(t, num.exp(-t**2/tau**2))
        # lab.show()

        add_seismogram(
            vp, vs, 1.0, 1.0, 1.0, x, f, m6, 'displacement',
            deltat, 0.0, out_x, out_y, out_z, Gauss(tau))

        trs = []
        for out, comp in zip([out_x, out_y, out_z], 'NED'):
            tr = trace.Trace(
                '', 'Naja!', '', comp,
                deltat=deltat,
                tmin=0.0,
                ydata=out)

            trs.append(tr)

        trace.snuffle(trs)